author     Micheil Smith <micheil@brandedcode.com>    2010-11-15 10:50:14 +1100
committer  Ryan Dahl <ry@tinyclouds.org>              2010-11-15 19:49:52 -0800
commit     ade8cf96e0f19fab240b2a7fcf970e8b1d24d10b (patch)
tree       8afa0896103c99c794d4e09436632485e8190999
parent     879a9e05486337190aadc4fead03e35f5b732bb4 (diff)
Build API page into build/doc; simplify doctool.
-rw-r--r--  LICENSE                     3
-rw-r--r--  Makefile                   63
-rw-r--r--  tools/doctool/doctool.js  184
3 files changed, 117 insertions, 133 deletions
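The core of the change: the doctool no longer scans doc/api/ itself; the Makefile now invokes it once per file with a template and an input path and writes the result under build/doc. A minimal sketch of the equivalent manual invocation, following the new build/doc/api/%.html rule below (doc/api/fs.markdown is only an example input):

    # render one API page by hand, as the pattern rule does
    build/default/node tools/doctool/doctool.js doc/template.html doc/api/fs.markdown \
        > build/doc/api/fs.html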
diff --git a/LICENSE b/LICENSE
index 3969b37b29..4a270f7c96 100644
--- a/LICENSE
+++ b/LICENSE
@@ -25,6 +25,9 @@ are:
Apache-style license. OpenSSL is not included in the Node distribution.
See http://openssl.org/ for more information.
+ - tools/doctool/markdown.js is Released under MIT license and
+ Copyright 2009-2010 Dominic Baggott and Ash Berlin
+
- HTTP Parser, located at deps/http_parser, is a small C library
copyrighted by Ryan Lienhart Dahl and has a MIT license.
diff --git a/Makefile b/Makefile
index bd1901ca00..8be0a6f46f 100644
--- a/Makefile
+++ b/Makefile
@@ -1,5 +1,7 @@
WAF=python tools/waf-light
+web_root = ryan@nodejs.org:~/web/nodejs.org/
+
all: program
all-progress:
@@ -37,30 +39,60 @@ test-message: all
test-simple: all
python tools/test.py simple
-
+
test-pummel: all
python tools/test.py pummel
-
+
test-internet: all
python tools/test.py internet
+build/default/node: all
+
+apidoc_sources = $(wildcard doc/api/*.markdown)
+apidocs = $(addprefix build/,$(apidoc_sources:.markdown=.html))
+
+apidoc_dirs = build/doc build/doc/api/ build/doc/api/assets
-doc: doc/api/all.html doc/changelog.html
+apiassets = $(subst api_assets,api/assets,$(addprefix build/,$(wildcard doc/api_assets/*)))
-docopen: doc/api/all.html
- -google-chrome doc/api/all.html
+doc: build/default/node $(apidoc_dirs) $(apiassets) $(apidocs) build/doc/changelog.html
-doc/api/all.html: node doc/api/*.markdown
- ./node tools/doctool/doctool.js
+$(apidoc_dirs):
+ mkdir -p $@
-doc/changelog.html: ChangeLog doc/changelog_header.html doc/changelog_footer.html
- cat doc/changelog_header.html ChangeLog doc/changelog_footer.html > doc/changelog.html
+build/doc/api/assets/%: doc/api_assets/% build/doc/api/assets/
+ cp $< $@
+
+build/doc/api/%.html: doc/api/%.markdown build/default/node $(apidoc_dirs) $(apiassets)
+ build/default/node tools/doctool/doctool.js doc/template.html $< > $@
+
+build/doc/changelog.html: ChangeLog build/default/node build/doc/ $(apidoc_dirs) $(apiassets)
+ build/default/node tools/doctool/doctool.js doc/template.html $< \
+ | sed 's|assets/|api/assets/|g' \
+ | sed 's|<body>|<body id="changelog">|g' > $@
+ @echo $(apiassets)
+
+
+website_files = \
+ doc/index.html \
+ doc/cla.html \
+ doc/jquery.js \
+ doc/sh_main.js \
+ doc/sh_javascript.min.js \
+ doc/sh_vim-dark.css \
+ doc/logo.png \
+ doc/sponsored.png \
+ doc/pipe.css
website-upload: doc
- scp doc/* ryan@nodejs.org:~/web/nodejs.org/
+ scp -r build/doc/* $(web_root)
+ scp $(website_files) $(web_root)
+
+docopen: build/doc/api/all.html
+ -google-chrome build/doc/api/all.html
docclean:
- @-rm -f doc/api/*.html doc/changelog.html
+ -rm -rf build/doc
clean:
@$(WAF) clean
@@ -76,11 +108,12 @@ check:
VERSION=$(shell git describe)
TARNAME=node-$(VERSION)
-dist: doc/node.1 doc/api.html
- git archive --format=tar --prefix=$(TARNAME)/ HEAD | tar xf -
+#dist: doc/node.1 doc/api
+dist: doc
+ git archive --format=tar --prefix=$(TARNAME)/ HEAD | tar xf -
mkdir -p $(TARNAME)/doc
cp doc/node.1 $(TARNAME)/doc/node.1
- cp doc/api.html $(TARNAME)/doc/api.html
+ cp -r build/doc/api $(TARNAME)/doc/api
rm -rf $(TARNAME)/deps/v8/test # too big
tar -cf $(TARNAME).tar $(TARNAME)
rm -rf $(TARNAME)
@@ -95,4 +128,4 @@ bench-idle:
./node benchmark/idle_clients.js &
-.PHONY: bench clean docclean dist distclean check uninstall install all program staticlib dynamiclib test test-all website-upload
+.PHONY: bench clean docclean doc dist distclean check uninstall install all program staticlib dynamiclib test test-all website-upload
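Taken together, the new targets build the documentation tree under build/doc and clean it up again. A rough sketch of the expected workflow, assuming the tree has been configured so that build/default/node exists:

    make doc        # render doc/api/*.markdown and the ChangeLog into build/doc
    make docopen    # attempt to open build/doc/api/all.html in google-chrome
    make docclean   # remove the generated build/doc tree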
diff --git a/tools/doctool/doctool.js b/tools/doctool/doctool.js
index 47153a2ffb..4f3c6d4c57 100644
--- a/tools/doctool/doctool.js
+++ b/tools/doctool/doctool.js
@@ -1,138 +1,86 @@
-var fs = require("fs")
- , path = require("path")
- , cp = require('child_process')
- , markdown = require("./markdown");
+/*
+ Process a single doc file
-var cwd = process.cwd()
- , doc_root = path.join(cwd, "doc/api/")
- , build_root = path.join(cwd, "doc/api/")
- , assets_path = path.join(cwd, "doc/api_assets/")
- , bassets_path = path.join(build_root, "assets/");
+ argv[2] = template file
+ argv[3] = input file
+ argv[4] = output file
-/*
-A simple step / flow-control pattern, so that I can make the code in this file
-just a little bit more easy to follow.
*/
-var step = function(){
- var self = this;
- this.steps = Array.prototype.slice.call(arguments);
- this.index = 0;
- this.next = function(){
- var index = self.index++;
- return function(){
- if(index < self.steps.length){
- self.steps[index](self.next());
- } else {
- return function(){};
- }
- };
- };
- return this.next();
-};
+var fs = require("fs"),
+ path = require("path"),
+ markdown = require("./markdown"),
+ argv = process.argv,
+ argc = argv.length;
+
+var template = fs.readFileSync(argv[2], "utf8");
+
+var ids = {};
+
+function formatIdString(str){
+ str = str
+ .replace(/\([^)}]*\)/gmi, "")
+ .replace(/[^A-Za-z0-9_.]+/gmi, "_");
+
+ return str.substr(0,1).toLowerCase() + str.substr(1);
+}
var includeExpr = /^@include\s+([A-Za-z0-9-_]+)(?:\.)?([a-zA-Z]*)$/gmi;
-function convertData(data){
+function convertData(data, current_file){
// Allow including other pages in the data.
- data = data.replace(includeExpr, function(src, name, ext){
- try {
- var inc_path = path.join(doc_root, name+"."+(ext || "markdown"));
- return fs.readFileSync(inc_path, "utf8");
- } catch(e) {
- return "";
- }
- });
-
+ function loadIncludes(data){
+ return data.replace(includeExpr, function(src, name, ext){
+ try {
+ var include_path = path.join(current_file, "../", name+"."+(ext || "markdown"))
+ return loadIncludes(fs.readFileSync(include_path, "utf8"));
+ } catch(e) {
+ return "";
+ }
+ });
+ };
+
+ data = loadIncludes(data);
+
// Convert it to HTML from Markdown
if(data.length == 0){
data = "Sorry, this section is currently undocumented, but we'll be working on it.";
}
-
- return markdown.toHTML(markdown.parse(data), {xhtml:true});
-};
-/*
-Ensures that the output directory exists, this can probably be done in the
-makefile.
-*/
-function checkdir(next){
- fs.stat(build_root, function(err){
- if(err) {
- // easiest way to recursively create directories without doing loops.
- cp.exec("mkdir -p "+build_root, function(err, stdout, stderr){
- next();
- });
- } else {
- next();
- }
- })
-};
+ data = markdown.toHTML(markdown.parse(data), {xhtml:true});
-/*
-Loads the template for which the documentation should be outputed into.
-*/
-var template;
-
-function loadTemplates(next){
- fs.readFile(path.join(doc_root, "../template.html"), "utf8", function(e, d){
- if(e) throw e;
-
- template = d;
- next();
+ data = data.replace(/<hr><\/hr>/g, "<hr />");
+
+ data = data.replace(/(\<h[2-6])\>([^<]+)(\<\/h[1-6]\>)/gmi, function(o, ts, c, te){
+ var id = formatIdString(c);
+ return ts+' id="'+id+'">'+c+te;
});
+
+ return data;
};
+if(argc > 3){
+ var filename = argv[3];
-/*
-This function reads the doc/api/* directory, and filters out any files
-that are not markdown files. It then converts the markdown to HTML, and
-outputs it into the previously loaded template file.
-*/
-function convertFiles(next){
- fs.readdir(doc_root, function(err, files){
+ fs.readFile(filename, "utf8", function(err, data){
if(err) throw err;
-
- files.filter(function(file){
- var basename = path.basename(file, ".markdown");
- return path.extname(file) == ".markdown" &&
- basename.substr(0,1) != "_";
- }).forEach(function(file){
- var filename = path.basename(file, '.markdown')
- , build_path = path.join(build_root, filename+".html")
- , doc_path = path.join(doc_root, file);
-
- fs.readFile(doc_path, "utf8", function(err, data){
- if(err) throw err;
-
- // do conversion stuff.
- var html = convertData(data);
- var output = template.replace("{{content}}", html);
-
- if(filename == "index"){
- output = output.replace("{{section}}", "");
- } else {
- output = output.replace("{{section}}", filename+" - ")
- }
-
- fs.writeFile(build_path, output, function(err){
- if(err) throw err;
- });
- });
- });
- });
- // we don't need the next call to wait at all, so stick it here.
- next();
-};
-function copyAssets(next){
- cp.exec("cp -R "+assets_path+" "+bassets_path, function(err, stdout, stderr){
- next();
- });
-};
+ // do conversion stuff.
+ var html = convertData(data, filename);
+ var output = template.replace("{{content}}", html);
+
+ filename = path.basename(filename, '.markdown');
+
+ if(filename == "index"){
+ output = output.replace("{{section}}", "");
+ output = output.replace(/<body([^>]*)>/, '<body class="index" $1>');
+ } else {
+ output = output.replace("{{section}}", filename+" - ")
+ }
-step(
- checkdir,
- copyAssets,
- loadTemplates,
- convertFiles
-)();
+ if(argc > 4) {
+ fs.writeFile(argv[4], output);
+ } else {
+ process.stdout.write(output);
+ }
+ });
+}
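With the step/flow-control helper, the directory check, and the asset copying removed, the rewritten doctool is a plain filter: template and input file on the command line, HTML on stdout, or written to a file when a fourth argument is given. A sketch of both modes, reusing paths from the Makefile rules above (the input files are examples):

    # three arguments: the converted page goes to stdout
    build/default/node tools/doctool/doctool.js doc/template.html ChangeLog > build/doc/changelog.html

    # four arguments: the converted page is written to the named file
    build/default/node tools/doctool/doctool.js doc/template.html doc/api/fs.markdown build/doc/api/fs.html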