mirror of https://github.com/jsdoc/jsdoc.git (synced 2025-12-08 19:46:11 +00:00)

Added node files.

This commit is contained in:
parent 2d7f690096
commit 618e579b8c
51  jsdoc.js  Normal file
@@ -0,0 +1,51 @@
// like: java -classpath ~/Scripts/js.jar org.mozilla.javascript.tools.shell.Main jsdoc.js examples/test1.js
// or: node jsdoc examples/test1.js

(function() {

    // normalise rhino
    if (typeof load !== 'undefined') {
        load('lib/rhino-shim.js');
    }

    // global modules
    global._ = require('underscore');
    _.mixin(require('underscore.string'));
    global.signals = require('signals');

    // needed modules
    var fs = require('fs'),
        opts = require('jsdoc/options').parse( process.argv.slice(2) ),
        dumper = require('jsdoc/util/dumper');

    // user configuration
    try {
        var conf = JSON.parse(
            fs.readFileSync('./conf.json', 'utf-8')
        );
    }
    catch (e) {
        throw('Configuration file cannot be evaluated. '+e);
    }

    if (typeof conf.plugins !== 'undefined') {
        for (var i = 0, len = conf.plugins.length; i < len; i++) {
            require(conf.plugins[i]);
        }
    }

    if (opts.help) {
        console.log('USAGE: node main.js yourfile.js');
        process.exit(0);
    }
    var srcFile = opts._[0];

    var src = fs.readFileSync(srcFile, 'utf-8');

    var parser = require('jsdoc/parser');

    var symbols = parser.parse(src);

    console.log( dumper.dump(symbols) );

})();
238  lib/json.js  Normal file
@@ -0,0 +1,238 @@
// This source code is free for use in the public domain.
// NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.

// http://code.google.com/p/json-sans-eval/

/**
 * Parses a string of well-formed JSON text.
 *
 * If the input is not well-formed, then behavior is undefined, but it is
 * deterministic and is guaranteed not to modify any object other than its
 * return value.
 *
 * This does not use `eval` so is less likely to have obscure security bugs than
 * json2.js.
 * It is optimized for speed, so is much faster than json_parse.js.
 *
 * This library should be used whenever security is a concern (when JSON may
 * come from an untrusted source), speed is a concern, and erroring on malformed
 * JSON is *not* a concern.
 *
 *                     Pros                      Cons
 *                   +-----------------------+-----------------------+
 * json_sans_eval.js | Fast, secure          | Not validating        |
 *                   +-----------------------+-----------------------+
 * json_parse.js     | Validating, secure    | Slow                  |
 *                   +-----------------------+-----------------------+
 * json2.js          | Fast, some validation | Potentially insecure  |
 *                   +-----------------------+-----------------------+
 *
 * json2.js is very fast, but potentially insecure since it calls `eval` to
 * parse JSON data, so an attacker might be able to supply strange JS that
 * looks like JSON, but that executes arbitrary javascript.
 * If you do have to use json2.js with untrusted data, make sure you keep
 * your version of json2.js up to date so that you get patches as they're
 * released.
 *
 * @param {string} json per RFC 4627
 * @param {function (this:Object, string, *):*} opt_reviver optional function
 *     that reworks JSON objects post-parse per Chapter 15.12 of EcmaScript3.1.
 *     If supplied, the function is called with a string key, and a value.
 *     The value is the property of 'this'. The reviver should return
 *     the value to use in its place. So if dates were serialized as
 *     {@code { "type": "Date", "time": 1234 }}, then a reviver might look like
 *     {@code
 *     function (key, value) {
 *       if (value && typeof value === 'object' && 'Date' === value.type) {
 *         return new Date(value.time);
 *       } else {
 *         return value;
 *       }
 *     }}.
 *     If the reviver returns {@code undefined} then the property named by key
 *     will be deleted from its container.
 *     {@code this} is bound to the object containing the specified property.
 * @return {Object|Array}
 * @author Mike Samuel <mikesamuel@gmail.com>
 */
var jsonParse = (function () {
  var number
      = '(?:-?\\b(?:0|[1-9][0-9]*)(?:\\.[0-9]+)?(?:[eE][+-]?[0-9]+)?\\b)';
  var oneChar = '(?:[^\\0-\\x08\\x0a-\\x1f\"\\\\]'
      + '|\\\\(?:[\"/\\\\bfnrt]|u[0-9A-Fa-f]{4}))';
  var string = '(?:\"' + oneChar + '*\")';

  // Will match a value in a well-formed JSON file.
  // If the input is not well-formed, may match strangely, but not in an unsafe
  // way.
  // Since this only matches value tokens, it does not match whitespace, colons,
  // or commas.
  var jsonToken = new RegExp(
      '(?:false|true|null|[\\{\\}\\[\\]]'
      + '|' + number
      + '|' + string
      + ')', 'g');

  // Matches escape sequences in a string literal
  var escapeSequence = new RegExp('\\\\(?:([^u])|u(.{4}))', 'g');

  // Decodes escape sequences in object literals
  var escapes = {
    '"': '"',
    '/': '/',
    '\\': '\\',
    'b': '\b',
    'f': '\f',
    'n': '\n',
    'r': '\r',
    't': '\t'
  };
  function unescapeOne(_, ch, hex) {
    return ch ? escapes[ch] : String.fromCharCode(parseInt(hex, 16));
  }

  // A non-falsy value that coerces to the empty string when used as a key.
  var EMPTY_STRING = new String('');
  var SLASH = '\\';

  // Constructor to use based on an open token.
  var firstTokenCtors = { '{': Object, '[': Array };

  var hop = Object.hasOwnProperty;

  return function (json, opt_reviver) {
    // Split into tokens
    var toks = json.match(jsonToken);
    // Construct the object to return
    var result;
    var tok = toks[0];
    var topLevelPrimitive = false;
    if ('{' === tok) {
      result = {};
    } else if ('[' === tok) {
      result = [];
    } else {
      // The RFC only allows arrays or objects at the top level, but the JSON.parse
      // defined by the EcmaScript 5 draft does allow strings, booleans, numbers, and null
      // at the top level.
      result = [];
      topLevelPrimitive = true;
    }

    // If undefined, the key in an object key/value record to use for the next
    // value parsed.
    var key;
    // Loop over remaining tokens maintaining a stack of uncompleted objects and
    // arrays.
    var stack = [result];
    for (var i = 1 - topLevelPrimitive, n = toks.length; i < n; ++i) {
      tok = toks[i];

      var cont;
      switch (tok.charCodeAt(0)) {
        default: // sign or digit
          cont = stack[0];
          cont[key || cont.length] = +(tok);
          key = void 0;
          break;
        case 0x22: // '"'
          tok = tok.substring(1, tok.length - 1);
          if (tok.indexOf(SLASH) !== -1) {
            tok = tok.replace(escapeSequence, unescapeOne);
          }
          cont = stack[0];
          if (!key) {
            if (cont instanceof Array) {
              key = cont.length;
            } else {
              key = tok || EMPTY_STRING; // Use as key for next value seen.
              break;
            }
          }
          cont[key] = tok;
          key = void 0;
          break;
        case 0x5b: // '['
          cont = stack[0];
          stack.unshift(cont[key || cont.length] = []);
          key = void 0;
          break;
        case 0x5d: // ']'
          stack.shift();
          break;
        case 0x66: // 'f'
          cont = stack[0];
          cont[key || cont.length] = false;
          key = void 0;
          break;
        case 0x6e: // 'n'
          cont = stack[0];
          cont[key || cont.length] = null;
          key = void 0;
          break;
        case 0x74: // 't'
          cont = stack[0];
          cont[key || cont.length] = true;
          key = void 0;
          break;
        case 0x7b: // '{'
          cont = stack[0];
          stack.unshift(cont[key || cont.length] = {});
          key = void 0;
          break;
        case 0x7d: // '}'
          stack.shift();
          break;
      }
    }
    // Fail if we've got an uncompleted object.
    if (topLevelPrimitive) {
      if (stack.length !== 1) { throw new Error(); }
      result = result[0];
    } else {
      if (stack.length) { throw new Error(); }
    }

    if (opt_reviver) {
      // Based on walk as implemented in http://www.json.org/json2.js
      var walk = function (holder, key) {
        var value = holder[key];
        if (value && typeof value === 'object') {
          var toDelete = null;
          for (var k in value) {
            if (hop.call(value, k) && value !== holder) {
              // Recurse to properties first. This has the effect of causing
              // the reviver to be called on the object graph depth-first.

              // Since 'this' is bound to the holder of the property, the
              // reviver can access sibling properties of k including ones
              // that have not yet been revived.

              // The value returned by the reviver is used in place of the
              // current value of property k.
              // If it returns undefined then the property is deleted.
              var v = walk(value, k);
              if (v !== void 0) {
                value[k] = v;
              } else {
                // Deleting properties inside the loop has vaguely defined
                // semantics in ES3 and ES3.1.
                if (!toDelete) { toDelete = []; }
                toDelete.push(k);
              }
            }
          }
          if (toDelete) {
            for (var i = toDelete.length; --i >= 0;) {
              delete value[toDelete[i]];
            }
          }
        }
        return opt_reviver.call(holder, key, value);
      };
      result = walk({ '': result }, '');
    }

    return result;
  };
})();
1761  lib/nodeunit.js  Normal file
File diff suppressed because it is too large
253  lib/rhino-require.js  Normal file
@@ -0,0 +1,253 @@
/*
    Rhino-Require is Public Domain
    <http://en.wikipedia.org/wiki/Public_Domain>

    The author or authors of this code dedicate any and all copyright interest
    in this code to the public domain. We make this dedication for the benefit
    of the public at large and to the detriment of our heirs and successors. We
    intend this dedication to be an overt act of relinquishment in perpetuity of
    all present and future rights to this code under copyright law.
*/

(function() {

var require = global.require = function(id) { /*debug*///console.log('require('+id+')');
    var moduleContent = '',
        moduleUri;

    moduleUri = require.resolve(id);
    moduleContent = '';

    var file = new java.io.File(moduleUri);
    try {
        var scanner = new java.util.Scanner(file).useDelimiter("\\Z");
        moduleContent = String( scanner.next() );
    }
    catch(e) {
        throw 'Unable to read file at: '+moduleUri+', '+e;
    }

    if (moduleContent) {
        try {
            var f = new Function('require', 'exports', 'module', moduleContent),
                exports = require.cache[moduleUri] || {},
                module = { id: id, uri: moduleUri, exports: exports };


            require._root.unshift(moduleUri);
            f.call({}, require, exports, module);
            require._root.shift();
        }
        catch(e) {
            throw 'Unable to require source code from "' + moduleUri + '": ' + e.toSource();
        }

        exports = module.exports || exports;
        require.cache[id] = exports;
    }
    else {
        throw 'The requested module cannot be returned: no content for id: "' + id + '" in paths: ' + require.paths.join(', ');
    }

    return exports;
}
require._root = [''];
require.paths = [];
require.cache = {}; // cache module exports. Like: {id: exported}

/** Given a module id, try to find the path to the associated module.
*/
require.resolve = function(id) {
    // TODO: 1. load node core modules

    // 2. dot-relative module id, like './foo/bar'
    var parts = id.match(/^(\.?\.\/|\/)(.+)$/),
        isRelative = false,
        isAbsolute = false,
        basename = id;

    if (parts) {
        isRelative = parts[1] === './' || parts[1] === '../';
        isAbsolute = parts[1] === '/';
        basename = parts[2];
    }

    if (typeof basename !== 'undefined') {

        if (isAbsolute) {
            rootedId = id;
        }
        else {
            var root = (isRelative? toDir(require._root[0] || '.') : '.'),
                rootedId = (root + '/' + id).replace(/\/[^\/]+\/\.\.\//g, '/').replace(/\/\.\//g, '/'),
                uri = '';
        }

        if ( uri = loadAsFile(rootedId) ) { }
        else if ( uri = loadAsDir(rootedId) ) { }
        else if ( uri = loadNodeModules(rootedId) ) { }
        else if ( uri = nodeModulesPaths(rootedId, 'rhino_modules') ) { }
        else if ( uri = nodeModulesPaths(rootedId, 'node_modules') ) { }

        if (uri !== '') return toAbsolute(uri);

        throw 'Require Error: Not found.';
    }
}

/** Given a path, return the base directory of that path.
    @example toDir('/foo/bar/somefile.js'); => '/foo/bar'
*/
function toDir(path) {
    var file = new java.io.File(path);

    if (file.isDirectory()) {
        return path;
    }

    var parts = path.split(/[\\\/]/);
    parts.pop();

    return parts.join('/');
}

/** Returns true if the given path exists and is a file.
*/
function isFile(path) {
    var file = new java.io.File(path);

    if (file.isFile()) {
        return true;
    }

    return false;
}

/** Returns true if the given path exists and is a directory.
*/
function isDir(path) {
    var file = new java.io.File(path);

    if (file.isDirectory()) {
        return true;
    }

    return false;
}

/** Get the path of the current working directory
*/
function getCwd() {
    return toDir( ''+new java.io.File('.').getAbsolutePath() ).replace(/\/\.$/, '');
}

function toAbsolute(relPath) {
    absPath = ''+new java.io.File(relPath).getAbsolutePath();
    absPath = absPath.replace(/\/[^\/]+\/\.\.\//g, '/').replace(/\/\.\//g, '/');
    return absPath;
}

/** Assume the id is a file, try to find it.
*/
function loadAsFile(id) {
    if ( isFile(id) ) { return id; }

    if ( isFile(id+'.js') ) { return id+'.js'; }

    if ( isFile(id+'.node') ) { throw 'Require Error: .node files not supported'; }
}

/** Assume the id is a directory, try to find a module file within it.
*/
function loadAsDir(id) {
    if (!isDir(id)) {
        return;
    }
    // look for the "main" property of the package.json file
    if ( isFile(id+'/package.json') ) {
        var packageJson = readFileSync(id+'/package.json', 'utf-8');
        eval( 'packageJson = '+ packageJson);
        if (packageJson.hasOwnProperty('main')) {
            var main = (id + '/' + packageJson.main).replace(/\/\.?\//g, '/');
            return require.resolve(main);
        }
    }

    if ( isFile(id+'/index.js') ) {
        return id+'/index.js';
    }
}

function loadNodeModules(id) {
    var path,
        uri;
    for (var i = 0, len = require.paths.length; i < len; i++) {
        path = require.paths[i];
        if (isDir(path)) {
            path = (path + '/' + id).replace(/\/\.?\//g, '/');

            uri = loadAsFile(path);
            if (typeof uri !== 'undefined') {
                return uri;
            }

            uri = loadAsDir(path);
            if (typeof uri !== 'undefined') {
                return uri;
            }
        }
    }
}

function nodeModulesPaths(id, moduleFolder) {
    var cwd = getCwd(),
        dirs = cwd.split('/'),
        dir,
        path,
        filename,
        uri;

    while (dirs.length) {
        dir = dirs.join('/');
        path = dir+'/'+moduleFolder;

        if ( isDir(path) ) {
            filename = (path+'/'+id).replace(/\/\.?\//g, '/');

            if ( uri = loadAsFile(filename) ) {
                uri = uri.replace(cwd, '.');
                return uri;
            }

            if ( uri = loadAsDir(filename) ) {
                uri = uri.replace(cwd, '.');
                return uri;
            }
        }

        dirs.pop();
    }
}

function readFileSync(filename, encoding, callback) {
    if (typeof arguments[1] === 'function') {
        encoding = null;
        callback = arguments[1];
    }

    encoding = encoding || java.lang.System.getProperty('file.encoding');

    try {
        var content = new java.util.Scanner(
            new java.io.File(filename),
            encoding
        ).useDelimiter("\\Z");

        return String( content.next() );
    }
    catch (e) {
        return '';
    }
}

})();
69  lib/rhino-shim.js  Normal file
@@ -0,0 +1,69 @@
global = this;

load('lib/Rhino-Require/src/require.js');

(function(rhinoArgs) {
    var dumper;
    global.console = {
        log: function(/*...*/) {
            var args = Array.prototype.slice.call(arguments, 0),
                dumper = dumper || require('jsdoc/util/dumper');

            for (var i = 0, len = args.length; i < len; i++) {
                if (typeof args[i] !== 'string') {
                    args[i] = dumper.dump(args[i]);
                }
            }

            print( args.join(' ') );
        }
    };

    global.process = {
        exit: function(n) {
            n = n || 0;
            java.lang.System.exit(n);
        },
        argv: [null, null].concat( Array.prototype.slice.call(rhinoArgs, 0) )
    };

    if (typeof JSON === 'undefined') { // JSON is defined in Rhino 1.7+
        load('lib/json.js');
        global.JSON = {
            parse: function(raw) {
                return jsonParse(raw);
            },
            stringify: function(o) {
                return ''+ o;
            }
        }
    }

    (function () {
        var counter = 1;
        var timeoutIds = {};

        global.setTimeout = function(fn, delay) {
            var id = counter++,
                timer = new java.util.Timer();

            timeoutIds[id] = [
                new JavaAdapter(java.util.TimerTask,{run: function(){fn(); timer.purge(); timer.cancel();}}),
                timer
            ];

            timer.schedule(timeoutIds[id][0], delay);
            return id;
        }

        global.clearTimeout = function(id) {
            if (typeof timeoutIds[id] !== 'undefined') {
                timeoutIds[id][0].cancel();
                timeoutIds[id][1].purge();
                timeoutIds[id][1].cancel();
                delete timeoutIds[id];
            }
        }
    })();

})(arguments);
1  node_modules/.bin/nodeunit  generated  vendored  Symbolic link
@@ -0,0 +1 @@
../nodeunit/bin/nodeunit
140  node_modules/jsdoc/argparser.js  generated  vendored  Normal file
@@ -0,0 +1,140 @@
/**
    Parse the command line arguments.
    @module jsdoc/argparser
    @author Michael Mathews <micmath@gmail.com>
    @license Apache License 2.0 - See file 'LICENSE.md' in this project.
*/
(function() {

    /**
        Create an instance of the parser.
        @classdesc A parser to interpret the key value pairs entered on the command
        line.
        @constructor
    */
    exports.ArgParser = function() {
        this._options = [];
    }

    exports.ArgParser.prototype._getOptionByShortName = function(name) {
        for (var i = this._options.length; i--;) {
            if (this._options[i].shortName === name) { return this._options[i]; }
        }
        return null;
    }

    exports.ArgParser.prototype._getOptionByLongName = function(name) {
        for (var i = this._options.length; i--;) {
            if (this._options[i].longName === name) { return this._options[i]; }
        }
        return null;
    }

    /**
     * Provide information about a legal option.
     * @param {character} shortName The short name of the option, entered like: -T.
     * @param {string} longName The equivalent long name of the option, entered like: --test.
     * @param {boolean} hasValue Does this option require a value? Like: -t templatename
     * @param {string} helpText
     * @example
     * myParser.addOption('t', 'template', true, 'The path to the template.');
     * myParser.addOption('h', 'help', false, 'Show the help message.');
     */
    exports.ArgParser.prototype.addOption = function(shortName, longName, hasValue, helpText) {
        this._options.push({shortName: shortName, longName: longName, hasValue: hasValue, helpText: helpText});
    };

    /**
        Generate a summary of all the options with corresponding help text.
        @returns {string}
    */
    exports.ArgParser.prototype.help = function() {
        var help = 'OPTIONS:\n',
            option;

        for (var i = 0, leni = this._options.length; i < leni; i++) {
            option = this._options[i];

            if (option.shortName) {
                help += '-' + option.shortName + (option.longName? ' or ' : '');
            }

            if (option.longName) {
                help += '--' + option.longName;
            }

            if (option.hasValue) {
                help += ' <value>';
            }

            help += ' ' + option.helpText + '\n';
        }

        return help;
    };

    /**
        Get the options.
        @param {Array.<string>} args An array, like ['-x', 'hello']
        @param {Object} [defaults={}] An optional collection of default values.
        @returns {Object} The keys will be the longNames, or the shortName if
        no longName is defined for that option. The values will be the values
        provided, or `true` if the option accepts no value.
    */
    exports.ArgParser.prototype.parse = function(args, defaults) {
        var result = defaults || {};

        result._ = [];

        for (var i = 0, leni = args.length; i < leni; i++) {
            var arg = '' + args[i],
                next = (i < leni-1)? '' + args[i+1] : null,
                option,
                shortName,
                longName,
                name,
                value = null;

            // like -t
            if (arg.charAt(0) === '-') {

                // like: --template
                if (arg.charAt(1) === '-') {
                    name = longName = arg.slice(2);
                    option = this._getOptionByLongName(longName);
                }
                else {
                    name = shortName = arg.slice(1);
                    option = this._getOptionByShortName(shortName);
                }

                if (option === null) {
                    throw new Error( 'Unknown command line option found: ' + name );
                }

                if (option.hasValue) {
                    value = next;
                    i++;

                    if (value === null || value.charAt(0) === '-') {
                        throw new Error( 'Command line option requires a value: ' + name );
                    }
                }
                else {
                    value = true;
                }

                if (option.longName && shortName) {
                    name = option.longName;
                }

                result[name] = value;
            }
            else {
                result._.push(arg);
            }
        }

        return result;
    }
})();
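A minimal usage sketch of the parser above (illustrative only; the option names and argument values are made up, and the module is assumed to be loaded the same way jsdoc/options.js loads it):

    var argv = require('jsdoc/argparser');

    var myParser = new argv.ArgParser();
    myParser.addOption('t', 'template', true,  'The path to the template.');
    myParser.addOption('h', 'help',     false, 'Show the help message.');

    // Short names are mapped back to their long names, and any argument
    // that is not an option ends up in the catch-all `_` array.
    var opts = myParser.parse(['-t', 'default', 'examples/test1.js']);
    console.log(opts.template); // 'default'
    console.log(opts._);        // ['examples/test1.js']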
161  node_modules/jsdoc/doclet.js  generated  vendored  Normal file
@@ -0,0 +1,161 @@
(function() {
    var plugin = require('jsdoc/plugin');

    exports.Doclet = Doclet;
    function Doclet(jsdoc, meta) {
        if (jsdoc !== '') {
            this.tags = this.parseTags(jsdoc);
        }
        else {
            this.tags = [];
        }

        plugin.manager.run('doclet', [this]);
    }

    Doclet.prototype.parseTags = function(commentSrc) {
        // tags have [title, type, pname, text]

        commentSrc = fixDescription( trim(commentSrc) );
        this.src = commentSrc;
        var tags = splitTags(commentSrc),
            tag;

        for (var i = 0, len = tags.length; i < len; i++) {
            tag = tags[i];
            if ( dict.hasOwnProperty(tag.title) ) {
                var def = dict[tag.title];
                if (typeof def.onTag === 'function') {
                    def.onTag(tag, this);
                }
            }
        }

        return tags;
    }

    var dict = {};
    dict.name = {
        has: ['value'],
        title: 'name',
        onTag: function(tag, doc) {
            tag.text = trim(tag.text);
            if (tag.text) doc.name = tag.text;
        }
    }

    dict.description = {
        has: ['value'],
        title: 'description',
        onTag: function(tag, doc) {
            tag.text = trim(tag.text);
            doc.description = tag.text;
        }
    }
    dict.desc = dict.description;

    dict['var'] = {
        has: ['value'],
        title: 'var',
        onTag: function(tag, doc) {
            doc.kind = 'var';
            dict.name.onTag(tag, doc); // trimmed here
        }
    };

    dict['example'] = {
        has: ['value'],
        title: 'example',
        onTag: function(tag, doc) {
            // do not trim, whitespace matters in examples
        }
    };

    dict['type'] = {
        has: ['value'],
        title: 'type',
        onTag: function(tag, doc) {
            tag.text = trim(tag.text);
            if ( /\{(.+?)\}/.test(tag.text) ) {
                tag.text = trim(RegExp.$1);
            }
        }
    };

    function trim(text, newlines) {
        if (!text) { return ''; }

        if (newlines) {
            return text.replace(/^[\n\r\f]+|[\n\r\f]+$/g, '');
        }
        else {
            return text.replace(/^\s+|\s+$/g, '');
        }
    }

    /**
        Given the raw source of a jsdoc comment, splits it up into individual tags.
        @returns An array of tags, like: [{title, text}], where the original src
        would be like: "@title text"
    */
    function parseParamText(tagSrc) {
        var pname, pdesc, poptional, pdefault;

        // like: pname, pname pdesc, or name - pdesc
        tagSrc.match(/^(\[[^\]]+\]|\S+)((?:\s*\-\s*|\s+)(\S[\s\S]*))?$/);
        pname = RegExp.$1;
        pdesc = RegExp.$3;

        if ( /^\[\s*(.+?)\s*\]$/.test(pname) ) {
            pname = RegExp.$1;
            poptional = true;

            if ( /^(.+?)\s*=\s*(.+)$/.test(pname) ) {
                pname = RegExp.$1;
                pdefault = RegExp.$2;
            }
        }
        return [pname, pdesc, poptional, pdefault];
    }

    function fixDescription(docletSrc) {
        if (docletSrc && !/^\s*@/.test(docletSrc)) {
            docletSrc = '@description ' + docletSrc;
        }
        return docletSrc;
    }

    /**
        Given the raw source of a jsdoc comment, splits it up into individual tags.
        @returns An array of tags, like: [{title, text}], where the original src
        would be like: "@title text"
    */
    function splitTags(docletSrc) {
        var tagSrcs = [];

        // split out the basic tags, keep surrounding whitespace
        // like: @tagTitle tagBody
        docletSrc
        .replace(/^(\s*)@(\S)/gm, '$1\\@$2') // replace splitter ats with an arbitrary sequence
        .split('\\@') // then split on that arbitrary sequence
        .forEach(function($) {
            if ($) {
                var parsedTag = $.match(/^(\S+)(:?\s+(\S[\s\S]*))?/);

                if (parsedTag) {
                    var tagTitle = parsedTag[1],
                        tagText = parsedTag[2];

                    if (tagTitle) {
                        tagSrcs.push({
                            title: tagTitle.toLowerCase(),
                            text: tagText || ''
                        });
                    }
                }
            }
        });

        return tagSrcs;
    }
})();
71  node_modules/jsdoc/options.js  generated  vendored  Normal file
@@ -0,0 +1,71 @@
/**
    @module jsdoc/options
    @requires jsdoc/argparser
    @author Michael Mathews <micmath@gmail.com>
    @license Apache License 2.0 - See file 'LICENSE.md' in this project.
*/
(function() {
    var argv = require('jsdoc/argparser');

    var argParser = new argv.ArgParser(),
        ourOptions,
        defaults = {
            template: 'default',
            destination: './out/'
        };

    argParser.addOption('t', 'template', true, 'The name of the template to use. Default: the "default" template');
    argParser.addOption('c', 'configure', true, 'The path to the configuration file. Default: jsdoc basedir + conf.json');
    argParser.addOption('e', 'encoding', true, 'Assume this encoding when reading all source files. Default: your system default encoding');
    argParser.addOption('d', 'destination', true, 'The path to the output folder. Use "console" to dump data to the console. Default: console');
    argParser.addOption('V', 'validate', false, 'Validate the results produced by parsing the source code.');
    argParser.addOption('r', 'recurse', false, 'Recurse into subdirectories when scanning for source code files.');
    argParser.addOption('h', 'help', false, 'Print this message and quit.');
    argParser.addOption('X', 'expel', false, 'Dump all found doclet internals to console and quit.');
    argParser.addOption('q', 'query', true, 'Provide a querystring to define custom variable names/values to add to the options hash.');


    // TODO [-R, recurseonly] = a number representing the depth to recurse
    // TODO [-f, filter] = a regex to filter on <-- this can be better defined in the configs?

    /**
        Set the options for this app.
        @throws {Error} Illegal arguments will throw errors.
        @param {string|String[]} args The command line arguments for this app.
    */
    exports.parse = function(args) {
        args = args || [];

        if (typeof args === 'string' || args.constructor === String) {
            args = (''+args).split(/\s+/g);
        }

        ourOptions = argParser.parse(args, defaults);

        return ourOptions;
    }

    /**
        Display help message for options.
    */
    exports.help = function() {
        return argParser.help();
    }

    /**
        Get a named option.
        @param {string} name The name of the option.
        @return {string} The value associated with the given name.
    *//**
        Get all the options for this app.
        @return {Object} A collection of key/values representing all the options.
    */
    exports.get = function(name) {
        if (typeof name === 'undefined') {
            return ourOptions;
        }
        else {
            return ourOptions[name];
        }
    }
})();
136  node_modules/jsdoc/parser.js  generated  vendored  Normal file
@@ -0,0 +1,136 @@
(function() {

    var Narcissus = require('narcissus').Narcissus,
        Doclet = require('jsdoc/doclet').Doclet;


    // handle orphaned doclets, associated with no code token
    Narcissus.parser.Tokenizer.prototype.onJsDoc = function(doclet, meta) {
        handle.JSDOC(doclet);
    }

    var path,
        symbols = 'wtf';

    exports.parse = function(src) {
        symbols = [];

        // may call handle.JSDOC
        var ast = Narcissus.parser.parse(src, '', 1);
        //console.log(String(ast));
        path = [];

        walk(ast.children);

        return symbols;
    }

    function defined(o) {
        return typeof o !== 'undefined';
    }

    function typeToName(type) {
        var name;
        if (name = typeToName.types[type]) {
            return name;
        }
        return type;
    }
    typeToName.types = ['END', 'NEWLINE', 'SEMICOLON', 'COMMA', 'ASSIGN', 'HOOK', 'COLON', 'CONDITIONAL', 'OR', 'AND', 'BITWISE_OR', 'BITWISE_XOR', 'BITWISE_AND', 'EQ', 'NE', 'STRICT_EQ', 'STRICT_NE', 'LT', 'LE', 'GE', 'GT', 'LSH', 'RSH', 'URSH', 'PLUS', 'MINUS', 'MUL', 'DIV', 'MOD', 'NOT', 'BITWISE_NOT', 'UNARY_PLUS', 'UNARY_MINUS', 'INCREMENT', 'DECREMENT', 'DOT', 'LEFT_BRACKET', 'RIGHT_BRACKET', 'LEFT_CURLY', 'RIGHT_CURLY', 'LEFT_PAREN', 'RIGHT_PAREN', 'SCRIPT', 'BLOCK', 'LABEL', 'FOR_IN', 'CALL', 'NEW_WITH_ARGS', 'INDEX', 'ARRAY_INIT', 'OBJECT_INIT', 'PROPERTY_INIT', 'GETTER', 'SETTER', 'GROUP', 'LIST', 'LET_BLOCK', 'ARRAY_COMP', 'GENERATOR', 'COMP_TAIL', 'IDENTIFIER', 'NUMBER', 'STRING', 'REGEXP', 'BREAK', 'CASE', 'CATCH', 'CONST', 'CONTINUE', 'DEBUGGER', 'DEFAULT', 'DELETE', 'DO', 'ELSE', 'EXPORT', 'FALSE', 'FINALLY', 'FOR', 'FUNCTION', 'IF', 'IMPORT', 'IN', 'INSTANCEOF', 'LET', 'MODULE', 'NEW', 'NULL', 'RETURN', 'SWITCH', 'THIS', 'THROW', 'TRUE', 'TRY', 'TYPEOF', 'VAR', 'VOID', 'YIELD', 'WHILE', 'WITH'];

    var handle = {
        JSDOC: function(jsdoc) {
            //console.log( '>>> jsdoc "'+jsdoc+'"' );
            symbols.push({longname: null, jsdoc: new Doclet(jsdoc)});
        },
        VAR: function(node) {
            var child = null,
                varDoc = defined(node.jsdoc)? node.jsdoc : '';

            while ( child = node.children.shift() ) {
                if (varDoc) {
                    child.jsdoc = varDoc;
                    varDoc = null;
                }

                var namePath = path.join('') + (path.length?'~':'') + child.name;
                symbols.push({longname: namePath, jsdoc: defined(child.jsdoc)? new Doclet(child.jsdoc) : new Doclet('')});
                //console.log( '>>> variable '+namePath+(defined(child.jsdoc)? ' "'+child.jsdoc+'"' : '') );
                var children = walkable(child);
                if (children) { walk(children); }
            }
        },
        FUNCTION: function(node) {
            var namePath = path.join('') + (path.length?'~':'') + node.name,
                jsdoc = defined(node.jsdoc)? node.jsdoc : '';

            symbols.push({longname: namePath, jsdoc: new Doclet(jsdoc)});
            //console.log( '>>> function '+namePath+(defined(node.jsdoc)? ' "'+node.jsdoc+'"' : '') );
            path.push((path.length?'~':'')+node.name);
            walk(node.body.children);
            path.pop();
        },
        OBJECT_INIT: function(node) {
            console.log( '>>> TODO object literal '+node.name+' '+node.jsdoc);

        }

    };

    function walk(ast) {
        //console.log('walk('+ast.length+')');
        var node = null,
            nodeType = -1;

        while (node = ast.shift()) {
            nodeType = typeToName(node.type);
            //console.log('~>>>> nodeType: '+nodeType+' '+node.name);
            if (handle.hasOwnProperty(nodeType) && typeof handle[nodeType] === 'function') {
                handle[nodeType](node);
            }
            else if (node.jsdoc) {
                handle.JSDOC(node.jsdoc);
            }

            var children = walkable(node);
            if (children) { walk(children); }
        }
    }

    function walkable(node) {
        //console.log('>>> getting walkables of: '+typeToName(node.type)+' '+node.name);
        if ( defined(node.body) && defined(node.body.children) && node.body.children !== null) {
            //console.log('- has body/children');
            return node.body.children;
        }
        if ( defined(node.expression) && node.expression && defined(node.expression.children) && node.expression.children ) {
            //console.log('- has expression/children');
            return node.expression.children;
        }
        // like: foo = function(){}
        if ( defined(node.initializer)) {
            //console.log('- has initializer');
            if (typeToName(node.initializer.type) === 'FUNCTION') {
                //console.log('- has function value');
                path.push('~'+node.name);
                return(node.initializer.body.children);
            }
            else if (typeToName(node.initializer.type) === 'OBJECT_INIT') {
                //console.log('- has object value');
                path.push('~'+node.name);
                return(node.initializer.children);
            }
            // like foo = /**doc*/{};
            else if (node.initializer.jsdoc) {
                handle.JSDOC(node.initializer.jsdoc);
            }
        }
        if ( defined(node.children) ) {
            //console.log('- has children '+node.children.length);
            return node.children;
        }
        //console.log('nothing to walk');
    }


})();
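A sketch of what exports.parse produces (it assumes the patched Narcissus that attaches jsdoc comments to nodes, as the onJsDoc hook above implies, and the source string is made up):

    var parser = require('jsdoc/parser');
    var symbols = parser.parse('/** A greeting. */\nfunction hello() {}');

    // `symbols` is an array of { longname, jsdoc } records: the function above
    // yields one with longname 'hello' and a Doclet built from its comment;
    // nested functions get '~'-joined longnames like 'outer~inner'.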
28  node_modules/jsdoc/plugin.js  generated  vendored  Normal file
@@ -0,0 +1,28 @@
if (typeof global.pluginRegistry === 'undefined') {
    global.pluginRegistry = {};
}
registry = global.pluginRegistry;

exports.manager = manager = {
    on: function(name, callback) {
        //console.log('on '+name);
        if (!registry.hasOwnProperty(name)) {
            registry[name] = [];
        }
        registry[name].push(callback);
        //console.log('on registry '+registry.toSource());
    },
    run: function(name, args) {
        //console.log('run '+name);
        //consoleconsole.log('run registry '+registry.toSource());
        if (registry.hasOwnProperty(name)) {
            var callbacks = registry[name];
            //console.log('callbacks '+callbacks.length);
            for (var i = 0, len = callbacks.length; i < len; i++) {
                callbacks[i].apply(manager, args);
            }
        }
    }

}
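A minimal sketch of a plugin module built on this registry (the file name and the check inside the callback are hypothetical; jsdoc.js require()s each entry in conf.json's "plugins" array, and doclet.js calls manager.run('doclet', [this]) for every new Doclet):

    // e.g. saved as plugins/log-names.js and listed under "plugins" in conf.json
    var plugin = require('jsdoc/plugin');

    plugin.manager.on('doclet', function(doclet) {
        // invoked with each Doclet instance as it is constructed
        if (typeof doclet.name !== 'undefined') {
            console.log('saw a doclet named: ' + doclet.name);
        }
    });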
158  node_modules/jsdoc/util/dumper.js  generated  vendored  Normal file
@@ -0,0 +1,158 @@
/**
    Recursively print out all names and values in a data structure.
    @module common/dumper
    @author Michael Mathews <micmath@gmail.com>
    @license Apache License 2.0 - See file 'LICENSE.md' in this project.
*/
(function() {
    /**
        @param {any} object
    */
    exports.dump = function(object) {
        indentBy = 0;
        output = '';

        walk(object);
        outdent(false);
        return output;
    }

    const INDENTATION = '    '; // 4 spaces
    var indentBy,
        output;

    function pad(depth) {
        var padding = '';
        while (depth--) {
            padding += INDENTATION;
        }
        return padding;
    }

    /**
        @param {string} openingBrace - The opening brace to add, like "{".
        @private
        @inner
        @memberof module:common/dumper
    */
    function indent(openingBrace) {
        indentBy++;
        if (openingBrace) output += openingBrace + '\n';
    }

    /**
        @param {string|boolean} closingBrace - The closing brace to add, like "}" or if boolean
        `false` no closing brace or trailing newline.
        @private
        @inner
        @memberof module:common/dumper
    */
    function outdent(closingBrace) {
        indentBy--;
        output = output.replace(/,\n$/, '\n'); // trim trailing comma
        if (closingBrace === false) { output = output.replace(/\n$/, ''); }
        else if (closingBrace) output += pad(indentBy) + closingBrace + ',\n';
    }

    var seen = [];
    seen.has = function(object) {
        for (var i = 0, l = seen.length; i < l; i++) {
            if (seen[i] === object) { return true; }
        }
        return false;
    }

    function walk(object) {
        var value;

        if ( value = getValue(object) ) {
            output += value + ',\n';
        }
        else if ( isUnwalkable(object) ) {
            output += '<Object>,\n'
        }
        else if ( isRegExp(object) ) {
            output += '<RegExp ' + object + '>,\n'
        }
        else if ( isDate(object) ) {
            output += '<Date ' + object.toUTCString() + '>,\n'
        }
        else if ( isFunction(object) ) {
            output += '<Function' + (object.name? ' '+ object.name : '') + '>,\n';
        }
        else if ( isArray(object) ) {
            if ( seen.has(object) ) {
                output += '<CircularRef>,\n';
                return;
            }
            else {
                seen.push(object);
            }

            indent('[');
            for (var i = 0, leni = object.length; i < leni; i++) {
                output += pad(indentBy); // + i + ': ';
                walk( object[i] );
            }
            outdent(']');
        }
        else if ( isObject(object) ) {
            if ( seen.has(object) ) {
                output += '<CircularRef>,\n';
                return;
            }
            else {
                seen.push(object);
            }

            indent('{');
            for (var p in object) {
                if ( object.hasOwnProperty(p) ) {
                    output += pad(indentBy) + stringify(p) + ': ';
                    walk( object[p] );
                }
            }
            outdent('}');
        }
    }

    function getValue(o) { // see: https://developer.mozilla.org/en/JavaScript/Reference/Operators/Special/typeof
        if (o === null) { return 'null'; }
        if ( /^(string|boolean|number|undefined)$/.test(typeof o) ) {
            return ''+stringify(o);
        }
    }

    function stringify(o) {
        return JSON.stringify(o);
    }

    function isUnwalkable(o) { // some objects are unwalkable, like Java native objects
        return (typeof o === 'object' && typeof o.constructor === 'undefined');
    }

    function isArray(o) {
        return o && (o instanceof Array) || o.constructor === Array;
    }

    function isRegExp(o) {
        return (o instanceof RegExp) ||
            (typeof o.constructor !== 'undefined' && o.constructor.name === 'RegExp');
    }

    function isDate(o) {
        return o && (o instanceof Date) ||
            (typeof o.constructor !== 'undefined' && o.constructor.name === 'Date');
    }

    function isFunction(o) {
        return o && (typeof o === 'function' || o instanceof Function);// ||
            //(typeof o.constructor !== 'undefined' && (o.constructor||{}).name === 'Function');
    }

    function isObject(o) {
        return o && o instanceof Object ||
            (typeof o.constructor !== 'undefined' && o.constructor.name === 'Object');
    }

})();
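A small usage sketch of the dumper (the object passed in is illustrative only):

    var dumper = require('jsdoc/util/dumper');

    // prints an indented, JSON-like listing of every key and value, using
    // <Function ...>, <Date ...>, <RegExp ...> and <CircularRef> placeholders
    // for values that plain JSON cannot represent
    console.log( dumper.dump({ name: 'x', tags: ['a', 'b'] }) );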
33  node_modules/jsecma5.js  generated  vendored  Normal file
@@ -0,0 +1,33 @@
// ECMAScript3 versions of ECMAScript5 constructs used in Narcissus parser
// All properties will be writable, configurable and enumerable, no matter
// the descriptor. Descriptor get/set is also ignored.

(function() {
    if (Object.defineProperty === undefined) {
        Object.defineProperty = function(obj, prop, descriptor) {
            obj[prop] = descriptor.value;
        };
    }

    if (Object.defineProperties === undefined) {
        Object.defineProperties = function(obj, props) {
            for (var prop in props) {
                if (props.hasOwnProperty(prop)) {
                    Object.defineProperty(obj, prop, props[prop]);
                }
            }
        };
    }

    if (Object.create === undefined) {
        Object.create = function(obj, props) {
            function ctor() {}
            ctor.prototype = obj;
            var o = new ctor();
            if (props !== undefined) {
                Object.defineProperties(o, props);
            }
            return o;
        };
    }
})();
5  node_modules/jsmods.js  generated  vendored  Normal file
@@ -0,0 +1,5 @@
// module keyword is removed so that it parses like any identifier
delete Narcissus.definitions.tokens.module;
delete Narcissus.definitions.keywords.module;
delete Narcissus.definitions.tokenIds.module;
/* global */ tkn = Narcissus.definitions.tokenIds;
1  node_modules/markdown/.gitignore  generated  vendored  Normal file
@@ -0,0 +1 @@
.*.swp
68  node_modules/markdown/README.markdown  generated  vendored  Normal file
@@ -0,0 +1,68 @@
markdown-js
===========

Yet another markdown parser, this time for JavaScript. There's a few
options that precede this project but they all treat markdown to HTML
conversion as a single step process. You pass markdown in and get HTML
out, end of story. We had some pretty particular views on how the
process should actually look, which include:

* producing well-formed HTML. This means that em and strong nesting is
  important, as is the ability to output as both HTML and XHTML

* having an intermediate representation to allow processing of parsed
  data (we in fact have two, both [JsonML]: a markdown tree and an
  HTML tree)

* being easily extensible to add new dialects without having to
  rewrite the entire parsing mechanics

* having a good test suite. The only test suites we could find tested
  massive blocks of input, and passing depended on outputting the HTML
  with exactly the same whitespace as the original implementation

[JsonML]: http://jsonml.org/ "JSON Markup Language"

## Usage

The simple way to use it with CommonJS is:

    var input = "# Heading\n\nParagraph";
    var output = require( "markdown" ).toHTML( input );
    print( output );

If you want more control check out the documentation in
[lib/markdown.js] which details all the methods and parameters
available (including examples!). One day we'll get the docs generated
and hosted somewhere for nicer browsing.

We're yet to try it out in a browser, though it's high up on our list of
things to sort out for this project.

[lib/markdown.js]: http://github.com/evilstreak/markdown-js/blob/master/lib/markdown.js

## Intermediate Representation

Internally the process to convert a chunk of markdown into a chunk of
HTML has three steps:

1. Parse the markdown into a JsonML tree. Any references found in the
   parsing are stored in the attribute hash of the root node under the
   key `references`.

2. Convert the markdown tree into an HTML tree. Rename any nodes that
   need it (`bulletlist` to `ul` for example) and lookup any references
   used by links or images. Remove the references attribute once done.

3. Stringify the HTML tree being careful not to wreck whitespace where
   whitespace is important (surrounding inline elements for example).

Each step of this process can be called individually if you need to do
some processing or modification of the data at an intermediate stage.
For example, you may want to grab a list of all URLs linked to in the
document before rendering it to HTML which you could do by recursing
through the HTML tree looking for `a` nodes.
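As a sketch of running the steps by hand (`parse`, `toHTMLTree` and
`renderJsonML` are the helpers documented in [lib/markdown.js]; under the
node packaging in this tree the markdown object is exposed as
`require( "markdown" ).markdown`):

    var md = require( "markdown" ).markdown;

    var mdTree   = md.parse( "# Heading\n\nParagraph" ); // 1. markdown JsonML tree
    var htmlTree = md.toHTMLTree( mdTree );              // 2. HTML JsonML tree
    var html     = md.renderJsonML( htmlTree );          // 3. HTML string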

## License

Released under the MIT license.
3  node_modules/markdown/lib/index.js  generated  vendored  Normal file
@@ -0,0 +1,3 @@
// super simple module for the most common nodejs use case.
exports.markdown = require("./markdown");
exports.parse = exports.markdown.toHTML;
1452  node_modules/markdown/lib/markdown.js  generated  vendored  Normal file
File diff suppressed because it is too large
42  node_modules/markdown/package.json  generated  vendored  Normal file
@@ -0,0 +1,42 @@
{
  "name" : "markdown",
  "version" : "0.2.1",
  "description" : "A sensible Markdown parser for javascript",
  "keywords" : [ "markdown", "text processing", "ast" ],
  "maintainers" : [
    {
      "name" : "Dominic Baggott",
      "email" : "dominic.baggott@gmail.com",
      "web" : "http://evilstreak.co.uk"
    },
    {
      "name" : "Ash Berlin",
      "email" : "ash_markdownjs@firemirror.com",
      "web" : "http://ashberlin.com"
    }
  ],
  "contributors" : [
    {
      "name" : "Dominic Baggott",
      "email" : "dominic.baggott@gmail.com",
      "web" : "http://evilstreak.co.uk"
    },
    {
      "name" : "Ash Berlin",
      "email" : "ash_markdownjs@firemirror.com",
      "web" : "http://ashberlin.com"
    }
  ],
  "bugs" : "http://github.com/evilstreak/markdown-js/issues",
  "licenses" : [
    {
      "type" : "MIT",
      "url" : "http://www.opensource.org/licenses/mit-license.php"
    }
  ],
  "main" : "./lib/index.js",
  "repository" : {
    "type" : "git",
    "url" : "git://github.com/evilstreak/markdown-js.git"
  }
}
5  node_modules/markdown/seed.yml  generated  vendored  Normal file
@@ -0,0 +1,5 @@
---
name: markdown-js
description: JavaScript implementation of Markdown
tags: markdown parser
version: 0.1.2
84  node_modules/markdown/test/features.t.js  generated  vendored  Normal file
@ -0,0 +1,84 @@
|
|||||||
|
const test = require('test'),
|
||||||
|
asserts = test.asserts,
|
||||||
|
fs = require( "fs-base" ),
|
||||||
|
markdown = require( "markdown" ),
|
||||||
|
args = require( "system" ).args.splice( 1 );
|
||||||
|
|
||||||
|
function test_dialect( dialect, features ) {
|
||||||
|
var path = module.resource.resolve( "features" ),
|
||||||
|
dialect_test = exports[ "test_" + dialect ] = {};
|
||||||
|
|
||||||
|
for ( var f in features ) {
|
||||||
|
( function( feature ) {
|
||||||
|
dialect_test[ "test_" + feature ] = function() {
|
||||||
|
var test_path = path + feature + "/";
|
||||||
|
|
||||||
|
// grab all the test files in this feature
|
||||||
|
var tests = fs.list( test_path );
|
||||||
|
|
||||||
|
// filter to only the raw files
|
||||||
|
tests = tests.filter( function( x ) x.match( /\.text$/ ) );
|
||||||
|
|
||||||
|
// remove the extensions
|
||||||
|
tests = tests.map( function( x ) x.replace( /\.text$/, "" ) );
|
||||||
|
|
||||||
|
for ( var t in tests ) {
|
||||||
|
// load the raw text
|
||||||
|
var test_name = tests[ t ].substring( tests[ t ].lastIndexOf( "/" ) + 1 ),
|
||||||
|
text_file = fs.rawOpen( test_path + tests[ t ] + ".text", "r" ),
|
||||||
|
text = text_file.readWhole();
|
||||||
|
text_file.close();
|
||||||
|
|
||||||
|
// load the target output
|
||||||
|
if ( fs.isFile( test_path + tests[ t ] + ".json" ) ) {
|
||||||
|
try {
|
||||||
|
var json_file = fs.rawOpen( test_path + tests[ t ] + ".json", "r" ),
|
||||||
|
json = JSON.parse( json_file.readWhole() );
|
||||||
|
json_file.close();
|
||||||
|
|
||||||
|
var output = markdown.toHTMLTree( text, dialect );
|
||||||
|
asserts.same( output, json, test_name );
|
||||||
|
}
|
||||||
|
catch( e ) {
|
||||||
|
asserts.ok( 0, "Failed with error on " + test_name + ": " + e );
|
||||||
|
if ( e.stack )
|
||||||
|
asserts.diag( e.stack );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
asserts.ok( 0, "No target output for " + test_name );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} )( features[ f ] );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if ( require.main === module ) {
|
||||||
|
var dialects = {};
|
||||||
|
dialects.Gruber = [
|
||||||
|
"blockquotes",
|
||||||
|
"code",
|
||||||
|
"emphasis",
|
||||||
|
"headers",
|
||||||
|
"horizontal_rules",
|
||||||
|
"images",
|
||||||
|
"linebreaks",
|
||||||
|
"links",
|
||||||
|
"lists"
|
||||||
|
];
|
||||||
|
|
||||||
|
dialects.Maruku = dialects.Gruber.slice( 0 );
|
||||||
|
dialects.Maruku.push( "meta", "definition_lists" );
|
||||||
|
|
||||||
|
// TODO if dialects/features were passed on the command line, filter to them
|
||||||
|
// if ( args.length ) {
|
||||||
|
// features = features.filter( function( x ) args.indexOf( x ) !== -1 );
|
||||||
|
// }
|
||||||
|
|
||||||
|
for ( d in dialects ) {
|
||||||
|
test_dialect( d, dialects[ d ] );
|
||||||
|
}
|
||||||
|
|
||||||
|
test.runner( exports );
|
||||||
|
}
|
||||||
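The harness above pairs every features/&lt;feature&gt;/&lt;case&gt;.text input with a &lt;case&gt;.json JsonML expectation and compares them via markdown.toHTMLTree. A rough standalone equivalent of one loop iteration, using plain Node.js core modules instead of the fs-base/test harness (the path and the assert calls are assumptions for illustration, not part of this commit):

// Hypothetical stand-in for one iteration of the fixture loop above:
var fs = require("fs");
var assert = require("assert");
var markdown = require("markdown").markdown;

var base = "node_modules/markdown/test/features/blockquotes/simple";   // assumed path
var text = fs.readFileSync(base + ".text", "utf-8");
var expected = JSON.parse(fs.readFileSync(base + ".json", "utf-8"));

var tree = markdown.toHTMLTree(text, "Gruber");   // JsonML tree for the Gruber dialect
assert.deepEqual(tree, expected);                 // the check asserts.same performs above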
15
node_modules/markdown/test/features/blockquotes/contains_code.json
Normal file
@ -0,0 +1,15 @@
["html",
  ["blockquote",
    ["p",
      "There's a code block in here:"
    ],
    ["pre",
      ["code",
        "SET foo = TRUE\n\nIF foo GOTO 10"
      ]
    ],
    ["p",
      "Wasn't that nice?"
    ]
  ]
]
7
node_modules/markdown/test/features/blockquotes/contains_code.text
Normal file
@ -0,0 +1,7 @@
> There's a code block in here:
>
> SET foo = TRUE
>
> IF foo GOTO 10
>
> Wasn't that nice?
10
node_modules/markdown/test/features/blockquotes/lazy_wrapping.json
Normal file
@ -0,0 +1,10 @@
["html",
  ["blockquote",
    ["p",
      "If you're too lazy\nto wrap your code nicely"
    ],
    ["p",
      "This will still work"
    ]
  ]
]
4
node_modules/markdown/test/features/blockquotes/lazy_wrapping.text
Normal file
@ -0,0 +1,4 @@
> If you're too lazy
to wrap your code nicely

> This will still work
18
node_modules/markdown/test/features/blockquotes/leading_paras.json
Normal file
@ -0,0 +1,18 @@
["html",
  ["p",
    "Amy wrote:"
  ],
  ["blockquote",
    ["p",
      "No wai?"
    ]
  ],
  ["p",
    "Bob wrote:"
  ],
  ["blockquote",
    ["p",
      "Ya rly!"
    ]
  ]
]
5
node_modules/markdown/test/features/blockquotes/leading_paras.text
Normal file
@ -0,0 +1,5 @@
Amy wrote:
> No wai?

Bob wrote:
> Ya rly!
15
node_modules/markdown/test/features/blockquotes/nested.json
Normal file
@ -0,0 +1,15 @@
["html",
  ["blockquote",
    ["p",
      "One"
    ],
    ["blockquote",
      ["p",
        "Two"
      ]
    ],
    ["p",
      "Three"
    ]
  ]
]
5
node_modules/markdown/test/features/blockquotes/nested.text
Normal file
@ -0,0 +1,5 @@
> One
>
> > Two
>
> Three
7
node_modules/markdown/test/features/blockquotes/simple.json
Normal file
@ -0,0 +1,7 @@
["html",
  ["blockquote",
    ["p",
      "Blockquote"
    ]
  ]
]
1
node_modules/markdown/test/features/blockquotes/simple.text
Normal file
@ -0,0 +1 @@
> Blockquote
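Each .json fixture is a JsonML tree: the first array element is a tag name, an optional object holds attributes, and the remaining elements are children. A hypothetical helper (not part of markdown-js) that shows how such a tree maps onto HTML markup:

// Hypothetical JsonML-to-HTML walker, simplified and without escaping:
function jsonmlToHtml(node) {
  if (typeof node === "string") return node;                   // text node
  var tag = node[0], start = 1, attrs = "";
  if (typeof node[1] === "object" && !Array.isArray(node[1])) {
    for (var k in node[1]) attrs += " " + k + '="' + node[1][k] + '"';
    start = 2;
  }
  var inner = node.slice(start).map(jsonmlToHtml).join("");
  return "<" + tag + attrs + ">" + inner + "</" + tag + ">";
}
// jsonmlToHtml(["blockquote", ["p", "Blockquote"]])
//   -> "<blockquote><p>Blockquote</p></blockquote>"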
7
node_modules/markdown/test/features/blockquotes/spaceless.json
Normal file
@ -0,0 +1,7 @@
["html",
  ["blockquote",
    ["p",
      "blockquote\nwithout spaces"
    ]
  ]
]
2
node_modules/markdown/test/features/blockquotes/spaceless.text
Normal file
@ -0,0 +1,2 @@
>blockquote
>without spaces
26
node_modules/markdown/test/features/code/blank_lines.json
Normal file
@ -0,0 +1,26 @@
["html",
  ["p",
    "This block is composed of three lines:"
  ],
  ["pre",
    ["code",
      "one\n\nthree"
    ]
  ],
  ["p",
    "This block is composed of 5"
  ],
  ["pre",
    ["code",
      "one\n\n\nfour"
    ]
  ],
  ["p",
    "This block is composed of 2"
  ],
  ["pre",
    ["code",
      "two"
    ]
  ]
]
22
node_modules/markdown/test/features/code/blank_lines.text
Normal file
@ -0,0 +1,22 @@
This block is composed of three lines:

one

three

This block is composed of 5


one


four


This block is composed of 2


two
10
node_modules/markdown/test/features/code/block.json
Normal file
@ -0,0 +1,10 @@
["html",
  ["p",
    "Here is an example of AppleScript:"
  ],
  ["pre",
    ["code",
      "tell application \"Foo\"\n beep\nend tell\n\ttab"
    ]
  ]
]
7
node_modules/markdown/test/features/code/block.text
Normal file
@ -0,0 +1,7 @@
Here is an example of AppleScript:

tell application "Foo"
beep
end tell
tab
8
node_modules/markdown/test/features/code/embedded_backtick.json
Normal file
@ -0,0 +1,8 @@
["html",
  ["p",
    "This is a ",
    ["code",
      "code span with an `embedded` backtick"],
    "."
  ]
]
1
node_modules/markdown/test/features/code/embedded_backtick.text
Normal file
@ -0,0 +1 @@
This is a ``code span with an `embedded` backtick``.
7
node_modules/markdown/test/features/code/horizontal_rules.json
Normal file
@ -0,0 +1,7 @@
["html",
  ["pre",
    ["code",
      "fsfsfsf\n* * *\n\n***\n\n*****\n\n- - -\n\n---------------------------------------"
    ]
  ]
]
10
node_modules/markdown/test/features/code/horizontal_rules.text
Normal file
@ -0,0 +1,10 @@
fsfsfsf
* * *

***

*****

- - -

---------------------------------------
8
node_modules/markdown/test/features/code/inline.json
Normal file
@ -0,0 +1,8 @@
["html",
  ["p",
    ["code",
      "This"
    ],
    " is a code span."
  ]
]
1
node_modules/markdown/test/features/code/inline.text
Normal file
@ -0,0 +1 @@
`This` is a code span.
9
node_modules/markdown/test/features/code/inline_multiline.json
Normal file
@ -0,0 +1,9 @@
["html",
  ["p",
    "foo ",
    ["code",
      "code\ncode"
    ],
    " bar"
  ]
]
2
node_modules/markdown/test/features/code/inline_multiline.text
Normal file
@ -0,0 +1,2 @@
foo `code
code` bar
13
node_modules/markdown/test/features/code/trailing_para.json
Normal file
@ -0,0 +1,13 @@
["html",
  ["p",
    "Paragraph above"
  ],
  ["pre",
    ["code",
      "Code block"
    ]
  ],
  ["p",
    "Paragraph below"
  ]
]
4
node_modules/markdown/test/features/code/trailing_para.text
Normal file
@ -0,0 +1,4 @@
Paragraph above

Code block
Paragraph below
22
node_modules/markdown/test/features/definition_lists/inline.json
Normal file
@ -0,0 +1,22 @@
["html",
  ["dl",
    ["dt",
      "a term"
    ],
    ["dd",
      ["em",
        "emphasised"
      ],
      " definition."
    ],
    ["dt",
      "another term"
    ],
    ["dd",
      ["strong",
        "strong"
      ],
      " definition."
    ]
  ]
]
5
node_modules/markdown/test/features/definition_lists/inline.text
Normal file
@ -0,0 +1,5 @@
a term
: *emphasised* definition.

another term
: **strong** definition.
16
node_modules/markdown/test/features/definition_lists/long.json
Normal file
@ -0,0 +1,16 @@
["html",
  ["dl",
    ["dt",
      "first term"
    ],
    ["dd",
      "the quick brown fox jumps\nover the lazy dog"
    ],
    ["dt",
      "second term"
    ],
    ["dd",
      "pack my box with five\ndozen liquor jugs"
    ]
  ]
]
7
node_modules/markdown/test/features/definition_lists/long.text
Normal file
@ -0,0 +1,7 @@
first term
: the quick brown fox jumps
over the lazy dog

second term
: pack my box with five
dozen liquor jugs
28
node_modules/markdown/test/features/definition_lists/multiple_definitions.json
Normal file
@ -0,0 +1,28 @@
["html",
  ["dl",
    ["dt",
      "fruit"
    ],
    ["dd",
      "apple"
    ],
    ["dd",
      "banana"
    ],
    ["dd",
      "pear"
    ],
    ["dt",
      "animal"
    ],
    ["dd",
      "cow"
    ],
    ["dd",
      "duck"
    ],
    ["dd",
      "horse"
    ]
  ]
]
9
node_modules/markdown/test/features/definition_lists/multiple_definitions.text
Normal file
@ -0,0 +1,9 @@
fruit
: apple
: banana
: pear

animal
: cow
: duck
: horse
22
node_modules/markdown/test/features/definition_lists/multiple_terms.json
Normal file
@ -0,0 +1,22 @@
["html",
  ["dl",
    ["dt",
      "fruit"
    ],
    ["dt",
      "vegetable"
    ],
    ["dd",
      "tomato"
    ],
    ["dt",
      "animal"
    ],
    ["dt",
      "mineral"
    ],
    ["dd",
      "pet rock"
    ]
  ]
]
7
node_modules/markdown/test/features/definition_lists/multiple_terms.text
Normal file
@ -0,0 +1,7 @@
fruit
vegetable
: tomato

animal
mineral
: pet rock
22
node_modules/markdown/test/features/definition_lists/tight.json
Normal file
@ -0,0 +1,22 @@
["html",
  ["dl",
    ["dt",
      "one"
    ],
    ["dd",
      "alpha"
    ],
    ["dt",
      "two"
    ],
    ["dd",
      "beta"
    ],
    ["dt",
      "three"
    ],
    ["dd",
      "gamma"
    ]
  ]
]
8
node_modules/markdown/test/features/definition_lists/tight.text
Normal file
@ -0,0 +1,8 @@
one
: alpha

two
: beta

three
: gamma
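Note that the test runner above only registers definition_lists for the Maruku dialect, so reproducing these fixtures by hand needs the dialect argument. A hypothetical call against the tight fixture, with the expected tree taken from tight.json above rather than from running the parser:

// Hypothetical check of the tight.text fixture under the Maruku dialect:
var markdown = require("markdown").markdown;
var tree = markdown.toHTMLTree("one\n: alpha\n\ntwo\n: beta\n\nthree\n: gamma", "Maruku");
// Expected, per tight.json:
//   ["html", ["dl", ["dt","one"], ["dd","alpha"], ["dt","two"], ["dd","beta"],
//             ["dt","three"], ["dd","gamma"]]]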
9
node_modules/markdown/test/features/emphasis/multiple_lines.json
Normal file
@ -0,0 +1,9 @@
["html",
  ["p",
    "You can ",
    ["em",
      "start emphasis on one line,\nand finish it"
    ],
    " on another."
  ]
]
2
node_modules/markdown/test/features/emphasis/multiple_lines.text
Normal file
@ -0,0 +1,2 @@
You can *start emphasis on one line,
and finish it* on another.
24
node_modules/markdown/test/features/emphasis/nested.json
Normal file
@ -0,0 +1,24 @@
["html",
  ["p",
    "You can ",
    ["strong",
      "nest ",
      ["em",
        "em"
      ],
      " inside strong"
    ],
    "."
  ],
  ["p",
    "You can ",
    ["em",
      "nest ",
      ["strong",
        "strong"
      ],
      " inside em"
    ],
    "."
  ]
]
3
node_modules/markdown/test/features/emphasis/nested.text
Normal file
@ -0,0 +1,3 @@
You can **nest *em* inside strong**.

You can *nest **strong** inside em*.
24
node_modules/markdown/test/features/emphasis/simple.json
Normal file
@ -0,0 +1,24 @@
["html",
  ["p",
    "Emphasis can be ",
    ["em",
      "weak"
    ],
    " or ",
    ["strong",
      "strong"
    ],
    "."
  ],
  ["p",
    "It can ",
    ["em",
      "even"
    ],
    " use ",
    ["strong",
      "underscores"
    ],
    "."
  ]
]
3
node_modules/markdown/test/features/emphasis/simple.text
Normal file
@ -0,0 +1,3 @@
Emphasis can be *weak* or **strong**.

It can _even_ use __underscores__.
20
node_modules/markdown/test/features/headers/atx.json
Normal file
@ -0,0 +1,20 @@
["html",
  ["h1",
    "One"
  ],
  ["h2",
    "Two"
  ],
  ["h3",
    "Three"
  ],
  ["h4",
    "Four"
  ],
  ["h5",
    "Five"
  ],
  ["h6",
    "Six"
  ]
]
11
node_modules/markdown/test/features/headers/atx.text
Normal file
@ -0,0 +1,11 @@
# One

## Two

### Three

#### Four

##### Five

###### Six
11
node_modules/markdown/test/features/headers/atx_closing_hashes.json
Normal file
@ -0,0 +1,11 @@
["html",
  ["h1",
    "One"
  ],
  ["h2",
    "Two"
  ],
  ["h3",
    "Three"
  ]
]
5
node_modules/markdown/test/features/headers/atx_closing_hashes.text
Normal file
@ -0,0 +1,5 @@
# One #

## Two #####

### Three ##
8
node_modules/markdown/test/features/headers/setext.json
Normal file
@ -0,0 +1,8 @@
["html",
  ["h1",
    "One"
  ],
  ["h2",
    "Two"
  ]
]
5
node_modules/markdown/test/features/headers/setext.text
Normal file
@ -0,0 +1,5 @@
One
===

Two
---
20
node_modules/markdown/test/features/headers/trailing_paras.json
Normal file
@ -0,0 +1,20 @@
["html",
  ["h1",
    "Header"
  ],
  ["p",
    "Paragraph"
  ],
  ["h2",
    "Header"
  ],
  ["p",
    "Paragraph"
  ],
  ["h3",
    "Header"
  ],
  ["p",
    "Paragraph"
  ]
]
10
node_modules/markdown/test/features/headers/trailing_paras.text
Normal file
@ -0,0 +1,10 @@
Header
======
Paragraph

Header
------
Paragraph

### Header
Paragraph
17
node_modules/markdown/test/features/horizontal_rules/abutting_blocks.json
Normal file
@ -0,0 +1,17 @@
["html",
  ["p",
    "para"
  ],
  ["hr"],
  ["ul",
    ["li",
      "list"
    ]
  ],
  ["hr"],
  ["blockquote",
    ["p",
      "blockquote"
    ]
  ]
]
5
node_modules/markdown/test/features/horizontal_rules/abutting_blocks.text
Normal file
@ -0,0 +1,5 @@
para
***
* list
- - -
> blockquote
3
node_modules/markdown/test/features/horizontal_rules/dashes.json
Normal file
@ -0,0 +1,3 @@
["html",
  ["hr"]
]
1
node_modules/markdown/test/features/horizontal_rules/dashes.text
Normal file
@ -0,0 +1 @@
---
3
node_modules/markdown/test/features/horizontal_rules/leading_spaces.json
Normal file
@ -0,0 +1,3 @@
["html",
  ["hr"]
]
1
node_modules/markdown/test/features/horizontal_rules/leading_spaces.text
Normal file
@ -0,0 +1 @@
* * *
3
node_modules/markdown/test/features/horizontal_rules/long.json
Normal file
@ -0,0 +1,3 @@
["html",
  ["hr"]
]
1
node_modules/markdown/test/features/horizontal_rules/long.text
Normal file
@ -0,0 +1 @@
*********************
3
node_modules/markdown/test/features/horizontal_rules/long_loose.json
Normal file
@ -0,0 +1,3 @@
["html",
  ["hr"]
]
1
node_modules/markdown/test/features/horizontal_rules/long_loose.text
Normal file
@ -0,0 +1 @@
* * * * * * * * * * *
3
node_modules/markdown/test/features/horizontal_rules/loose_dashes.json
Normal file
@ -0,0 +1,3 @@
["html",
  ["hr"]
]
1
node_modules/markdown/test/features/horizontal_rules/loose_dashes.text
Normal file
@ -0,0 +1 @@
- - -
3
node_modules/markdown/test/features/horizontal_rules/loose_stars.json
Normal file
@ -0,0 +1,3 @@
["html",
  ["hr"]
]
1
node_modules/markdown/test/features/horizontal_rules/loose_stars.text
Normal file
@ -0,0 +1 @@
* * *
3
node_modules/markdown/test/features/horizontal_rules/loose_underscores.json
Normal file
@ -0,0 +1,3 @@
["html",
  ["hr"]
]
1
node_modules/markdown/test/features/horizontal_rules/loose_underscores.text
Normal file
@ -0,0 +1 @@
_ _ _
3
node_modules/markdown/test/features/horizontal_rules/stars.json
Normal file
@ -0,0 +1,3 @@
["html",
  ["hr"]
]
1
node_modules/markdown/test/features/horizontal_rules/stars.text
Normal file
@ -0,0 +1 @@
***
3
node_modules/markdown/test/features/horizontal_rules/underscores.json
Normal file
@ -0,0 +1,3 @@
["html",
  ["hr"]
]
1
node_modules/markdown/test/features/horizontal_rules/underscores.text
Normal file
@ -0,0 +1 @@
___
8
node_modules/markdown/test/features/images/basic.json
Normal file
@ -0,0 +1,8 @@
["html",
  ["p",
    ["img", {
      "src": "/path/to/img.jpg",
      "alt": "Alt text"
    } ]
  ]
]
1
node_modules/markdown/test/features/images/basic.text
Normal file
@ -0,0 +1 @@
![Alt text](/path/to/img.jpg)
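Elements that carry attributes, such as images, appear in the JsonML tree with an attribute object in the second position, as basic.json above shows. A hypothetical check against that fixture (the dialect argument is omitted here on the assumption that Gruber is the default):

// Hypothetical: parse the basic.text source and compare with basic.json above:
var markdown = require("markdown").markdown;
var tree = markdown.toHTMLTree("![Alt text](/path/to/img.jpg)");
// Expected, per basic.json:
//   ["html", ["p", ["img", { "src": "/path/to/img.jpg", "alt": "Alt text" }]]]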
8
node_modules/markdown/test/features/images/crotcheted_url.json
Normal file
@ -0,0 +1,8 @@
["html",
  ["p",
    ["img", {
      "src": "/url/",
      "alt": "alt text"
    } ]
  ]
]
1
node_modules/markdown/test/features/images/crotcheted_url.text
Normal file
@ -0,0 +1 @@
![alt text](</url/>)
9
node_modules/markdown/test/features/images/crotcheted_url_with_title.json
Normal file
@ -0,0 +1,9 @@
["html",
  ["p",
    ["img", {
      "src": "/url/",
      "alt": "alt text",
      "title": "with a title"
    } ]
  ]
]
1
node_modules/markdown/test/features/images/crotcheted_url_with_title.text
Normal file
@ -0,0 +1 @@
![alt text](</url/> "with a title")
8
node_modules/markdown/test/features/images/empty.json
Normal file
@ -0,0 +1,8 @@
["html",
  ["p",
    ["img", {
      "src": "",
      "alt": "Empty"
    } ]
  ]
]
1
node_modules/markdown/test/features/images/empty.text
Normal file
@ -0,0 +1 @@
![Empty]()
Some files were not shown because too many files have changed in this diff.