zhaoolee commit

zhaoolee
2019-07-20 20:37:38 +08:00
parent 5ba40a7860
commit 45bb8b3b08
7202 changed files with 992855 additions and 17411 deletions

1
node_modules/.bin/JSONStream generated vendored Symbolic link

@@ -0,0 +1 @@
../JSONStream/bin.js

1
node_modules/.bin/acorn generated vendored Symbolic link

@@ -0,0 +1 @@
../acorn/bin/acorn

1
node_modules/.bin/atob generated vendored Symbolic link

@@ -0,0 +1 @@
../atob/bin/atob.js

1
node_modules/.bin/bunyan generated vendored Symbolic link

@@ -0,0 +1 @@
../hexo-bunyan/bin/bunyan

1
node_modules/.bin/esparse generated vendored Symbolic link

@@ -0,0 +1 @@
../esprima/bin/esparse.js

1
node_modules/.bin/esvalidate generated vendored Symbolic link

@@ -0,0 +1 @@
../esprima/bin/esvalidate.js

1
node_modules/.bin/hexo generated vendored Symbolic link

@@ -0,0 +1 @@
../hexo/bin/hexo

1
node_modules/.bin/js-yaml generated vendored Symbolic link

@@ -0,0 +1 @@
../js-yaml/bin/js-yaml.js

1
node_modules/.bin/marked generated vendored Symbolic link

@@ -0,0 +1 @@
../marked/bin/marked

1
node_modules/.bin/md2html generated vendored Symbolic link

@@ -0,0 +1 @@
../markdown/bin/md2html.js

1
node_modules/.bin/mime generated vendored Symbolic link

@@ -0,0 +1 @@
../mime/cli.js

1
node_modules/.bin/mkdirp generated vendored Symbolic link

@@ -0,0 +1 @@
../mkdirp/bin/cmd.js

1
node_modules/.bin/ncp generated vendored Symbolic link

@@ -0,0 +1 @@
../ncp/bin/ncp

1
node_modules/.bin/nopt generated vendored Symbolic link

@@ -0,0 +1 @@
../nopt/bin/nopt.js

1
node_modules/.bin/nunjucks-precompile generated vendored Symbolic link

@@ -0,0 +1 @@
../nunjucks/bin/precompile

1
node_modules/.bin/rimraf generated vendored Symbolic link

@@ -0,0 +1 @@
../rimraf/bin.js

1
node_modules/.bin/stylus generated vendored Symbolic link

@@ -0,0 +1 @@
../stylus/bin/stylus

1
node_modules/.bin/swig generated vendored Symbolic link

@@ -0,0 +1 @@
../swig-templates/bin/swig.js

1
node_modules/.bin/to-title-case generated vendored Symbolic link

@@ -0,0 +1 @@
../titlecase/bin.js

1
node_modules/.bin/uglifyjs generated vendored Symbolic link

@@ -0,0 +1 @@
../uglify-js/bin/uglifyjs

1
node_modules/.bin/which generated vendored Symbolic link

@@ -0,0 +1 @@
../which/bin/which

1
node_modules/.bin/window-size generated vendored Symbolic link

@@ -0,0 +1 @@
../window-size/cli.js

8
node_modules/JSONStream/.travis.yml generated vendored Normal file

@@ -0,0 +1,8 @@
language: node_js
node_js:
- 4
- 5
- 6
sudo: false

15
node_modules/JSONStream/LICENSE.APACHE2 generated vendored Normal file

@@ -0,0 +1,15 @@
Apache License, Version 2.0
Copyright (c) 2011 Dominic Tarr
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

24
node_modules/JSONStream/LICENSE.MIT generated vendored Normal file

@@ -0,0 +1,24 @@
The MIT License
Copyright (c) 2011 Dominic Tarr
Permission is hereby granted, free of charge,
to any person obtaining a copy of this software and
associated documentation files (the "Software"), to
deal in the Software without restriction, including
without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom
the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

12
node_modules/JSONStream/bin.js generated vendored Executable file

@@ -0,0 +1,12 @@
#! /usr/bin/env node
var JSONStream = require('./')
if(!module.parent && process.title !== 'browser') {
process.stdin
.pipe(JSONStream.parse(process.argv[2]))
.pipe(JSONStream.stringify('[', ',\n', ']\n', 2))
.pipe(process.stdout)
}

13
node_modules/JSONStream/examples/all_docs.js generated vendored Normal file

@@ -0,0 +1,13 @@
var request = require('request')
, JSONStream = require('JSONStream')
, es = require('event-stream')
var parser = JSONStream.parse(['rows', true]) //emit parts that match this path (any element of the rows array)
, req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
, logger = es.mapSync(function (data) { //create a stream that logs to stderr,
console.error(data)
return data
})
req.pipe(parser)
parser.pipe(logger)

247
node_modules/JSONStream/index.js generated vendored Executable file

@@ -0,0 +1,247 @@
'use strict'
var Parser = require('jsonparse')
, through = require('through')
var bufferFrom = Buffer.from && Buffer.from !== Uint8Array.from
/*
the value of this.stack that creationix's jsonparse has is weird.
it makes this code ugly, but his problem is way harder that mine,
so i'll forgive him.
*/
exports.parse = function (path, map) {
var header, footer
var parser = new Parser()
var stream = through(function (chunk) {
if('string' === typeof chunk)
chunk = bufferFrom ? Buffer.from(chunk) : new Buffer(chunk)
parser.write(chunk)
},
function (data) {
if(data)
stream.write(data)
if (header)
stream.emit('header', header)
if (footer)
stream.emit('footer', footer)
stream.queue(null)
})
if('string' === typeof path)
path = path.split('.').map(function (e) {
if (e === '$*')
return {emitKey: true}
else if (e === '*')
return true
else if (e === '') // '..'.split('.') returns an empty string
return {recurse: true}
else
return e
})
var count = 0, _key
if(!path || !path.length)
path = null
parser.onValue = function (value) {
if (!this.root)
stream.root = value
if(! path) return
var i = 0 // iterates on path
var j = 0 // iterates on stack
var emitKey = false;
var emitPath = false;
while (i < path.length) {
var key = path[i]
var c
j++
if (key && !key.recurse) {
c = (j === this.stack.length) ? this : this.stack[j]
if (!c) return
if (! check(key, c.key)) {
setHeaderFooter(c.key, value)
return
}
emitKey = !!key.emitKey;
emitPath = !!key.emitPath;
i++
} else {
i++
var nextKey = path[i]
if (! nextKey) return
while (true) {
c = (j === this.stack.length) ? this : this.stack[j]
if (!c) return
if (check(nextKey, c.key)) {
i++;
if (!Object.isFrozen(this.stack[j]))
this.stack[j].value = null
break
} else {
setHeaderFooter(c.key, value)
}
j++
}
}
}
// emit header
if (header) {
stream.emit('header', header);
header = false;
}
if (j !== this.stack.length) return
count ++
var actualPath = this.stack.slice(1).map(function(element) { return element.key }).concat([this.key])
var data = value
if(null != data)
if(null != (data = map ? map(data, actualPath) : data)) {
if (emitKey || emitPath) {
data = { value: data };
if (emitKey)
data["key"] = this.key;
if (emitPath)
data["path"] = actualPath;
}
stream.queue(data)
}
if (this.value) delete this.value[this.key]
for(var k in this.stack)
if (!Object.isFrozen(this.stack[k]))
this.stack[k].value = null
}
parser._onToken = parser.onToken;
parser.onToken = function (token, value) {
parser._onToken(token, value);
if (this.stack.length === 0) {
if (stream.root) {
if(!path)
stream.queue(stream.root)
count = 0;
stream.root = null;
}
}
}
parser.onError = function (err) {
if(err.message.indexOf("at position") > -1)
err.message = "Invalid JSON (" + err.message + ")";
stream.emit('error', err)
}
return stream
function setHeaderFooter(key, value) {
// header has not been emitted yet
if (header !== false) {
header = header || {}
header[key] = value
}
// footer has not been emitted yet but header has
if (footer !== false && header === false) {
footer = footer || {}
footer[key] = value
}
}
}
function check (x, y) {
if ('string' === typeof x)
return y == x
else if (x && 'function' === typeof x.exec)
return x.exec(y)
else if ('boolean' === typeof x || 'object' === typeof x)
return x
else if ('function' === typeof x)
return x(y)
return false
}
exports.stringify = function (op, sep, cl, indent) {
indent = indent || 0
if (op === false){
op = ''
sep = '\n'
cl = ''
} else if (op == null) {
op = '[\n'
sep = '\n,\n'
cl = '\n]\n'
}
//else, what ever you like
var stream
, first = true
, anyData = false
stream = through(function (data) {
anyData = true
try {
var json = JSON.stringify(data, null, indent)
} catch (err) {
return stream.emit('error', err)
}
if(first) { first = false ; stream.queue(op + json)}
else stream.queue(sep + json)
},
function (data) {
if(!anyData)
stream.queue(op)
stream.queue(cl)
stream.queue(null)
})
return stream
}
exports.stringifyObject = function (op, sep, cl, indent) {
indent = indent || 0
if (op === false){
op = ''
sep = '\n'
cl = ''
} else if (op == null) {
op = '{\n'
sep = '\n,\n'
cl = '\n}\n'
}
//else, what ever you like
var first = true
var anyData = false
var stream = through(function (data) {
anyData = true
var json = JSON.stringify(data[0]) + ':' + JSON.stringify(data[1], null, indent)
if(first) { first = false ; this.queue(op + json)}
else this.queue(sep + json)
},
function (data) {
if(!anyData) this.queue(op)
this.queue(cl)
this.queue(null)
})
return stream
}

77
node_modules/JSONStream/package.json generated vendored Normal file

@@ -0,0 +1,77 @@
{
"_args": [
[
"JSONStream@1.3.5",
"/Users/lijianzhao/github/ChineseBQB"
]
],
"_from": "JSONStream@1.3.5",
"_id": "JSONStream@1.3.5",
"_inBundle": false,
"_integrity": "sha1-MgjB8I06TZkmGrZPkjArwV4RHKA=",
"_location": "/JSONStream",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "JSONStream@1.3.5",
"name": "JSONStream",
"escapedName": "JSONStream",
"rawSpec": "1.3.5",
"saveSpec": null,
"fetchSpec": "1.3.5"
},
"_requiredBy": [
"/warehouse"
],
"_resolved": "http://registry.npm.taobao.org/JSONStream/download/JSONStream-1.3.5.tgz",
"_spec": "1.3.5",
"_where": "/Users/lijianzhao/github/ChineseBQB",
"author": {
"name": "Dominic Tarr",
"email": "dominic.tarr@gmail.com",
"url": "http://bit.ly/dominictarr"
},
"bin": {
"JSONStream": "./bin.js"
},
"bugs": {
"url": "https://github.com/dominictarr/JSONStream/issues"
},
"dependencies": {
"jsonparse": "^1.2.0",
"through": ">=2.2.7 <3"
},
"description": "rawStream.pipe(JSONStream.parse()).pipe(streamOfObjects)",
"devDependencies": {
"assertions": "~2.2.2",
"event-stream": "~0.7.0",
"it-is": "~1",
"render": "~0.1.1",
"tape": "~2.12.3",
"trees": "~0.0.3"
},
"engines": {
"node": "*"
},
"homepage": "http://github.com/dominictarr/JSONStream",
"keywords": [
"json",
"stream",
"streaming",
"parser",
"async",
"parsing"
],
"license": "(MIT OR Apache-2.0)",
"name": "JSONStream",
"optionalDependencies": {},
"repository": {
"type": "git",
"url": "git://github.com/dominictarr/JSONStream.git"
},
"scripts": {
"test": "node test/run.js"
},
"version": "1.3.5"
}

207
node_modules/JSONStream/readme.markdown generated vendored Normal file

@@ -0,0 +1,207 @@
# JSONStream
streaming JSON.parse and stringify
![](https://secure.travis-ci.org/dominictarr/JSONStream.png?branch=master)
## install
```npm install JSONStream```
## example
``` js
var request = require('request')
, JSONStream = require('JSONStream')
, es = require('event-stream')
request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
.pipe(JSONStream.parse('rows.*'))
.pipe(es.mapSync(function (data) {
console.error(data)
return data
}))
```
## JSONStream.parse(path)
parse stream of values that match a path
``` js
JSONStream.parse('rows.*.doc')
```
The `..` operator is the recursive descent operator from [JSONPath](http://goessner.net/articles/JsonPath/), which will match a child at any depth (see examples below).
If your keys contain characters such as `.` or `*`, use an array instead, e.g.
`['row', true, /^doc/]`.
An array path may contain strings, `RegExp`s, booleans, and/or functions. The `..` operator is also available in array representation, using `{recurse: true}`.
Any object that matches the path will be emitted as 'data' (and `pipe`d downstream).
If `path` is empty or null, no 'data' events are emitted.
If you want to have keys emitted, you can prefix your `*` operator with `$`: `obj.$*` - in this case the data passed to the stream is an object with a `key` holding the key and a `value` property holding the data.
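For example, a minimal sketch mirroring the `keys via string` case in this package's `test/keys.js` (the object literal is just illustrative):
``` js
var JSONStream = require('JSONStream')

// parse 'obj.$*': emit each key/value pair of `obj` as {key: ..., value: ...}
var stream = JSONStream.parse('obj.$*')
stream.on('data', function (data) {
  console.log(data.key, '=', data.value) // one = 1, two = 2, three = 3
})
stream.write(JSON.stringify({obj: {one: 1, two: 2, three: 3}}))
stream.end()
```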
### Examples
query a couchdb view:
``` bash
curl -sS 'localhost:5984/tests/_all_docs?include_docs=true'
```
you will get something like this:
``` js
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
},
]}
```
We are probably most interested in the `rows.*.doc` values.
Create a `Stream` that parses the documents from the feed like this:
``` js
var stream = JSONStream.parse(['rows', true, 'doc']) //rows, ANYTHING, doc
stream.on('data', function(data) {
console.log('received:', data);
});
//emits anything from _before_ the first match
stream.on('header', function (data) {
console.log('header:', data) // => {"total_rows":129,"offset":0}
})
```
awesome!
In case you want the contents of each doc emitted key by key:
``` js
var stream = JSONStream.parse(['rows', true, 'doc', {emitKey: true}]) //rows, ANYTHING, doc, items in docs with keys
stream.on('data', function(data) {
console.log('key:', data.key);
console.log('value:', data.value);
});
```
You can also emit the path:
``` js
var stream = JSONStream.parse(['rows', true, 'doc', {emitPath: true}]) //rows, ANYTHING, doc, items in docs with keys
stream.on('data', function(data) {
console.log('path:', data.path);
console.log('value:', data.value);
});
```
### recursive patterns (..)
`JSONStream.parse('docs..value')`
(or `JSONStream.parse(['docs', {recurse: true}, 'value'])` using an array)
will emit every `value` object that is a child, grand-child, etc. of the
`docs` object. In this example, it will match exactly 5 times at various depth
levels, emitting 0, 1, 2, 3 and 4 as results.
```js
{
"total": 5,
"docs": [
{
"key": {
"value": 0,
"some": "property"
}
},
{"value": 1},
{"value": 2},
{"blbl": [{}, {"a":0, "b":1, "value":3}, 10]},
{"value": 4}
]
}
```
## JSONStream.parse(pattern, map)
Provide a function that can be used to map or filter
the JSON output. `map` is passed the value at that node of the pattern;
if `map` returns a non-nullish value (anything but `null` or `undefined`),
that value will be emitted in the stream. If it returns a nullish value,
nothing will be emitted.
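A minimal sketch of both uses, adapted from this package's `test/map.js` (the numeric data is illustrative):
``` js
var JSONStream = require('JSONStream')

// map: multiply every array element by 10
var mapped = JSONStream.parse([true], function (e) { return e * 10 })
mapped.on('data', function (v) { console.log(v) }) // 10, 20, 30
mapped.write(JSON.stringify([1, 2, 3]))
mapped.end()

// filter: returning null (a nullish value) drops the even elements
var odds = JSONStream.parse([true], function (e) { return e % 2 ? e : null })
odds.on('data', function (v) { console.log(v) }) // 1, 3, 5
odds.write(JSON.stringify([1, 2, 3, 4, 5, 6]))
odds.end()
```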
`JSONStream` also emits `'header'` and `'footer'` events:
the `'header'` event contains anything in the output that came before
the first match, and the `'footer'` event contains anything after the last match.
## JSONStream.stringify(open, sep, close)
Create a writable stream.
You may pass in custom `open`, `close`, and `separator` strings.
By default, `JSONStream.stringify()` will create an array
(with default options `open='[\n', sep='\n,\n', close='\n]\n'`).
If you call `JSONStream.stringify(false)`,
the elements will only be separated by a newline.
If you only write one item this will be valid JSON;
if you write many items,
you can use a `RegExp` to split it into valid chunks.
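A minimal sketch, assuming the output should go to stdout (the object literals are illustrative):
``` js
var JSONStream = require('JSONStream')

// write objects in, get one JSON array out (default open/sep/close)
var stringify = JSONStream.stringify()
stringify.pipe(process.stdout)
stringify.write({hello: 'world'})
stringify.write({foo: 'bar'})
stringify.end() // emits the closing ']'
```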
## JSONStream.stringifyObject(open, sep, close)
Very much like `JSONStream.stringify`,
but creates a writable stream for objects instead of arrays.
Accordingly, `open='{\n', sep='\n,\n', close='\n}\n'`.
When you `.write()` to the stream you must supply an array with `[ key, data ]`
as the first argument.
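A minimal sketch (the key/value pairs are illustrative):
``` js
var JSONStream = require('JSONStream')

// build a single JSON object; each write is a [key, value] pair
var stringify = JSONStream.stringifyObject()
stringify.pipe(process.stdout)
stringify.write([ 'name', 'JSONStream' ])
stringify.write([ 'streaming', true ])
stringify.end() // emits the closing '}'
```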
## unix tool
query npm to see all the modules that browserify has ever depended on.
``` bash
curl https://registry.npmjs.org/browserify | JSONStream 'versions.*.dependencies'
```
## numbers
numbers will be emitted as numbers.
huge numbers that cannot be represented in memory as javascript numbers will be emitted as strings.
cf https://github.com/creationix/jsonparse/commit/044b268f01c4b8f97fb936fc85d3bcfba179e5bb for details.
## Acknowledgements
This module depends on https://github.com/creationix/jsonparse
by Tim Caswell
and also thanks to Florent Jaby for teaching me about parsing with:
https://github.com/Floby/node-json-streams
## license
Dual-licensed under the MIT License or the Apache License, version 2.0

41
node_modules/JSONStream/test/bool.js generated vendored Normal file

@@ -0,0 +1,41 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
// stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
var expected = []
, stringify = JSONStream.stringify()
, es = require('event-stream')
, stringified = ''
, called = 0
, count = 10
, ended = false
while (count --)
expected.push(randomObj())
es.connect(
es.readArray(expected),
stringify,
JSONStream.parse([true]),
es.writeArray(function (err, lines) {
it(lines).has(expected)
console.error('PASSED')
})
)

18
node_modules/JSONStream/test/browser.js generated vendored Normal file

@@ -0,0 +1,18 @@
var test = require('tape')
var JSONStream = require('../')
var testData = '{"rows":[{"hello":"world"}, {"foo": "bar"}]}'
test('basic parsing', function (t) {
t.plan(2)
var parsed = JSONStream.parse("rows.*")
var parsedKeys = {}
parsed.on('data', function(match) {
parsedKeys[Object.keys(match)[0]] = true
})
parsed.on('end', function() {
t.equal(!!parsedKeys['hello'], true)
t.equal(!!parsedKeys['foo'], true)
})
parsed.write(testData)
parsed.end()
})

27
node_modules/JSONStream/test/destroy_missing.js generated vendored Normal file

@@ -0,0 +1,27 @@
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var JSONStream = require('../');
var server = net.createServer(function(client) {
var parser = JSONStream.parse([]);
parser.on('end', function() {
console.log('close')
console.error('PASSED');
server.close();
});
client.pipe(parser);
var n = 4
client.on('data', function () {
if(--n) return
client.end();
})
});
server.listen(9999);
var client = net.connect({ port : 9999 }, function() {
fs.createReadStream(file).pipe(client).on('data', console.log) //.resume();
});

29
node_modules/JSONStream/test/doubledot1.js generated vendored Normal file

@@ -0,0 +1,29 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse('rows..rev')
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
for (var i = 0 ; i < expected.rows.length ; i++)
it(parsed[i]).deepEqual(expected.rows[i].value.rev)
console.error('PASSED')
})

30
node_modules/JSONStream/test/doubledot2.js generated vendored Normal file

@@ -0,0 +1,30 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','depth.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['docs', {recurse: true}, 'value'])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
var expectedValues = [0, [1], {"a": 2}, "3", 4]
it(called).equal(expectedValues.length)
for (var i = 0 ; i < 5 ; i++)
it(parsed[i]).deepEqual(expectedValues[i])
console.error('PASSED')
})

44
node_modules/JSONStream/test/empty.js generated vendored Normal file

@@ -0,0 +1,44 @@
var JSONStream = require('../')
, stream = require('stream')
, it = require('it-is')
var output = [ [], [] ]
var parser1 = JSONStream.parse(['docs', /./])
parser1.on('data', function(data) {
output[0].push(data)
})
var parser2 = JSONStream.parse(['docs', /./])
parser2.on('data', function(data) {
output[1].push(data)
})
var pending = 2
function onend () {
if (--pending > 0) return
it(output).deepEqual([
[], [{hello: 'world'}]
])
console.error('PASSED')
}
parser1.on('end', onend)
parser2.on('end', onend)
function makeReadableStream() {
var readStream = new stream.Stream()
readStream.readable = true
readStream.write = function (data) { this.emit('data', data) }
readStream.end = function (data) { this.emit('end') }
return readStream
}
var emptyArray = makeReadableStream()
emptyArray.pipe(parser1)
emptyArray.write('{"docs":[]}')
emptyArray.end()
var objectArray = makeReadableStream()
objectArray.pipe(parser2)
objectArray.write('{"docs":[{"hello":"world"}]}')
objectArray.end()

45
node_modules/JSONStream/test/error_contents.js generated vendored Normal file

@@ -0,0 +1,45 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','error.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows'])
, called = 0
, headerCalled = 0
, footerCalled = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('header', function (data) {
headerCalled ++
it(data).deepEqual({
error: 'error_code',
message: 'this is an error message'
})
})
parser.on('footer', function (data) {
footerCalled ++
})
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(0)
it(headerCalled).equal(1)
it(footerCalled).equal(0)
console.error('PASSED')
})

4030
node_modules/JSONStream/test/fixtures/all_npm.json generated vendored Normal file

File diff suppressed because it is too large

18
node_modules/JSONStream/test/fixtures/couch_sample.json generated vendored Normal file

@@ -0,0 +1,18 @@
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
},
]}

15
node_modules/JSONStream/test/fixtures/depth.json generated vendored Normal file

@@ -0,0 +1,15 @@
{
"total": 5,
"docs": [
{
"key": {
"value": 0,
"some": "property"
}
},
{"value": [1]},
{"value": {"a":2}},
{"blbl": [{}, {"a":0, "b":1, "value":"3"}, 10]},
{"value": 4}
]
}

1
node_modules/JSONStream/test/fixtures/error.json generated vendored Normal file

@@ -0,0 +1 @@
{"error": "error_code", "message": "this is an error message"}

19
node_modules/JSONStream/test/fixtures/header_footer.json generated vendored Normal file

@@ -0,0 +1,19 @@
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
}
],
"foo": {"bar": "baz"}}

39
node_modules/JSONStream/test/fn.js generated vendored Normal file

@@ -0,0 +1,39 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
function fn (s) {
return !isNaN(parseInt(s, 10))
}
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', fn])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
it.has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})

135
node_modules/JSONStream/test/gen.js generated vendored Normal file

@@ -0,0 +1,135 @@
return // dont run this test for now since tape is weird and broken on 0.10
var fs = require('fs')
var JSONStream = require('../')
var file = process.argv[2] || '/tmp/JSONStream-test-large.json'
var size = Number(process.argv[3] || 100000)
var tape = require('tape')
// if (process.title !== 'browser') {
tape('out of mem', function (t) {
t.plan(1)
//////////////////////////////////////////////////////
// Produces a random number between arg1 and arg2
//////////////////////////////////////////////////////
var randomNumber = function (min, max) {
var number = Math.floor(Math.random() * (max - min + 1) + min);
return number;
};
//////////////////////////////////////////////////////
// Produces a random string of a length between arg1 and arg2
//////////////////////////////////////////////////////
var randomString = function (min, max) {
// add several spaces to increase chanses of creating 'words'
var chars = ' 0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
var result = '';
var randomLength = randomNumber(min, max);
for (var i = randomLength; i > 0; --i) {
result += chars[Math.round(Math.random() * (chars.length - 1))];
}
return result;
};
//////////////////////////////////////////////////////
// Produces a random JSON document, as a string
//////////////////////////////////////////////////////
var randomJsonDoc = function () {
var doc = {
"CrashOccurenceID": randomNumber(10000, 50000),
"CrashID": randomNumber(1000, 10000),
"SiteName": randomString(10, 25),
"MachineName": randomString(10, 25),
"Date": randomString(26, 26),
"ProcessDuration": randomString(18, 18),
"ThreadIdentityName": null,
"WindowsIdentityName": randomString(15, 40),
"OperatingSystemName": randomString(35, 65),
"DetailedExceptionInformation": randomString(100, 800)
};
doc = JSON.stringify(doc);
doc = doc.replace(/\,/g, ',\n'); // add new lines after each attribute
return doc;
};
//////////////////////////////////////////////////////
// generates test data
//////////////////////////////////////////////////////
var generateTestData = function (cb) {
console.log('generating large data file...');
var stream = fs.createWriteStream(file, {
encoding: 'utf8'
});
var i = 0;
var max = size;
var writing = false
var split = ',\n';
var doc = randomJsonDoc();
stream.write('[');
function write () {
if(writing) return
writing = true
while(++i < max) {
if(Math.random() < 0.001)
console.log('generate..', i + ' / ' + size)
if(!stream.write(doc + split)) {
writing = false
return stream.once('drain', write)
}
}
stream.write(doc + ']')
stream.end();
console.log('END')
}
write()
stream.on('close', cb)
};
//////////////////////////////////////////////////////
// Shows that parsing 100000 instances using JSONStream fails
//
// After several seconds, you will get this crash
// FATAL ERROR: JS Allocation failed - process out of memory
//////////////////////////////////////////////////////
var testJSONStreamParse_causesOutOfMem = function (done) {
var items = 0
console.log('parsing data files using JSONStream...');
var parser = JSONStream.parse([true]);
var stream = fs.createReadStream(file);
stream.pipe(parser);
parser.on('data', function (data) {
items++
if(Math.random() < 0.01) console.log(items, '...')
});
parser.on('end', function () {
t.equal(items, size)
});
};
//////////////////////////////////////////////////////
// main
//////////////////////////////////////////////////////
fs.stat(file, function (err, stat) {
console.log(stat)
if(err)
generateTestData(testJSONStreamParse_causesOutOfMem);
else
testJSONStreamParse_causesOutOfMem()
})
})
// }

55
node_modules/JSONStream/test/header_footer.js generated vendored Normal file

@@ -0,0 +1,55 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','header_footer.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', /\d+/ /*, 'value'*/])
, called = 0
, headerCalled = 0
, footerCalled = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('header', function (data) {
headerCalled ++
it(data).deepEqual({
total_rows: 129,
offset: 0
})
})
parser.on('footer', function (data) {
footerCalled ++
it(data).deepEqual({
foo: { bar: 'baz' }
})
})
parser.on('data', function (data) {
called ++
it.has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
it(headerCalled).equal(1)
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(headerCalled).equal(1)
it(footerCalled).equal(1)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})

34
node_modules/JSONStream/test/issues.js generated vendored Normal file

@@ -0,0 +1,34 @@
var JSONStream = require('../');
var test = require('tape')
test('#66', function (t) {
var error = 0;
var stream = JSONStream
.parse()
.on('error', function (err) {
t.ok(err);
error++;
})
.on('end', function () {
t.ok(error === 1);
t.end();
});
stream.write('["foo":bar[');
stream.end();
});
test('#81 - failure to parse nested objects', function (t) {
var stream = JSONStream
.parse('.bar.foo')
.on('error', function (err) {
t.error(err);
})
.on('end', function () {
t.end();
});
stream.write('{"bar":{"foo":"baz"}}');
stream.end();
});

105
node_modules/JSONStream/test/keys.js generated vendored Normal file

@@ -0,0 +1,105 @@
var test = require('tape');
var fs = require ('fs');
var join = require('path').join;
var couch_sample_file = join(__dirname, 'fixtures','couch_sample.json');
var JSONStream = require('../');
var fixture = {
obj: {
one: 1,
two: 2,
three: 3
}
};
function assertFixtureKeys(stream, t) {
var keys = [];
var values = [];
stream.on('data', function(data) {
keys.push(data.key);
values.push(data.value);
});
stream.on('end', function() {
t.deepEqual(keys, ['one', 'two', 'three']);
t.deepEqual(values, [1,2,3]);
t.end();
});
stream.write(JSON.stringify(fixture));
stream.end();
}
test('keys via string', function(t) {
var stream = JSONStream.parse('obj.$*');
assertFixtureKeys(stream, t);
});
test('keys via array', function(t) {
var stream = JSONStream.parse(['obj',{emitKey: true}]);
assertFixtureKeys(stream, t);
});
test('path via array', function(t) {
var stream = JSONStream.parse(['obj',{emitPath: true}]);
var paths = [];
var values = [];
stream.on('data', function(data) {
console.log(JSON.stringify(data));
paths.push(data.path);
values.push(data.value);
});
stream.on('end', function() {
t.deepEqual(paths, [['obj', 'one'], ['obj', 'two'], ['obj', 'three']]);
t.deepEqual(values, [1,2,3]);
t.end();
});
stream.write(JSON.stringify(fixture));
stream.end();
});
test('advanced keys', function(t) {
var advanced = fs.readFileSync(couch_sample_file);
var stream = JSONStream.parse(['rows', true, 'doc', {emitKey: true}]);
var keys = [];
var values = [];
stream.on('data', function(data) {
keys.push(data.key);
values.push(data.value);
});
stream.on('end', function() {
t.deepEqual(keys, [
'_id', '_rev', 'hello',
'_id', '_rev', 'hello'
]);
t.deepEqual(values, [
"change1_0.6995461115147918", "1-e240bae28c7bb3667f02760f6398d508", 1,
"change2_0.6995461115147918", "1-13677d36b98c0c075145bb8975105153", 2
]);
t.end();
});
stream.write(advanced);
stream.end();
});
test('parent keys', function(t) {
var stream = JSONStream.parse('$*');
var d = null;
stream.on('data', function(data) {
if(d) t.fail('should only be called once');
d = data;
});
stream.on('end', function() {
t.deepEqual(d,{
key: 'obj',
value: fixture.obj
});
t.end();
});
stream.write(JSON.stringify(fixture));
stream.end();
})

40
node_modules/JSONStream/test/map.js generated vendored Normal file

@@ -0,0 +1,40 @@
var test = require('tape')
var JSONStream = require('../')
test('map function', function (t) {
var actual = []
stream = JSONStream.parse([true], function (e) { return e*10 })
stream.on('data', function (v) { actual.push(v)})
stream.on('end', function () {
t.deepEqual(actual, [10,20,30,40,50,60])
t.end()
})
stream.write(JSON.stringify([1,2,3,4,5,6], null, 2))
stream.end()
})
test('filter function', function (t) {
var actual = []
stream = JSONStream
.parse([true], function (e) { return e%2 ? e : null})
.on('data', function (v) { actual.push(v)})
.on('end', function () {
t.deepEqual(actual, [1,3,5])
t.end()
})
stream.write(JSON.stringify([1,2,3,4,5,6], null, 2))
stream.end()
})

36
node_modules/JSONStream/test/multiple_objects.js generated vendored Normal file

@@ -0,0 +1,36 @@
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var it = require('it-is');
var JSONStream = require('../');
var str = fs.readFileSync(file);
var datas = {}
var server = net.createServer(function(client) {
var data_calls = 0;
var parser = JSONStream.parse(['rows', true, 'key']);
parser.on('data', function(data) {
++ data_calls;
datas[data] = (datas[data] || 0) + 1
it(data).typeof('string')
});
parser.on('end', function() {
console.log('END')
var min = Infinity
for (var d in datas)
min = min > datas[d] ? datas[d] : min
it(min).equal(3);
server.close();
});
client.pipe(parser);
});
server.listen(9999);
var client = net.connect({ port : 9999 }, function() {
var msgs = str + ' ' + str + '\n\n' + str
client.end(msgs);
});

29
node_modules/JSONStream/test/multiple_objects_error.js generated vendored Normal file

@@ -0,0 +1,29 @@
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var it = require('it-is');
var JSONStream = require('../');
var str = fs.readFileSync(file);
var server = net.createServer(function(client) {
var data_calls = 0;
var parser = JSONStream.parse();
parser.on('error', function(err) {
console.log(err);
server.close();
});
parser.on('end', function() {
console.log('END');
server.close();
});
client.pipe(parser);
});
server.listen(9999);
var client = net.connect({ port : 9999 }, function() {
var msgs = str + '}';
client.end(msgs);
});

28
node_modules/JSONStream/test/null.js generated vendored Normal file

@@ -0,0 +1,28 @@
var JSONStream = require('../')
var data = [
{ID: 1, optional: null},
{ID: 2, optional: null},
{ID: 3, optional: 20},
{ID: 4, optional: null},
{ID: 5, optional: 'hello'},
{ID: 6, optional: null}
]
var test = require('tape')
test ('null properties', function (t) {
var actual = []
var stream =
JSONStream.parse('*.optional')
.on('data', function (v) { actual.push(v) })
.on('end', function () {
t.deepEqual(actual, [20, 'hello'])
t.end()
})
stream.write(JSON.stringify(data, null, 2))
stream.end()
})

29
node_modules/JSONStream/test/parsejson.js generated vendored Normal file

@@ -0,0 +1,29 @@
/*
sometimes jsonparse changes numbers slightly.
*/
var r = Math.random()
, Parser = require('jsonparse')
, p = new Parser()
, assert = require('assert')
, times = 20
, bufferFrom = Buffer.from && Buffer.from !== Uint8Array.from
, str
while (times --) {
assert.equal(JSON.parse(JSON.stringify(r)), r, 'core JSON')
p.onValue = function (v) {
console.error('parsed', v)
assert.equal(v,r)
}
console.error('correct', r)
str = JSON.stringify([r])
p.write (bufferFrom ? Buffer.from(str) : new Buffer(str))
}

13
node_modules/JSONStream/test/run.js generated vendored Normal file

@@ -0,0 +1,13 @@
var readdirSync = require('fs').readdirSync
var spawnSync = require('child_process').spawnSync
var extname = require('path').extname
var files = readdirSync(__dirname)
files.forEach(function(file){
if (extname(file) !== '.js' || file === 'run.js')
return
console.log(`*** ${file} ***`)
var result = spawnSync(process.argv0, [file], { stdio: 'inherit', cwd: __dirname} )
if (result.status !== 0)
process.exit(result.status)
})

41
node_modules/JSONStream/test/stringify.js generated vendored Normal file

@@ -0,0 +1,41 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
var expected = []
, stringify = JSONStream.stringify()
, es = require('event-stream')
, stringified = ''
, called = 0
, count = 10
, ended = false
while (count --)
expected.push(randomObj())
es.connect(
es.readArray(expected),
stringify,
//JSONStream.parse([/./]),
es.writeArray(function (err, lines) {
it(JSON.parse(lines.join(''))).deepEqual(expected)
console.error('PASSED')
})
)

47
node_modules/JSONStream/test/stringify_object.js generated vendored Normal file

@@ -0,0 +1,47 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
, es = require('event-stream')
, pending = 10
, passed = true
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
for (var ix = 0; ix < pending; ix++) (function (count) {
var expected = {}
, stringify = JSONStream.stringifyObject()
es.connect(
stringify,
es.writeArray(function (err, lines) {
it(JSON.parse(lines.join(''))).deepEqual(expected)
if (--pending === 0) {
console.error('PASSED')
}
})
)
while (count --) {
var key = Math.random().toString(16).slice(2)
expected[key] = randomObj()
stringify.write([ key, expected[key] ])
}
process.nextTick(function () {
stringify.end()
})
})(ix)

35
node_modules/JSONStream/test/test.js generated vendored Normal file

@@ -0,0 +1,35 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', /\d+/ /*, 'value'*/])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
it.has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})

29
node_modules/JSONStream/test/test2.js generated vendored Normal file

@@ -0,0 +1,29 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, '..','package.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse([])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
it(data).deepEqual(expected)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(1)
console.error('PASSED')
})

41
node_modules/JSONStream/test/two-ways.js generated vendored Normal file

@@ -0,0 +1,41 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
// stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
var expected = []
, stringify = JSONStream.stringify()
, es = require('event-stream')
, stringified = ''
, called = 0
, count = 10
, ended = false
while (count --)
expected.push(randomObj())
es.connect(
es.readArray(expected),
stringify,
JSONStream.parse([/./]),
es.writeArray(function (err, lines) {
it(lines).has(expected)
console.error('PASSED')
})
)

21
node_modules/a-sync-waterfall/LICENSE generated vendored Normal file

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2013 Elan Shanker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the “Software”), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

95
node_modules/a-sync-waterfall/README.md generated vendored Normal file

@@ -0,0 +1,95 @@
# a-sync-waterfall
Simple, isolated sync/async waterfall module for JavaScript.
Runs an array of functions in series, each passing their results to the next in
the array. However, if any of the functions pass an error to the callback, the
next function is not executed and the main callback is immediately called with
the error.
For browsers and node.js.
## Installation
* Just include a-sync-waterfall before your scripts.
* `npm install a-sync-waterfall` if you're using node.js.
## Usage
* `waterfall(tasks, optionalCallback, forceAsync);`
* **tasks** - An array of functions to run, each function is passed a
`callback(err, result1, result2, ...)` it must call on completion. The first
argument is an error (which can be null) and any further arguments will be
passed as arguments in order to the next task.
* **optionalCallback** - An optional callback to run once all the functions have
completed. This will be passed the results of the last task's callback.
* **forceAsync** - An optional flag that forces tasks to run asynchronously even if they are synchronous (see the sketch below).
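A minimal sketch of the `forceAsync` flag, assuming two trivial synchronous tasks (the behaviour matches what this package's `test.js` exercises):
```javascript
var waterfall = require('a-sync-waterfall');

waterfall([
  function (callback) { callback(null, 1); },            // sync task
  function (one, callback) { callback(null, one + 1); }  // sync task
], function (err, result) {
  console.log('result:', result); // result: 2
}, true); // forceAsync = true

// With forceAsync, even synchronous tasks are deferred (via setImmediate,
// process.nextTick, or setTimeout), so this line logs before any task runs.
console.log('scheduled');
```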
##### Node.js:
```javascript
var waterfall = require('a-sync-waterfall');
waterfall(tasks, callback);
```
##### Browser:
```javascript
var waterfall = require('a-sync-waterfall');
waterfall(tasks, callback);
// Default:
window.waterfall(tasks, callback);
```
##### Tasks as Array of Functions
```javascript
waterfall([
function(callback){
callback(null, 'one', 'two');
},
function(arg1, arg2, callback){
callback(null, 'three');
},
function(arg1, callback){
// arg1 now equals 'three'
callback(null, 'done');
}
], function (err, result) {
// result now equals 'done'
});
```
##### Derive Tasks from an Array.map
```javascript
/* basic - no arguments */
waterfall(myArray.map(function (arrayItem) {
return function (nextCallback) {
// same execution for each item, call the next one when done
doAsyncThingsWith(arrayItem, nextCallback);
}}));
/* with arguments, initializer function, and final callback */
waterfall([function initializer (firstMapFunction) {
firstMapFunction(null, initialValue);
}].concat(myArray.map(function (arrayItem) {
return function (lastItemResult, nextCallback) {
// same execution for each item in the array
var itemResult = doThingsWith(arrayItem, lastItemResult);
// results carried along from each to the next
nextCallback(null, itemResult);
}})), function (err, finalResult) {
// final callback
});
```
## Acknowledgements
Hat tip to [Caolan McMahon](https://github.com/caolan) and
[Paul Miller](https://github.com/paulmillr), whose prior contributions this is
based upon.
Also [Elan Shanker](https://github.com/es128), from whose repository this is forked.
## License
[MIT](https://raw.github.com/hydiak/a-sync-waterfall/master/LICENSE)

83
node_modules/a-sync-waterfall/index.js generated vendored Normal file

@@ -0,0 +1,83 @@
// MIT license (by Elan Shanker).
(function(globals) {
'use strict';
var executeSync = function(){
var args = Array.prototype.slice.call(arguments);
if (typeof args[0] === 'function'){
args[0].apply(null, args.splice(1));
}
};
var executeAsync = function(fn){
if (typeof setImmediate === 'function') {
setImmediate(fn);
} else if (typeof process !== 'undefined' && process.nextTick) {
process.nextTick(fn);
} else {
setTimeout(fn, 0);
}
};
var makeIterator = function (tasks) {
var makeCallback = function (index) {
var fn = function () {
if (tasks.length) {
tasks[index].apply(null, arguments);
}
return fn.next();
};
fn.next = function () {
return (index < tasks.length - 1) ? makeCallback(index + 1): null;
};
return fn;
};
return makeCallback(0);
};
var _isArray = Array.isArray || function(maybeArray){
return Object.prototype.toString.call(maybeArray) === '[object Array]';
};
var waterfall = function (tasks, callback, forceAsync) {
var nextTick = forceAsync ? executeAsync : executeSync;
callback = callback || function () {};
if (!_isArray(tasks)) {
var err = new Error('First argument to waterfall must be an array of functions');
return callback(err);
}
if (!tasks.length) {
return callback();
}
var wrapIterator = function (iterator) {
return function (err) {
if (err) {
callback.apply(null, arguments);
callback = function () {};
} else {
var args = Array.prototype.slice.call(arguments, 1);
var next = iterator.next();
if (next) {
args.push(wrapIterator(next));
} else {
args.push(callback);
}
nextTick(function () {
iterator.apply(null, args);
});
}
};
};
wrapIterator(makeIterator(tasks))();
};
if (typeof define !== 'undefined' && define.amd) {
define([], function () {
return waterfall;
}); // RequireJS
} else if (typeof module !== 'undefined' && module.exports) {
module.exports = waterfall; // CommonJS
} else {
globals.waterfall = waterfall; // <script>
}
})(this);

56
node_modules/a-sync-waterfall/package.json generated vendored Normal file

@@ -0,0 +1,56 @@
{
"_args": [
[
"a-sync-waterfall@1.0.1",
"/Users/lijianzhao/github/ChineseBQB"
]
],
"_from": "a-sync-waterfall@1.0.1",
"_id": "a-sync-waterfall@1.0.1",
"_inBundle": false,
"_integrity": "sha1-dba2qnJZi0l6El56J3DxT0yKH6c=",
"_location": "/a-sync-waterfall",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "a-sync-waterfall@1.0.1",
"name": "a-sync-waterfall",
"escapedName": "a-sync-waterfall",
"rawSpec": "1.0.1",
"saveSpec": null,
"fetchSpec": "1.0.1"
},
"_requiredBy": [
"/nunjucks"
],
"_resolved": "http://registry.npm.taobao.org/a-sync-waterfall/download/a-sync-waterfall-1.0.1.tgz",
"_spec": "1.0.1",
"_where": "/Users/lijianzhao/github/ChineseBQB",
"author": {
"name": "Gleb Khudyakov",
"url": "https://github.com/hydiak/a-sync-waterfall"
},
"bugs": {
"url": "https://github.com/hydiak/a-sync-waterfall/issues"
},
"dependencies": {},
"description": "Runs a list of async tasks, passing the results of each into the next one",
"homepage": "https://github.com/hydiak/a-sync-waterfall",
"keywords": [
"async",
"sync",
"waterfall",
"tasks",
"control",
"flow"
],
"license": "MIT",
"main": "./index",
"name": "a-sync-waterfall",
"repository": {
"type": "git",
"url": "git+ssh://git@github.com/hydiak/a-sync-waterfall.git"
},
"version": "1.0.1"
}

77
node_modules/a-sync-waterfall/test.js generated vendored Normal file

@@ -0,0 +1,77 @@
"use strict";
const waterfall = require('./index');
var generateSyncTask = function(index) {
return function (x){
return function(cb){
console.log(x);
cb(null);
};
}(index);
};
var generateAsyncTask = function(index) {
return function (x){
return function(cb){
setTimeout(function(){
console.log(x);
cb(null);
}, 0);
};
}(index);
};
var generateSyncTasks = function(count){
var tasks = [];
for(var i=0; i<count; i++) {
tasks.push(generateSyncTask(i));
}
return tasks;
}
var generateAsyncTasks = function(count){
var tasks = [];
for(var i=0; i<count; i++) {
tasks.push(generateAsyncTask(i));
}
return tasks;
}
var generateRandomTasks = function(count){
var tasks = [];
for(var i=0; i<count; i++) {
Math.random() > .5 ? tasks.push(generateAsyncTask(i)) : tasks.push(generateSyncTask(i))
}
return tasks;
}
var done = function(){
console.log('done');
}
var testSync = function(){
waterfall(generateSyncTasks(10), done);
console.log('this text should be after waterfall');
};
var testAsync = function(){
waterfall(generateAsyncTasks(5), done);
console.log('this text should be before waterfall');
};
var testMixed = function(){
waterfall(generateRandomTasks(20), done);
};
console.log('testSync:');
testSync();
// console.log('\ntestAsync: ');
// testAsync();
console.log('\ntestMixed: ');
testMixed();

46
node_modules/abbrev/LICENSE generated vendored Normal file

@@ -0,0 +1,46 @@
This software is dual-licensed under the ISC and MIT licenses.
You may use this software under EITHER of the following licenses.
----------
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
----------
Copyright Isaac Z. Schlueter and Contributors
All rights reserved.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

23
node_modules/abbrev/README.md generated vendored Normal file

@@ -0,0 +1,23 @@
# abbrev-js
Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev).
Usage:
var abbrev = require("abbrev");
abbrev("foo", "fool", "folding", "flop");
// returns:
{ fl: 'flop'
, flo: 'flop'
, flop: 'flop'
, fol: 'folding'
, fold: 'folding'
, foldi: 'folding'
, foldin: 'folding'
, folding: 'folding'
, foo: 'foo'
, fool: 'fool'
}
This is handy for command-line scripts, or other cases where you want to be able to accept shorthands.

61
node_modules/abbrev/abbrev.js generated vendored Normal file

@@ -0,0 +1,61 @@
module.exports = exports = abbrev.abbrev = abbrev
abbrev.monkeyPatch = monkeyPatch
function monkeyPatch () {
Object.defineProperty(Array.prototype, 'abbrev', {
value: function () { return abbrev(this) },
enumerable: false, configurable: true, writable: true
})
Object.defineProperty(Object.prototype, 'abbrev', {
value: function () { return abbrev(Object.keys(this)) },
enumerable: false, configurable: true, writable: true
})
}
function abbrev (list) {
if (arguments.length !== 1 || !Array.isArray(list)) {
list = Array.prototype.slice.call(arguments, 0)
}
for (var i = 0, l = list.length, args = [] ; i < l ; i ++) {
args[i] = typeof list[i] === "string" ? list[i] : String(list[i])
}
// sort them lexicographically, so that they're next to their nearest kin
args = args.sort(lexSort)
// walk through each, seeing how much it has in common with the next and previous
var abbrevs = {}
, prev = ""
for (var i = 0, l = args.length ; i < l ; i ++) {
var current = args[i]
, next = args[i + 1] || ""
, nextMatches = true
, prevMatches = true
if (current === next) continue
for (var j = 0, cl = current.length ; j < cl ; j ++) {
var curChar = current.charAt(j)
nextMatches = nextMatches && curChar === next.charAt(j)
prevMatches = prevMatches && curChar === prev.charAt(j)
if (!nextMatches && !prevMatches) {
j ++
break
}
}
prev = current
if (j === cl) {
abbrevs[current] = current
continue
}
for (var a = current.substr(0, j) ; j <= cl ; j ++) {
abbrevs[a] = current
a += current.charAt(j)
}
}
return abbrevs
}
function lexSort (a, b) {
return a === b ? 0 : a > b ? 1 : -1
}

61
node_modules/abbrev/package.json generated vendored Normal file

@@ -0,0 +1,61 @@
{
"_args": [
[
"abbrev@1.1.1",
"/Users/lijianzhao/github/ChineseBQB"
]
],
"_from": "abbrev@1.1.1",
"_id": "abbrev@1.1.1",
"_inBundle": false,
"_integrity": "sha1-+PLIh60Qv2f2NPAFtph/7TF5qsg=",
"_location": "/abbrev",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "abbrev@1.1.1",
"name": "abbrev",
"escapedName": "abbrev",
"rawSpec": "1.1.1",
"saveSpec": null,
"fetchSpec": "1.1.1"
},
"_requiredBy": [
"/hexo",
"/hexo/hexo-cli",
"/nopt"
],
"_resolved": "http://registry.npm.taobao.org/abbrev/download/abbrev-1.1.1.tgz",
"_spec": "1.1.1",
"_where": "/Users/lijianzhao/github/ChineseBQB",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me"
},
"bugs": {
"url": "https://github.com/isaacs/abbrev-js/issues"
},
"description": "Like ruby's abbrev module, but in js",
"devDependencies": {
"tap": "^10.1"
},
"files": [
"abbrev.js"
],
"homepage": "https://github.com/isaacs/abbrev-js#readme",
"license": "ISC",
"main": "abbrev.js",
"name": "abbrev",
"repository": {
"type": "git",
"url": "git+ssh://git@github.com/isaacs/abbrev-js.git"
},
"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test.js --100"
},
"version": "1.1.1"
}

236
node_modules/accepts/HISTORY.md generated vendored Normal file

@@ -0,0 +1,236 @@
1.3.7 / 2019-04-29
==================
* deps: negotiator@0.6.2
- Fix sorting charset, encoding, and language with extra parameters
1.3.6 / 2019-04-28
==================
* deps: mime-types@~2.1.24
- deps: mime-db@~1.40.0
1.3.5 / 2018-02-28
==================
* deps: mime-types@~2.1.18
- deps: mime-db@~1.33.0
1.3.4 / 2017-08-22
==================
* deps: mime-types@~2.1.16
- deps: mime-db@~1.29.0
1.3.3 / 2016-05-02
==================
* deps: mime-types@~2.1.11
- deps: mime-db@~1.23.0
* deps: negotiator@0.6.1
- perf: improve `Accept` parsing speed
- perf: improve `Accept-Charset` parsing speed
- perf: improve `Accept-Encoding` parsing speed
- perf: improve `Accept-Language` parsing speed
1.3.2 / 2016-03-08
==================
* deps: mime-types@~2.1.10
- Fix extension of `application/dash+xml`
- Update primary extension for `audio/mp4`
- deps: mime-db@~1.22.0
1.3.1 / 2016-01-19
==================
* deps: mime-types@~2.1.9
- deps: mime-db@~1.21.0
1.3.0 / 2015-09-29
==================
* deps: mime-types@~2.1.7
- deps: mime-db@~1.19.0
* deps: negotiator@0.6.0
- Fix including type extensions in parameters in `Accept` parsing
- Fix parsing `Accept` parameters with quoted equals
- Fix parsing `Accept` parameters with quoted semicolons
- Lazy-load modules from main entry point
- perf: delay type concatenation until needed
- perf: enable strict mode
- perf: hoist regular expressions
- perf: remove closures getting spec properties
- perf: remove a closure from media type parsing
- perf: remove property delete from media type parsing
1.2.13 / 2015-09-06
===================
* deps: mime-types@~2.1.6
- deps: mime-db@~1.18.0
1.2.12 / 2015-07-30
===================
* deps: mime-types@~2.1.4
- deps: mime-db@~1.16.0
1.2.11 / 2015-07-16
===================
* deps: mime-types@~2.1.3
- deps: mime-db@~1.15.0
1.2.10 / 2015-07-01
===================
* deps: mime-types@~2.1.2
- deps: mime-db@~1.14.0
1.2.9 / 2015-06-08
==================
* deps: mime-types@~2.1.1
- perf: fix deopt during mapping
1.2.8 / 2015-06-07
==================
* deps: mime-types@~2.1.0
- deps: mime-db@~1.13.0
* perf: avoid argument reassignment & argument slice
* perf: avoid negotiator recursive construction
* perf: enable strict mode
* perf: remove unnecessary bitwise operator
1.2.7 / 2015-05-10
==================
* deps: negotiator@0.5.3
- Fix media type parameter matching to be case-insensitive
1.2.6 / 2015-05-07
==================
* deps: mime-types@~2.0.11
- deps: mime-db@~1.9.1
* deps: negotiator@0.5.2
- Fix comparing media types with quoted values
- Fix splitting media types with quoted commas
1.2.5 / 2015-03-13
==================
* deps: mime-types@~2.0.10
- deps: mime-db@~1.8.0
1.2.4 / 2015-02-14
==================
* Support Node.js 0.6
* deps: mime-types@~2.0.9
- deps: mime-db@~1.7.0
* deps: negotiator@0.5.1
- Fix preference sorting to be stable for long acceptable lists
1.2.3 / 2015-01-31
==================
* deps: mime-types@~2.0.8
- deps: mime-db@~1.6.0
1.2.2 / 2014-12-30
==================
* deps: mime-types@~2.0.7
- deps: mime-db@~1.5.0
1.2.1 / 2014-12-30
==================
* deps: mime-types@~2.0.5
- deps: mime-db@~1.3.1
1.2.0 / 2014-12-19
==================
* deps: negotiator@0.5.0
- Fix list return order when large accepted list
- Fix missing identity encoding when q=0 exists
- Remove dynamic building of Negotiator class
1.1.4 / 2014-12-10
==================
* deps: mime-types@~2.0.4
- deps: mime-db@~1.3.0
1.1.3 / 2014-11-09
==================
* deps: mime-types@~2.0.3
- deps: mime-db@~1.2.0
1.1.2 / 2014-10-14
==================
* deps: negotiator@0.4.9
- Fix error when media type has invalid parameter
1.1.1 / 2014-09-28
==================
* deps: mime-types@~2.0.2
- deps: mime-db@~1.1.0
* deps: negotiator@0.4.8
- Fix all negotiations to be case-insensitive
- Stable sort preferences of same quality according to client order
1.1.0 / 2014-09-02
==================
* update `mime-types`
1.0.7 / 2014-07-04
==================
* Fix wrong type returned from `type` when match after unknown extension
1.0.6 / 2014-06-24
==================
* deps: negotiator@0.4.7
1.0.5 / 2014-06-20
==================
* fix crash when unknown extension given
1.0.4 / 2014-06-19
==================
* use `mime-types`
1.0.3 / 2014-06-11
==================
* deps: negotiator@0.4.6
- Order by specificity when quality is the same
1.0.2 / 2014-05-29
==================
* Fix interpretation when header not in request
* deps: pin negotiator@0.4.5
1.0.1 / 2014-01-18
==================
* Identity encoding isn't always acceptable
* deps: negotiator@~0.4.0
1.0.0 / 2013-12-27
==================
* Genesis

23
node_modules/accepts/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,23 @@
(The MIT License)
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com>
Copyright (c) 2015 Douglas Christopher Wilson <doug@somethingdoug.com>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

142
node_modules/accepts/README.md generated vendored Normal file
View File

@@ -0,0 +1,142 @@
# accepts
[![NPM Version][npm-version-image]][npm-url]
[![NPM Downloads][npm-downloads-image]][npm-url]
[![Node.js Version][node-version-image]][node-version-url]
[![Build Status][travis-image]][travis-url]
[![Test Coverage][coveralls-image]][coveralls-url]
Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator).
Extracted from [koa](https://www.npmjs.com/package/koa) for general use.
In addition to negotiator, it allows:
- Allows types as an array or arguments list, i.e. `(['text/html', 'application/json'])`
as well as `('text/html', 'application/json')`.
- Allows type shorthands such as `json`.
- Returns `false` when no types match
- Treats non-existent headers as `*`
## Installation
This is a [Node.js](https://nodejs.org/en/) module available through the
[npm registry](https://www.npmjs.com/). Installation is done using the
[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally):
```sh
$ npm install accepts
```
## API
<!-- eslint-disable no-unused-vars -->
```js
var accepts = require('accepts')
```
### accepts(req)
Create a new `Accepts` object for the given `req`.
#### .charset(charsets)
Return the first accepted charset. If nothing in `charsets` is accepted,
then `false` is returned.
#### .charsets()
Return the charsets that the request accepts, in the order of the client's
preference (most preferred first).
#### .encoding(encodings)
Return the first accepted encoding. If nothing in `encodings` is accepted,
then `false` is returned.
#### .encodings()
Return the encodings that the request accepts, in the order of the client's
preference (most preferred first).
#### .language(languages)
Return the first accepted language. If nothing in `languages` is accepted,
then `false` is returned.
#### .languages()
Return the languages that the request accepts, in the order of the client's
preference (most preferred first).
#### .type(types)
Return the first accepted type (and it is returned as the same text as what
appears in the `types` array). If nothing in `types` is accepted, then `false`
is returned.
The `types` array can contain full MIME types or file extensions. Any value
that is not a full MIME type is passed to `require('mime-types').lookup`.
#### .types()
Return the types that the request accepts, in the order of the client's
preference (most preferred first).
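As a quick illustration of the charset, encoding, and language helpers described above (an editorial sketch, not part of the upstream README; the header values are made up):

```js
var accepts = require('accepts')

// accepts only needs an object with a `headers` property.
var req = {
  headers: {
    'accept-charset': 'utf-8, iso-8859-1;q=0.2',
    'accept-encoding': 'gzip, deflate',
    'accept-language': 'en;q=0.8, es'
  }
}

var accept = accepts(req)
accept.charset(['utf-8', 'iso-8859-1'])  // => 'utf-8'
accept.encoding(['gzip', 'identity'])    // => 'gzip'
accept.language(['es', 'en'])            // => 'es'
accept.charsets()                        // => ['utf-8', 'iso-8859-1']
```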
## Examples
### Simple type negotiation
This simple example shows how to use `accepts` to return a differently typed
response body based on what the client wants to accept. The server lists its
preferences in order and will get back the best match between the client and
server.
```js
var accepts = require('accepts')
var http = require('http')
function app (req, res) {
var accept = accepts(req)
// the order of this list is significant; should be server preferred order
switch (accept.type(['json', 'html'])) {
case 'json':
res.setHeader('Content-Type', 'application/json')
res.write('{"hello":"world!"}')
break
case 'html':
res.setHeader('Content-Type', 'text/html')
res.write('<b>hello, world!</b>')
break
default:
// the fallback is text/plain, so no need to specify it above
res.setHeader('Content-Type', 'text/plain')
res.write('hello, world!')
break
}
res.end()
}
http.createServer(app).listen(3000)
```
You can test this out with the cURL program:
```sh
curl -I -H'Accept: text/html' http://localhost:3000/
```
## License
[MIT](LICENSE)
[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/accepts/master
[coveralls-url]: https://coveralls.io/r/jshttp/accepts?branch=master
[node-version-image]: https://badgen.net/npm/node/accepts
[node-version-url]: https://nodejs.org/en/download
[npm-downloads-image]: https://badgen.net/npm/dm/accepts
[npm-url]: https://npmjs.org/package/accepts
[npm-version-image]: https://badgen.net/npm/v/accepts
[travis-image]: https://badgen.net/travis/jshttp/accepts/master
[travis-url]: https://travis-ci.org/jshttp/accepts

238
node_modules/accepts/index.js generated vendored Normal file
View File

@@ -0,0 +1,238 @@
/*!
* accepts
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2015 Douglas Christopher Wilson
* MIT Licensed
*/
'use strict'
/**
* Module dependencies.
* @private
*/
var Negotiator = require('negotiator')
var mime = require('mime-types')
/**
* Module exports.
* @public
*/
module.exports = Accepts
/**
* Create a new Accepts object for the given req.
*
* @param {object} req
* @public
*/
function Accepts (req) {
if (!(this instanceof Accepts)) {
return new Accepts(req)
}
this.headers = req.headers
this.negotiator = new Negotiator(req)
}
/**
* Check if the given `type(s)` is acceptable, returning
* the best match when true, otherwise `false`, in which
* case you should respond with 406 "Not Acceptable".
*
* The `type` value may be a single mime type string
* such as "application/json", the extension name
* such as "json" or an array `["json", "html", "text/plain"]`. When a list
* or array is given, the _best_ match, if any, is returned.
*
* Examples:
*
* // Accept: text/html
* this.types('html');
* // => "html"
*
* // Accept: text/*, application/json
* this.types('html');
* // => "html"
* this.types('text/html');
* // => "text/html"
* this.types('json', 'text');
* // => "json"
* this.types('application/json');
* // => "application/json"
*
* // Accept: text/*, application/json
* this.types('image/png');
* this.types('png');
* // => undefined
*
* // Accept: text/*;q=.5, application/json
* this.types(['html', 'json']);
* this.types('html', 'json');
* // => "json"
*
* @param {String|Array} types...
* @return {String|Array|Boolean}
* @public
*/
Accepts.prototype.type =
Accepts.prototype.types = function (types_) {
var types = types_
// support flattened arguments
if (types && !Array.isArray(types)) {
types = new Array(arguments.length)
for (var i = 0; i < types.length; i++) {
types[i] = arguments[i]
}
}
// no types, return all requested types
if (!types || types.length === 0) {
return this.negotiator.mediaTypes()
}
// no accept header, return first given type
if (!this.headers.accept) {
return types[0]
}
var mimes = types.map(extToMime)
var accepts = this.negotiator.mediaTypes(mimes.filter(validMime))
var first = accepts[0]
return first
? types[mimes.indexOf(first)]
: false
}
/**
* Return accepted encodings or best fit based on `encodings`.
*
* Given `Accept-Encoding: gzip, deflate`
* an array sorted by quality is returned:
*
* ['gzip', 'deflate']
*
* @param {String|Array} encodings...
* @return {String|Array}
* @public
*/
Accepts.prototype.encoding =
Accepts.prototype.encodings = function (encodings_) {
var encodings = encodings_
// support flattened arguments
if (encodings && !Array.isArray(encodings)) {
encodings = new Array(arguments.length)
for (var i = 0; i < encodings.length; i++) {
encodings[i] = arguments[i]
}
}
// no encodings, return all requested encodings
if (!encodings || encodings.length === 0) {
return this.negotiator.encodings()
}
return this.negotiator.encodings(encodings)[0] || false
}
/**
* Return accepted charsets or best fit based on `charsets`.
*
* Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5`
* an array sorted by quality is returned:
*
* ['utf-8', 'utf-7', 'iso-8859-1']
*
* @param {String|Array} charsets...
* @return {String|Array}
* @public
*/
Accepts.prototype.charset =
Accepts.prototype.charsets = function (charsets_) {
var charsets = charsets_
// support flattened arguments
if (charsets && !Array.isArray(charsets)) {
charsets = new Array(arguments.length)
for (var i = 0; i < charsets.length; i++) {
charsets[i] = arguments[i]
}
}
// no charsets, return all requested charsets
if (!charsets || charsets.length === 0) {
return this.negotiator.charsets()
}
return this.negotiator.charsets(charsets)[0] || false
}
/**
* Return accepted languages or best fit based on `langs`.
*
* Given `Accept-Language: en;q=0.8, es, pt`
* an array sorted by quality is returned:
*
* ['es', 'pt', 'en']
*
* @param {String|Array} langs...
* @return {Array|String}
* @public
*/
Accepts.prototype.lang =
Accepts.prototype.langs =
Accepts.prototype.language =
Accepts.prototype.languages = function (languages_) {
var languages = languages_
// support flattened arguments
if (languages && !Array.isArray(languages)) {
languages = new Array(arguments.length)
for (var i = 0; i < languages.length; i++) {
languages[i] = arguments[i]
}
}
// no languages, return all requested languages
if (!languages || languages.length === 0) {
return this.negotiator.languages()
}
return this.negotiator.languages(languages)[0] || false
}
/**
* Convert extnames to mime.
*
* @param {String} type
* @return {String}
* @private
*/
function extToMime (type) {
return type.indexOf('/') === -1
? mime.lookup(type)
: type
}
/**
* Check if mime is valid.
*
* @param {String} type
* @return {String}
* @private
*/
function validMime (type) {
return typeof type === 'string'
}
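To make the helpers above concrete, here is a brief editorial sketch of the `type()` flow (the request object is hypothetical, not from the package's tests):

```js
var accepts = require('accepts')

var req = { headers: { accept: 'text/*;q=0.5, application/json' } }
var accept = accepts(req)

// Extensions are converted with mime.lookup() before negotiation, and the
// matching entry from the *original* list is what gets returned.
accept.type(['html', 'json'])  // => 'json'
accept.type(['text/html'])     // => 'text/html'
accept.type(['png'])           // => false (image/png is not acceptable)
```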

89
node_modules/accepts/package.json generated vendored Normal file
View File

@@ -0,0 +1,89 @@
{
"_args": [
[
"accepts@1.3.7",
"/Users/lijianzhao/github/ChineseBQB"
]
],
"_from": "accepts@1.3.7",
"_id": "accepts@1.3.7",
"_inBundle": false,
"_integrity": "sha1-UxvHJlF6OytB+FACHGzBXqq1B80=",
"_location": "/accepts",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "accepts@1.3.7",
"name": "accepts",
"escapedName": "accepts",
"rawSpec": "1.3.7",
"saveSpec": null,
"fetchSpec": "1.3.7"
},
"_requiredBy": [
"/compression"
],
"_resolved": "https://registry.npm.taobao.org/accepts/download/accepts-1.3.7.tgz",
"_spec": "1.3.7",
"_where": "/Users/lijianzhao/github/ChineseBQB",
"bugs": {
"url": "https://github.com/jshttp/accepts/issues"
},
"contributors": [
{
"name": "Douglas Christopher Wilson",
"email": "doug@somethingdoug.com"
},
{
"name": "Jonathan Ong",
"email": "me@jongleberry.com",
"url": "http://jongleberry.com"
}
],
"dependencies": {
"mime-types": "~2.1.24",
"negotiator": "0.6.2"
},
"description": "Higher-level content negotiation",
"devDependencies": {
"deep-equal": "1.0.1",
"eslint": "5.16.0",
"eslint-config-standard": "12.0.0",
"eslint-plugin-import": "2.17.2",
"eslint-plugin-markdown": "1.0.0",
"eslint-plugin-node": "8.0.1",
"eslint-plugin-promise": "4.1.1",
"eslint-plugin-standard": "4.0.0",
"mocha": "6.1.4",
"nyc": "14.0.0"
},
"engines": {
"node": ">= 0.6"
},
"files": [
"LICENSE",
"HISTORY.md",
"index.js"
],
"homepage": "https://github.com/jshttp/accepts#readme",
"keywords": [
"content",
"negotiation",
"accept",
"accepts"
],
"license": "MIT",
"name": "accepts",
"repository": {
"type": "git",
"url": "git+https://github.com/jshttp/accepts.git"
},
"scripts": {
"lint": "eslint --plugin markdown --ext js,md .",
"test": "mocha --reporter spec --check-leaks --bail test/",
"test-cov": "nyc --reporter=html --reporter=text npm test",
"test-travis": "nyc --reporter=text npm test"
},
"version": "1.3.7"
}

514
node_modules/acorn/CHANGELOG.md generated vendored Normal file
View File

@@ -0,0 +1,514 @@
## 6.1.1 (2019-02-27)
### Bug fixes
Fix bug that caused parsing of default exports with names to fail.
## 6.1.0 (2019-02-08)
### Bug fixes
Fix scope checking when redefining a `var` as a lexical binding.
### New features
Split up `parseSubscripts` to use an internal `parseSubscript` method to make it easier to extend with plugins.
## 6.0.7 (2019-02-04)
### Bug fixes
Check that exported bindings are defined.
Don't treat `\u180e` as a whitespace character.
Check for duplicate parameter names in methods.
Don't allow shorthand properties when they are generators or async methods.
Forbid binding `await` in async arrow function's parameter list.
## 6.0.6 (2019-01-30)
### Bug fixes
The content of class declarations and expressions is now always parsed in strict mode.
Don't allow `let` or `const` to bind the variable name `let`.
Treat class declarations as lexical.
Don't allow a generator function declaration as the sole body of an `if` or `else`.
Ignore `"use strict"` when after an empty statement.
Allow string line continuations with special line terminator characters.
Treat `for` bodies as part of the `for` scope when checking for conflicting bindings.
Fix bug with parsing `yield` in a `for` loop initializer.
Implement special cases around scope checking for functions.
## 6.0.5 (2019-01-02)
### Bug fixes
Fix TypeScript type for `Parser.extend` and add `allowAwaitOutsideFunction` to options type.
Don't treat `let` as a keyword when the next token is `{` on the next line.
Fix bug that broke checking for parentheses around an object pattern in a destructuring assignment when `preserveParens` was on.
## 6.0.4 (2018-11-05)
### Bug fixes
Further improvements to tokenizing regular expressions in corner cases.
## 6.0.3 (2018-11-04)
### Bug fixes
Fix bug in tokenizing an expression-less return followed by a function followed by a regular expression.
Remove stray symlink in the package tarball.
## 6.0.2 (2018-09-26)
### Bug fixes
Fix bug where default expressions could fail to parse inside an object destructuring assignment expression.
## 6.0.1 (2018-09-14)
### Bug fixes
Fix wrong value in `version` export.
## 6.0.0 (2018-09-14)
### Bug fixes
Better handle variable-redefinition checks for catch bindings and functions directly under if statements.
Forbid `new.target` in top-level arrow functions.
Fix issue with parsing a regexp after `yield` in some contexts.
### New features
The package now comes with TypeScript definitions.
### Breaking changes
The default value of the `ecmaVersion` option is now 9 (2018).
Plugins work differently, and will have to be rewritten to work with this version.
The loose parser and walker have been moved into separate packages (`acorn-loose` and `acorn-walk`).
## 5.7.3 (2018-09-10)
### Bug fixes
Fix failure to tokenize regexps after expressions like `x.of`.
Better error message for unterminated template literals.
## 5.7.2 (2018-08-24)
### Bug fixes
Properly handle `allowAwaitOutsideFunction` in for statements.
Treat function declarations at the top level of modules like let bindings.
Don't allow async function declarations as the only statement under a label.
## 5.7.0 (2018-06-15)
### New features
Upgraded to Unicode 11.
## 5.6.0 (2018-05-31)
### New features
Allow U+2028 and U+2029 in string when ECMAVersion >= 10.
Allow binding-less catch statements when ECMAVersion >= 10.
Add `allowAwaitOutsideFunction` option for parsing top-level `await`.
## 5.5.3 (2018-03-08)
### Bug fixes
A _second_ republish of the code in 5.5.1, this time with yarn, to hopefully get valid timestamps.
## 5.5.2 (2018-03-08)
### Bug fixes
A republish of the code in 5.5.1 in an attempt to solve an issue with the file timestamps in the npm package being 0.
## 5.5.1 (2018-03-06)
### Bug fixes
Fix misleading error message for octal escapes in template strings.
## 5.5.0 (2018-02-27)
### New features
The identifier character categorization is now based on Unicode version 10.
Acorn will now validate the content of regular expressions, including new ES9 features.
## 5.4.0 (2018-02-01)
### Bug fixes
Disallow duplicate or escaped flags on regular expressions.
Disallow octal escapes in strings in strict mode.
### New features
Add support for async iteration.
Add support for object spread and rest.
## 5.3.0 (2017-12-28)
### Bug fixes
Fix parsing of floating point literals with leading zeroes in loose mode.
Allow duplicate property names in object patterns.
Don't allow static class methods named `prototype`.
Disallow async functions directly under `if` or `else`.
Parse right-hand-side of `for`/`of` as an assignment expression.
Stricter parsing of `for`/`in`.
Don't allow unicode escapes in contextual keywords.
### New features
Parsing class members was factored into smaller methods to allow plugins to hook into it.
## 5.2.1 (2017-10-30)
### Bug fixes
Fix a token context corruption bug.
## 5.2.0 (2017-10-30)
### Bug fixes
Fix token context tracking for `class` and `function` in property-name position.
Make sure `%*` isn't parsed as a valid operator.
Allow shorthand properties `get` and `set` to be followed by default values.
Disallow `super` when not in callee or object position.
### New features
Support [`directive` property](https://github.com/estree/estree/compare/b3de58c9997504d6fba04b72f76e6dd1619ee4eb...1da8e603237144f44710360f8feb7a9977e905e0) on directive expression statements.
## 5.1.2 (2017-09-04)
### Bug fixes
Disable parsing of legacy HTML-style comments in modules.
Fix parsing of async methods whose names are keywords.
## 5.1.1 (2017-07-06)
### Bug fixes
Fix problem with disambiguating regexp and division after a class.
## 5.1.0 (2017-07-05)
### Bug fixes
Fix tokenizing of regexps in an object-destructuring `for`/`of` loop and after `yield`.
Parse zero-prefixed numbers with non-octal digits as decimal.
Allow object/array patterns in rest parameters.
Don't error when `yield` is used as a property name.
Allow `async` as a shorthand object property.
### New features
Implement the [template literal revision proposal](https://github.com/tc39/proposal-template-literal-revision) for ES9.
## 5.0.3 (2017-04-01)
### Bug fixes
Fix spurious duplicate variable definition errors for named functions.
## 5.0.2 (2017-03-30)
### Bug fixes
A binary operator after a parenthesized arrow expression is no longer incorrectly treated as an error.
## 5.0.0 (2017-03-28)
### Bug fixes
Raise an error for duplicated lexical bindings.
Fix spurious error when an assignment expression occurred after a spread expression.
Accept regular expressions after `of` (in `for`/`of`), `yield` (in a generator), and braced arrow functions.
Allow labels in front of `var` declarations, even in strict mode.
### Breaking changes
Parse declarations following `export default` as declaration nodes, not expressions. This means that class and function declarations nodes can now have `null` as their `id`.
## 4.0.11 (2017-02-07)
### Bug fixes
Allow all forms of member expressions to be parenthesized as lvalue.
## 4.0.10 (2017-02-07)
### Bug fixes
Don't expect semicolons after default-exported functions or classes, even when they are expressions.
Check for use of `'use strict'` directives in non-simple parameter functions, even when already in strict mode.
## 4.0.9 (2017-02-06)
### Bug fixes
Fix incorrect error raised for parenthesized simple assignment targets, so that `(x) = 1` parses again.
## 4.0.8 (2017-02-03)
### Bug fixes
Solve spurious parenthesized pattern errors by temporarily erring on the side of accepting programs that our delayed errors don't handle correctly yet.
## 4.0.7 (2017-02-02)
### Bug fixes
Accept invalidly rejected code like `(x).y = 2` again.
Don't raise an error when a function _inside_ strict code has a non-simple parameter list.
## 4.0.6 (2017-02-02)
### Bug fixes
Fix exponential behavior (manifesting itself as a complete hang for even relatively small source files) introduced by the new 'use strict' check.
## 4.0.5 (2017-02-02)
### Bug fixes
Disallow parenthesized pattern expressions.
Allow keywords as export names.
Don't allow the `async` keyword to be parenthesized.
Properly raise an error when a keyword contains a character escape.
Allow `"use strict"` to appear after other string literal expressions.
Disallow labeled declarations.
## 4.0.4 (2016-12-19)
### Bug fixes
Fix crash when `export` was followed by a keyword that can't be
exported.
## 4.0.3 (2016-08-16)
### Bug fixes
Allow regular function declarations inside single-statement `if` branches in loose mode. Forbid them entirely in strict mode.
Properly parse properties named `async` in ES2017 mode.
Fix bug where reserved words were broken in ES2017 mode.
## 4.0.2 (2016-08-11)
### Bug fixes
Don't ignore period or 'e' characters after octal numbers.
Fix broken parsing for call expressions in default parameter values of arrow functions.
## 4.0.1 (2016-08-08)
### Bug fixes
Fix false positives in duplicated export name errors.
## 4.0.0 (2016-08-07)
### Breaking changes
The default `ecmaVersion` option value is now 7.
A number of internal method signatures changed, so plugins might need to be updated.
### Bug fixes
The parser now raises errors on duplicated export names.
`arguments` and `eval` can now be used in shorthand properties.
Duplicate parameter names in non-simple argument lists now always produce an error.
### New features
The `ecmaVersion` option now also accepts year-style version numbers
(2015, etc).
Support for `async`/`await` syntax when `ecmaVersion` is >= 8.
Support for trailing commas in call expressions when `ecmaVersion` is >= 8.
## 3.3.0 (2016-07-25)
### Bug fixes
Fix bug in tokenizing of regexp operator after a function declaration.
Fix parser crash when parsing an array pattern with a hole.
### New features
Implement check against complex argument lists in functions that enable strict mode in ES7.
## 3.2.0 (2016-06-07)
### Bug fixes
Improve handling of lack of unicode regexp support in host
environment.
Properly reject shorthand properties whose name is a keyword.
### New features
Visitors created with `visit.make` now have their base as _prototype_, rather than copying properties into a fresh object.
## 3.1.0 (2016-04-18)
### Bug fixes
Properly tokenize the division operator directly after a function expression.
Allow trailing comma in destructuring arrays.
## 3.0.4 (2016-02-25)
### Fixes
Allow update expressions as left-hand-side of the ES7 exponential operator.
## 3.0.2 (2016-02-10)
### Fixes
Fix bug that accidentally made `undefined` a reserved word when parsing ES7.
## 3.0.0 (2016-02-10)
### Breaking changes
The default value of the `ecmaVersion` option is now 6 (used to be 5).
Support for comprehension syntax (which was dropped from the draft spec) has been removed.
### Fixes
`let` and `yield` are now “contextual keywords”, meaning you can mostly use them as identifiers in ES5 non-strict code.
A parenthesized class or function expression after `export default` is now parsed correctly.
### New features
When `ecmaVersion` is set to 7, Acorn will parse the exponentiation operator (`**`).
The identifier character ranges are now based on Unicode 8.0.0.
Plugins can now override the `raiseRecoverable` method to override the way non-critical errors are handled.
## 2.7.0 (2016-01-04)
### Fixes
Stop allowing rest parameters in setters.
Disallow `y` regexp flag in ES5.
Disallow `\00` and `\000` escapes in strict mode.
Raise an error when an import name is a reserved word.
## 2.6.2 (2015-11-10)
### Fixes
Don't crash when no options object is passed.
## 2.6.0 (2015-11-09)
### Fixes
Add `await` as a reserved word in module sources.
Disallow `yield` in a parameter default value for a generator.
Forbid using a comma after a rest pattern in an array destructuring.
### New features
Support parsing stdin in command-line tool.
## 2.5.0 (2015-10-27)
### Fixes
Fix tokenizer support in the command-line tool.
Stop allowing `new.target` outside of functions.
Remove legacy `guard` and `guardedHandler` properties from try nodes.
Stop allowing multiple `__proto__` properties on an object literal in strict mode.
Don't allow rest parameters to be non-identifier patterns.
Check for duplicate parameter names in arrow functions.

19
node_modules/acorn/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,19 @@
Copyright (C) 2012-2018 by various contributors (see AUTHORS)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

269
node_modules/acorn/README.md generated vendored Normal file
View File

@@ -0,0 +1,269 @@
# Acorn
A tiny, fast JavaScript parser written in JavaScript.
## Community
Acorn is open source software released under an
[MIT license](https://github.com/acornjs/acorn/blob/master/acorn/LICENSE).
You are welcome to
[report bugs](https://github.com/acornjs/acorn/issues) or create pull
requests on [github](https://github.com/acornjs/acorn). For questions
and discussion, please use the
[Tern discussion forum](https://discuss.ternjs.net).
## Installation
The easiest way to install acorn is from [`npm`](https://www.npmjs.com/):
```sh
npm install acorn
```
Alternately, you can download the source and build acorn yourself:
```sh
git clone https://github.com/acornjs/acorn.git
cd acorn
npm install
```
## Interface
**parse**`(input, options)` is the main interface to the library. The
`input` parameter is a string, `options` can be undefined or an object
setting some of the options listed below. The return value will be an
abstract syntax tree object as specified by the [ESTree
spec](https://github.com/estree/estree).
```javascript
let acorn = require("acorn");
console.log(acorn.parse("1 + 1"));
```
When encountering a syntax error, the parser will raise a
`SyntaxError` object with a meaningful message. The error object will
have a `pos` property that indicates the string offset at which the
error occurred, and a `loc` object that contains a `{line, column}`
object referring to that same position.
Options can be provided by passing a second argument, which should be
an object containing any of these fields:
- **ecmaVersion**: Indicates the ECMAScript version to parse. Must be
either 3, 5, 6 (2015), 7 (2016), 8 (2017), 9 (2018) or 10 (2019, partial
support). This influences support for strict mode, the set of
reserved words, and support for new syntax features. Default is 9.
**NOTE**: Only 'stage 4' (finalized) ECMAScript features are being
implemented by Acorn. Other proposed new features can be implemented
through plugins.
- **sourceType**: Indicate the mode the code should be parsed in. Can be
either `"script"` or `"module"`. This influences global strict mode
and parsing of `import` and `export` declarations.
- **onInsertedSemicolon**: If given a callback, that callback will be
called whenever a missing semicolon is inserted by the parser. The
callback will be given the character offset of the point where the
semicolon is inserted as argument, and if `locations` is on, also a
`{line, column}` object representing this position.
- **onTrailingComma**: Like `onInsertedSemicolon`, but for trailing
commas.
- **allowReserved**: If `false`, using a reserved word will generate
an error. Defaults to `true` for `ecmaVersion` 3, `false` for higher
versions. When given the value `"never"`, reserved words and
keywords can also not be used as property names (as in Internet
Explorer's old parser).
- **allowReturnOutsideFunction**: By default, a return statement at
the top level raises an error. Set this to `true` to accept such
code.
- **allowImportExportEverywhere**: By default, `import` and `export`
declarations can only appear at a program's top level. Setting this
option to `true` allows them anywhere where a statement is allowed.
- **allowAwaitOutsideFunction**: By default, `await` expressions can
only appear inside `async` functions. Setting this option to
`true` allows top-level `await` expressions. They are
still not allowed in non-`async` functions, though.
- **allowHashBang**: When this is enabled (off by default), if the
code starts with the characters `#!` (as in a shellscript), the
first line will be treated as a comment.
- **locations**: When `true`, each node has a `loc` object attached
with `start` and `end` subobjects, each of which contains the
one-based line and zero-based column numbers in `{line, column}`
form. Default is `false`.
- **onToken**: If a function is passed for this option, each found
token will be passed in the same format as tokens returned from
`tokenizer().getToken()`.
If an array is passed, each found token is pushed to it.
Note that you are not allowed to call the parser from the
callback—that will corrupt its internal state.
- **onComment**: If a function is passed for this option, whenever a
comment is encountered the function will be called with the
following parameters:
- `block`: `true` if the comment is a block comment, false if it
is a line comment.
- `text`: The content of the comment.
- `start`: Character offset of the start of the comment.
- `end`: Character offset of the end of the comment.
When the `locations` option is on, the `{line, column}` locations
of the comment's start and end are passed as two additional
parameters.
If an array is passed for this option, each found comment is pushed
to it as an object in Esprima format:
```javascript
{
"type": "Line" | "Block",
"value": "comment text",
"start": Number,
"end": Number,
// If `locations` option is on:
"loc": {
"start": {line: Number, column: Number}
"end": {line: Number, column: Number}
},
// If `ranges` option is on:
"range": [Number, Number]
}
```
Note that you are not allowed to call the parser from the
callback—that will corrupt its internal state.
- **ranges**: Nodes have their start and end character offsets
recorded in `start` and `end` properties (directly on the node,
rather than the `loc` object, which holds line/column data). To also
add a
[semi-standardized](https://bugzilla.mozilla.org/show_bug.cgi?id=745678)
`range` property holding a `[start, end]` array with the same
numbers, set the `ranges` option to `true`.
- **program**: It is possible to parse multiple files into a single
AST by passing the tree produced by parsing the first file as the
`program` option in subsequent parses. This will add the toplevel
forms of the parsed file to the "Program" (top) node of an existing
parse tree.
- **sourceFile**: When the `locations` option is `true`, you can pass
this option to add a `source` attribute in every node's `loc`
object. Note that the contents of this option are not examined or
processed in any way; you are free to use whatever format you
choose.
- **directSourceFile**: Like `sourceFile`, but a `sourceFile` property
will be added (regardless of the `location` option) directly to the
nodes, rather than the `loc` object.
- **preserveParens**: If this option is `true`, parenthesized expressions
are represented by (non-standard) `ParenthesizedExpression` nodes
that have a single `expression` property containing the expression
inside parentheses.
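A short sketch of how a few of these options combine in practice (an editorial example, not from the upstream README; the source string and output comments are illustrative):

```javascript
let acorn = require("acorn")

let comments = []
let ast = acorn.parse("// greet\nexport const hi = () => 'hi'", {
  ecmaVersion: 2018,
  sourceType: "module", // needed for `export`
  locations: true,      // attaches {line, column} `loc` objects to nodes
  onComment: comments   // collects comments as Esprima-style objects
})

console.log(ast.body[0].type)  // "ExportNamedDeclaration"
console.log(comments[0].value) // " greet"
```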
**parseExpressionAt**`(input, offset, options)` will parse a single
expression in a string, and return its AST. It will not complain if
there is more of the string left after the expression.
**tokenizer**`(input, options)` returns an object with a `getToken`
method that can be called repeatedly to get the next token, a `{start,
end, type, value}` object (with added `loc` property when the
`locations` option is enabled and `range` property when the `ranges`
option is enabled). When the token's type is `tokTypes.eof`, you
should stop calling the method, since it will keep returning that same
token forever.
In an ES6 environment, the returned result can be used as any other
protocol-compliant iterable:
```javascript
for (let token of acorn.tokenizer(str)) {
// iterate over the tokens
}
// transform code to array of tokens:
var tokens = [...acorn.tokenizer(str)];
```
**tokTypes** holds an object mapping names to the token type objects
that end up in the `type` properties of tokens.
**getLineInfo**`(input, offset)` can be used to get a `{line,
column}` object for a given program string and offset.
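For example (a quick sketch; offsets are zero-based, lines one-based, columns zero-based):

```javascript
let acorn = require("acorn")

// Offset 8 points at the "t" in the second "let".
acorn.getLineInfo("let a\nlet b", 8)
// => an object with line: 2, column: 2
```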
### The `Parser` class
Instances of the **`Parser`** class contain all the state and logic
that drives a parse. It has static methods `parse`,
`parseExpressionAt`, and `tokenizer` that match the top-level
functions by the same name.
When extending the parser with plugins, you need to call these methods
on the extended version of the class. To extend a parser with plugins,
you can use its static `extend` method.
```javascript
var acorn = require("acorn");
var jsx = require("acorn-jsx");
var JSXParser = acorn.Parser.extend(jsx());
JSXParser.parse("foo(<bar/>)");
```
The `extend` method takes any number of plugin values, and returns a
new `Parser` class that includes the extra parser logic provided by
the plugins.
## Command line interface
The `bin/acorn` utility can be used to parse a file from the command
line. It accepts as arguments its input file and the following
options:
- `--ecma3|--ecma5|--ecma6|--ecma7|--ecma8|--ecma9|--ecma10`: Sets the ECMAScript version
to parse. Default is version 9.
- `--module`: Sets the parsing mode to `"module"`. Is set to `"script"` otherwise.
- `--locations`: Attaches a "loc" object to each node with "start" and
"end" subobjects, each of which contains the one-based line and
zero-based column numbers in `{line, column}` form.
- `--allow-hash-bang`: If the code starts with the characters #! (as
in a shellscript), the first line will be treated as a comment.
- `--compact`: No whitespace is used in the AST output.
- `--silent`: Do not output the AST, just return the exit status.
- `--help`: Print the usage information and quit.
The utility spits out the syntax tree as JSON data.
## Existing plugins
- [`acorn-jsx`](https://github.com/RReverser/acorn-jsx): Parse [Facebook JSX syntax extensions](https://github.com/facebook/jsx)
Plugins for ECMAScript proposals:
- [`acorn-stage3`](https://github.com/acornjs/acorn-stage3): Parse most stage 3 proposals, bundling:
- [`acorn-async-iteration`](https://github.com/acornjs/acorn-async-iteration): Parse [async iteration proposal](https://github.com/tc39/proposal-async-iteration)
- [`acorn-bigint`](https://github.com/acornjs/acorn-bigint): Parse [BigInt proposal](https://github.com/tc39/proposal-bigint)
- [`acorn-class-fields`](https://github.com/acornjs/acorn-class-fields): Parse [class fields proposal](https://github.com/tc39/proposal-class-fields)
- [`acorn-dynamic-import`](https://github.com/kesne/acorn-dynamic-import): Parse [import() proposal](https://github.com/tc39/proposal-dynamic-import)
- [`acorn-import-meta`](https://github.com/acornjs/acorn-import-meta): Parse [import.meta proposal](https://github.com/tc39/proposal-import-meta)
- [`acorn-numeric-separator`](https://github.com/acornjs/acorn-numeric-separator): Parse [numeric separator proposal](https://github.com/tc39/proposal-numeric-separator)
- [`acorn-private-methods`](https://github.com/acornjs/acorn-private-methods): Parse [private methods, getters and setters proposal](https://github.com/tc39/proposal-private-methods)

4
node_modules/acorn/bin/acorn generated vendored Executable file
View File

@@ -0,0 +1,4 @@
#!/usr/bin/env node
'use strict';
require('../dist/bin.js');

209
node_modules/acorn/dist/acorn.d.ts generated vendored Normal file
View File

@@ -0,0 +1,209 @@
export as namespace acorn
export = acorn
declare namespace acorn {
function parse(input: string, options?: Options): Node
function parseExpressionAt(input: string, pos?: number, options?: Options): Node
function tokenizer(input: string, options?: Options): {
getToken(): Token
[Symbol.iterator](): Iterator<Token>
}
interface Options {
ecmaVersion?: 3 | 5 | 6 | 7 | 8 | 9 | 10 | 2015 | 2016 | 2017 | 2018 | 2019
sourceType?: 'script' | 'module'
onInsertedSemicolon?: (lastTokEnd: number, lastTokEndLoc?: Position) => void
onTrailingComma?: (lastTokEnd: number, lastTokEndLoc?: Position) => void
allowReserved?: boolean
allowReturnOutsideFunction?: boolean
allowImportExportEverywhere?: boolean
allowAwaitOutsideFunction?: boolean
allowHashBang?: boolean
locations?: boolean
onToken?: ((token: Token) => any) | Token[]
onComment?: ((
isBlock: boolean, text: string, start: number, end: number, startLoc?: Position,
endLoc?: Position
) => void) | Comment[]
ranges?: boolean
program?: Node
sourceFile?: string
directSourceFile?: string
preserveParens?: boolean
}
class Parser {
constructor(options: Options, input: string, startPos?: number)
parse(): Node
static parse(input: string, options?: Options): Node
static parseExpressionAt(input: string, pos: number, options?: Options): Node
static tokenizer(input: string, options?: Options): {
getToken(): Token
[Symbol.iterator](): Iterator<Token>
}
static extend(...plugins: ((BaseParser: typeof Parser) => typeof Parser)[]): typeof Parser
}
interface Position { line: number; column: number; offset: number }
const defaultOptions: Options
function getLineInfo(input: string, offset: number): Position
class SourceLocation {
start: Position
end: Position
source?: string | null
constructor(p: Parser, start: Position, end: Position)
}
class Node {
type: string
start: number
end: number
loc?: SourceLocation
sourceFile?: string
range?: [number, number]
constructor(parser: Parser, pos: number, loc?: SourceLocation)
}
class TokenType {
label: string
keyword: string
beforeExpr: boolean
startsExpr: boolean
isLoop: boolean
isAssign: boolean
prefix: boolean
postfix: boolean
binop: number
updateContext?: (prevType: TokenType) => void
constructor(label: string, conf?: any)
}
const tokTypes: {
num: TokenType
regexp: TokenType
string: TokenType
name: TokenType
eof: TokenType
bracketL: TokenType
bracketR: TokenType
braceL: TokenType
braceR: TokenType
parenL: TokenType
parenR: TokenType
comma: TokenType
semi: TokenType
colon: TokenType
dot: TokenType
question: TokenType
arrow: TokenType
template: TokenType
ellipsis: TokenType
backQuote: TokenType
dollarBraceL: TokenType
eq: TokenType
assign: TokenType
incDec: TokenType
prefix: TokenType
logicalOR: TokenType
logicalAND: TokenType
bitwiseOR: TokenType
bitwiseXOR: TokenType
bitwiseAND: TokenType
equality: TokenType
relational: TokenType
bitShift: TokenType
plusMin: TokenType
modulo: TokenType
star: TokenType
slash: TokenType
starstar: TokenType
_break: TokenType
_case: TokenType
_catch: TokenType
_continue: TokenType
_debugger: TokenType
_default: TokenType
_do: TokenType
_else: TokenType
_finally: TokenType
_for: TokenType
_function: TokenType
_if: TokenType
_return: TokenType
_switch: TokenType
_throw: TokenType
_try: TokenType
_var: TokenType
_const: TokenType
_while: TokenType
_with: TokenType
_new: TokenType
_this: TokenType
_super: TokenType
_class: TokenType
_extends: TokenType
_export: TokenType
_import: TokenType
_null: TokenType
_true: TokenType
_false: TokenType
_in: TokenType
_instanceof: TokenType
_typeof: TokenType
_void: TokenType
_delete: TokenType
}
class TokContext {
constructor(token: string, isExpr: boolean, preserveSpace: boolean, override?: (p: Parser) => void)
}
const tokContexts: {
b_stat: TokContext
b_expr: TokContext
b_tmpl: TokContext
p_stat: TokContext
p_expr: TokContext
q_tmpl: TokContext
f_expr: TokContext
}
function isIdentifierStart(code: number, astral?: boolean): boolean
function isIdentifierChar(code: number, astral?: boolean): boolean
interface AbstractToken {
}
interface Comment extends AbstractToken {
type: string
value: string
start: number
end: number
loc?: SourceLocation
range?: [number, number]
}
class Token {
type: TokenType
value: any
start: number
end: number
loc?: SourceLocation
range?: [number, number]
constructor(p: Parser)
}
function isNewLine(code: number): boolean
const lineBreak: RegExp
const lineBreakG: RegExp
const version: string
}

5018
node_modules/acorn/dist/acorn.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

1
node_modules/acorn/dist/acorn.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

4987
node_modules/acorn/dist/acorn.mjs generated vendored Normal file

File diff suppressed because it is too large Load Diff

1
node_modules/acorn/dist/acorn.mjs.map generated vendored Normal file

File diff suppressed because one or more lines are too long

68
node_modules/acorn/dist/bin.js generated vendored Normal file
View File

@@ -0,0 +1,68 @@
'use strict';
var path = require('path');
var fs = require('fs');
var acorn = require('./acorn.js');
var infile;
var forceFile;
var silent = false;
var compact = false;
var tokenize = false;
var options = {};
function help(status) {
var print = (status === 0) ? console.log : console.error;
print("usage: " + path.basename(process.argv[1]) + " [--ecma3|--ecma5|--ecma6|--ecma7|--ecma8|--ecma9|...|--ecma2015|--ecma2016|--ecma2017|--ecma2018|...]");
print(" [--tokenize] [--locations] [--allow-hash-bang] [--compact] [--silent] [--module] [--help] [--] [infile]");
process.exit(status);
}
for (var i = 2; i < process.argv.length; ++i) {
var arg = process.argv[i];
if ((arg === "-" || arg[0] !== "-") && !infile) { infile = arg; }
else if (arg === "--" && !infile && i + 2 === process.argv.length) { forceFile = infile = process.argv[++i]; }
else if (arg === "--locations") { options.locations = true; }
else if (arg === "--allow-hash-bang") { options.allowHashBang = true; }
else if (arg === "--silent") { silent = true; }
else if (arg === "--compact") { compact = true; }
else if (arg === "--help") { help(0); }
else if (arg === "--tokenize") { tokenize = true; }
else if (arg === "--module") { options.sourceType = "module"; }
else {
var match = arg.match(/^--ecma(\d+)$/);
if (match)
{ options.ecmaVersion = +match[1]; }
else
{ help(1); }
}
}
function run(code) {
var result;
try {
if (!tokenize) {
result = acorn.parse(code, options);
} else {
result = [];
var tokenizer$$1 = acorn.tokenizer(code, options), token;
do {
token = tokenizer$$1.getToken();
result.push(token);
} while (token.type !== acorn.tokTypes.eof)
}
} catch (e) {
console.error(e.message);
process.exit(1);
}
if (!silent) { console.log(JSON.stringify(result, null, compact ? null : 2)); }
}
if (forceFile || infile && infile !== "-") {
run(fs.readFileSync(infile, "utf8"));
} else {
var code = "";
process.stdin.resume();
process.stdin.on("data", function (chunk) { return code += chunk; });
process.stdin.on("end", function () { return run(code); });
}

69
node_modules/acorn/package.json generated vendored Normal file
View File

@@ -0,0 +1,69 @@
{
"_args": [
[
"acorn@6.1.1",
"/Users/lijianzhao/github/ChineseBQB"
]
],
"_from": "acorn@6.1.1",
"_id": "acorn@6.1.1",
"_inBundle": false,
"_integrity": "sha1-fSWuBbuK0fm2mRCOEJTs14hK3B8=",
"_location": "/acorn",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "acorn@6.1.1",
"name": "acorn",
"escapedName": "acorn",
"rawSpec": "6.1.1",
"saveSpec": null,
"fetchSpec": "6.1.1"
},
"_requiredBy": [
"/hexo/hexo-cli"
],
"_resolved": "http://registry.npm.taobao.org/acorn/download/acorn-6.1.1.tgz",
"_spec": "6.1.1",
"_where": "/Users/lijianzhao/github/ChineseBQB",
"bin": {
"acorn": "./bin/acorn"
},
"bugs": {
"url": "https://github.com/acornjs/acorn/issues"
},
"description": "ECMAScript parser",
"engines": {
"node": ">=0.4.0"
},
"homepage": "https://github.com/acornjs/acorn",
"license": "MIT",
"main": "dist/acorn.js",
"maintainers": [
{
"name": "Marijn Haverbeke",
"email": "marijnh@gmail.com",
"url": "https://marijnhaverbeke.nl"
},
{
"name": "Ingvar Stepanyan",
"email": "me@rreverser.com",
"url": "https://rreverser.com/"
},
{
"name": "Adrian Heine",
"url": "http://adrianheine.de"
}
],
"module": "dist/acorn.mjs",
"name": "acorn",
"repository": {
"type": "git",
"url": "git+https://github.com/acornjs/acorn.git"
},
"scripts": {
"prepare": "cd ..; npm run build:main && npm run build:bin"
},
"version": "6.1.1"
}

21
node_modules/align-text/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2015, Jon Schlinkert.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

236
node_modules/align-text/README.md generated vendored Normal file
View File

@@ -0,0 +1,236 @@
# align-text [![NPM version](https://badge.fury.io/js/align-text.svg)](http://badge.fury.io/js/align-text) [![Build Status](https://travis-ci.org/jonschlinkert/align-text.svg)](https://travis-ci.org/jonschlinkert/align-text)
> Align the text in a string.
**Examples**
Align text values in an array:
```js
align([1, 2, 3, 100]);
//=> [' 1', ' 2', ' 3', '100']
```
Or [do stuff like this](./example.js):
[![screen shot 2015-06-09 at 2 08 34 am](https://cloud.githubusercontent.com/assets/383994/8051597/7b716fbc-0e4c-11e5-9aef-4493fd22db58.png)](./example.js)
Visit [the example](./example.js) to see how this works.
## Install
Install with [npm](https://www.npmjs.com/)
```sh
$ npm i align-text --save
```
## Usage
```js
var align = require('align-text');
align(text, callback_function_or_integer);
```
**Params**
* `text` can be a **string or array**. If a string is passed, a string will be returned. If an array is passed, an array will be returned.
* `callback|integer`: if an integer, the text will be indented by that amount. If a function, it must return an integer representing the amount of leading indentation to use as `align` loops over each line.
**Example**
```js
align(text, 4);
```
Would align:
```
abc
abc
abc
```
To:
```
    abc
    abc
    abc
```
## callback
### params
The callback is used to determine the indentation of each line and gets the following params:
* `len` the length of the "current" line
* `longest` the length of the longest line
* `line` the current line (string) being aligned
* `lines` the array of all lines
### return
The callback may return:
* an integer that represents the number of spaces to use for padding,
* or an object with the following properties:
- `indent`: **{Number}** the amount of indentation to use. Default is `0` when an object is returned.
- `character`: **{String}** the character to use for indentation. Default is `''` (empty string) when an object is returned.
- `prefix`: **{String}** leading characters to use at the beginning of each line. Default is `''` (empty string) when an object is returned.
**Integer example:**
```js
// calculate half the difference between the length
// of the current line and the longest line
function centerAlign(len, longest, line, lines) {
return Math.floor((longest - len) / 2);
}
```
**Object example:**
```js
function centerAlign(len, longest, line, lines) {
return {
character: '\t',
indent: Math.floor((longest - len) / 2),
prefix: '~ ',
}
}
```
## Usage examples
### Center align
Using the `centerAlign` function from above:
```js
align(text, centerAlign);
```
Would align this text:
```js
Lorem ipsum dolor sit amet,
consectetur adipiscing
elit, sed do eiusmod tempor incididunt
ut labore et dolore
magna aliqua. Ut enim ad minim
veniam, quis
```
Resulting in this:
```
     Lorem ipsum dolor sit amet,
        consectetur adipiscing
elit, sed do eiusmod tempor incididunt
         ut labore et dolore
    magna aliqua. Ut enim ad minim
             veniam, quis
```
**Customize**
If you wanted to add more padding on the left, just pass the number in the callback.
For example, to add 4 spaces before every line:
```js
function centerAlign(len, longest, line, lines) {
return 4 + Math.floor((longest - len) / 2);
}
```
Would result in:
```
         Lorem ipsum dolor sit amet,
            consectetur adipiscing
    elit, sed do eiusmod tempor incididunt
             ut labore et dolore
        magna aliqua. Ut enim ad minim
                 veniam, quis
```
### Bullets
```js
align(text, function (len, max, line, lines) {
return {prefix: ' - '};
});
```
Would return:
```
- Lorem ipsum dolor sit amet,
- consectetur adipiscing
- elit, sed do eiusmod tempor incididunt
- ut labore et dolore
- magna aliqua. Ut enim ad minim
- veniam, quis
```
### Different indent character
```js
align(text, function (len, max, line, lines) {
return {
indent: Math.floor((max - len) / 2),
character: '~',
};
});
```
Would return
```
~~~~~Lorem ipsum dolor sit amet,
~~~~~~~~consectetur adipiscing
elit, sed do eiusmod tempor incididunt
~~~~~~~~~ut labore et dolore
~~~~magna aliqua. Ut enim ad minim
~~~~~~~~~~~~~veniam, quis
```
## Related projects
* [center-align](https://github.com/jonschlinkert/center-align): Center-align the text in a string.
* [justify](https://github.com/bahamas10/node-justify): Left or right (or both) justify text using a custom width and character
* [longest](https://github.com/jonschlinkert/longest): Get the longest item in an array.
* [right-align](https://github.com/jonschlinkert/right-align): Right-align the text in a string.
* [repeat-string](https://github.com/jonschlinkert/repeat-string): Repeat the given string n times. Fastest implementation for repeating a string.
* [word-wrap](https://github.com/jonschlinkert/word-wrap): Wrap words to a specified length.
## Running tests
Install dev dependencies:
```sh
$ npm i -d && npm test
```
## Contributing
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/align-text/issues/new)
## Author
**Jon Schlinkert**
+ [github/jonschlinkert](https://github.com/jonschlinkert)
+ [twitter/jonschlinkert](http://twitter.com/jonschlinkert)
## License
Copyright © 2015 [Jon Schlinkert](https://github.com/jonschlinkert)
Released under the MIT license.
***
_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on June 09, 2015._

52
node_modules/align-text/index.js generated vendored Normal file
View File

@@ -0,0 +1,52 @@
/*!
* align-text <https://github.com/jonschlinkert/align-text>
*
* Copyright (c) 2015, Jon Schlinkert.
* Licensed under the MIT License.
*/
'use strict';
var typeOf = require('kind-of');
var repeat = require('repeat-string');
var longest = require('longest');
module.exports = function alignText(val, fn) {
var lines, type = typeOf(val);
if (type === 'array') {
lines = val;
} else if (type === 'string') {
lines = val.split(/(?:\r\n|\n)/);
} else {
throw new TypeError('align-text expects a string or array.');
}
var fnType = typeOf(fn);
var len = lines.length;
var max = longest(lines);
var res = [], i = 0;
while (len--) {
var line = String(lines[i++]);
var diff;
if (fnType === 'function') {
diff = fn(line.length, max.length, line, lines, i);
} else if (fnType === 'number') {
diff = fn;
} else {
diff = max.length - line.length;
}
if (typeOf(diff) === 'number') {
res.push(repeat(' ', diff) + line);
} else if (typeOf(diff) === 'object') {
var result = repeat(diff.character || ' ', diff.indent || 0);
res.push((diff.prefix || '') + result + line);
}
}
if (type === 'array') return res;
return res.join('\n');
};
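A small usage sketch of the object-returning callback path handled above (an editorial example; the input strings are arbitrary):

```js
var alignText = require('align-text')

// Indent each line by half the difference to the longest line,
// pad with '.' instead of spaces, and prefix every line with '> '.
var out = alignText(['a', 'abc', 'abcde'], function (len, longest) {
  return {
    indent: Math.floor((longest - len) / 2),
    character: '.',
    prefix: '> '
  }
})

console.log(out)
// => [ '> ..a', '> .abc', '> abcde' ]
```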

21
node_modules/align-text/node_modules/kind-of/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014-2017, Jon Schlinkert
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

261
node_modules/align-text/node_modules/kind-of/README.md generated vendored Normal file
View File

@@ -0,0 +1,261 @@
# kind-of [![NPM version](https://img.shields.io/npm/v/kind-of.svg?style=flat)](https://www.npmjs.com/package/kind-of) [![NPM monthly downloads](https://img.shields.io/npm/dm/kind-of.svg?style=flat)](https://npmjs.org/package/kind-of) [![NPM total downloads](https://img.shields.io/npm/dt/kind-of.svg?style=flat)](https://npmjs.org/package/kind-of) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/kind-of.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/kind-of)
> Get the native type of a value.
## Install
Install with [npm](https://www.npmjs.com/):
```sh
$ npm install --save kind-of
```
Or install with [bower](https://bower.io/):
```sh
$ bower install kind-of --save
```
## Usage
> es5, browser and es6 ready
```js
var kindOf = require('kind-of');
kindOf(undefined);
//=> 'undefined'
kindOf(null);
//=> 'null'
kindOf(true);
//=> 'boolean'
kindOf(false);
//=> 'boolean'
kindOf(new Boolean(true));
//=> 'boolean'
kindOf(new Buffer(''));
//=> 'buffer'
kindOf(42);
//=> 'number'
kindOf(new Number(42));
//=> 'number'
kindOf('str');
//=> 'string'
kindOf(new String('str'));
//=> 'string'
kindOf(arguments);
//=> 'arguments'
kindOf({});
//=> 'object'
kindOf(Object.create(null));
//=> 'object'
kindOf(new Test());
//=> 'object'
kindOf(new Date());
//=> 'date'
kindOf([]);
//=> 'array'
kindOf([1, 2, 3]);
//=> 'array'
kindOf(new Array());
//=> 'array'
kindOf(/foo/);
//=> 'regexp'
kindOf(new RegExp('foo'));
//=> 'regexp'
kindOf(function () {});
//=> 'function'
kindOf(function * () {});
//=> 'function'
kindOf(new Function());
//=> 'function'
kindOf(new Map());
//=> 'map'
kindOf(new WeakMap());
//=> 'weakmap'
kindOf(new Set());
//=> 'set'
kindOf(new WeakSet());
//=> 'weakset'
kindOf(Symbol('str'));
//=> 'symbol'
kindOf(new Int8Array());
//=> 'int8array'
kindOf(new Uint8Array());
//=> 'uint8array'
kindOf(new Uint8ClampedArray());
//=> 'uint8clampedarray'
kindOf(new Int16Array());
//=> 'int16array'
kindOf(new Uint16Array());
//=> 'uint16array'
kindOf(new Int32Array());
//=> 'int32array'
kindOf(new Uint32Array());
//=> 'uint32array'
kindOf(new Float32Array());
//=> 'float32array'
kindOf(new Float64Array());
//=> 'float64array'
```
## Benchmarks
Benchmarked against [typeof](http://github.com/CodingFu/typeof) and [type-of](https://github.com/ForbesLindesay/type-of).
Note that performance is slower for the ES6 features `Map`, `WeakMap`, `Set` and `WeakSet`.
```bash
#1: array
current x 23,329,397 ops/sec ±0.82% (94 runs sampled)
lib-type-of x 4,170,273 ops/sec ±0.55% (94 runs sampled)
lib-typeof x 9,686,935 ops/sec ±0.59% (98 runs sampled)
#2: boolean
current x 27,197,115 ops/sec ±0.85% (94 runs sampled)
lib-type-of x 3,145,791 ops/sec ±0.73% (97 runs sampled)
lib-typeof x 9,199,562 ops/sec ±0.44% (99 runs sampled)
#3: date
current x 20,190,117 ops/sec ±0.86% (92 runs sampled)
lib-type-of x 5,166,970 ops/sec ±0.74% (94 runs sampled)
lib-typeof x 9,610,821 ops/sec ±0.50% (96 runs sampled)
#4: function
current x 23,855,460 ops/sec ±0.60% (97 runs sampled)
lib-type-of x 5,667,740 ops/sec ±0.54% (100 runs sampled)
lib-typeof x 10,010,644 ops/sec ±0.44% (100 runs sampled)
#5: null
current x 27,061,047 ops/sec ±0.97% (96 runs sampled)
lib-type-of x 13,965,573 ops/sec ±0.62% (97 runs sampled)
lib-typeof x 8,460,194 ops/sec ±0.61% (97 runs sampled)
#6: number
current x 25,075,682 ops/sec ±0.53% (99 runs sampled)
lib-type-of x 2,266,405 ops/sec ±0.41% (98 runs sampled)
lib-typeof x 9,821,481 ops/sec ±0.45% (99 runs sampled)
#7: object
current x 3,348,980 ops/sec ±0.49% (99 runs sampled)
lib-type-of x 3,245,138 ops/sec ±0.60% (94 runs sampled)
lib-typeof x 9,262,952 ops/sec ±0.59% (99 runs sampled)
#8: regex
current x 21,284,827 ops/sec ±0.72% (96 runs sampled)
lib-type-of x 4,689,241 ops/sec ±0.43% (100 runs sampled)
lib-typeof x 8,957,593 ops/sec ±0.62% (98 runs sampled)
#9: string
current x 25,379,234 ops/sec ±0.58% (96 runs sampled)
lib-type-of x 3,635,148 ops/sec ±0.76% (93 runs sampled)
lib-typeof x 9,494,134 ops/sec ±0.49% (98 runs sampled)
#10: undef
current x 27,459,221 ops/sec ±1.01% (93 runs sampled)
lib-type-of x 14,360,433 ops/sec ±0.52% (99 runs sampled)
lib-typeof x 23,202,868 ops/sec ±0.59% (94 runs sampled)
```
## Optimizations
In 7 out of 8 cases, this library is 2x-10x faster than other top libraries included in the benchmarks. There are a few things that lead to this performance advantage, none of them hard and fast rules, but all of them simple and repeatable in almost any code library:
1. Optimize around the fastest and most common use cases first. Of course, this will change from project-to-project, but I took some time to understand how and why `typeof` checks were being used in my own libraries and other libraries I use a lot.
2. Optimize around bottlenecks - In other words, the order in which conditionals are implemented is significant, because each check is only as fast as the failing checks that came before it. Here, the biggest bottleneck by far is checking for plain objects (an object that was created by the `Object` constructor). I opted to make this check happen by process of elimination rather than brute force up front (e.g. by using something like `val.constructor.name`), so that every other type check would not be penalized by it.
3. Don't do unnecessary processing - why do `.slice(8, -1).toLowerCase();` just to get the word `regexp`? It's much faster to do `if (type === '[object RegExp]') return 'regexp'` (see the sketch below).
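The following is an editorial sketch (not from the upstream readme) illustrating points 2 and 3: comparing the raw `Object.prototype.toString` tag against a literal avoids the extra slicing and lowercasing, and the cheap, common checks can run first.
```js
var toString = Object.prototype.toString;

// Slower: normalize the tag before comparing.
function slowKind(val) {
  return toString.call(val).slice(8, -1).toLowerCase(); // '[object RegExp]' -> 'regexp'
}

// Faster: compare the raw tag once and return a constant string.
function fastKind(val) {
  var type = toString.call(val);
  if (type === '[object RegExp]') return 'regexp';
  if (type === '[object Date]') return 'date';
  return 'object'; // plain object, by process of elimination
}
```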
## About
### Related projects
* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet")
* [is-number](https://www.npmjs.com/package/is-number): Returns true if the value is a number. comprehensive tests. | [homepage](https://github.com/jonschlinkert/is-number "Returns true if the value is a number. comprehensive tests.")
* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. ")
### Contributing
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
### Contributors
| **Commits** | **Contributor** |
| --- | --- |
| 59 | [jonschlinkert](https://github.com/jonschlinkert) |
| 2 | [miguelmota](https://github.com/miguelmota) |
| 1 | [dtothefp](https://github.com/dtothefp) |
| 1 | [ksheedlo](https://github.com/ksheedlo) |
| 1 | [pdehaan](https://github.com/pdehaan) |
| 1 | [laggingreflex](https://github.com/laggingreflex) |
### Building docs
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
To generate the readme, run the following command:
```sh
$ npm install -g verbose/verb#dev verb-generate-readme && verb
```
### Running tests
Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
```sh
$ npm install && npm test
```
### Author
**Jon Schlinkert**
* [github/jonschlinkert](https://github.com/jonschlinkert)
* [twitter/jonschlinkert](https://twitter.com/jonschlinkert)
### License
Copyright © 2017, [Jon Schlinkert](https://github.com/jonschlinkert).
Released under the [MIT License](LICENSE).
***
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on May 16, 2017._

116
node_modules/align-text/node_modules/kind-of/index.js generated vendored Normal file
View File

@@ -0,0 +1,116 @@
var isBuffer = require('is-buffer');
var toString = Object.prototype.toString;
/**
* Get the native `typeof` a value.
*
* @param {*} `val`
* @return {*} Native javascript type
*/
module.exports = function kindOf(val) {
// primitives
if (typeof val === 'undefined') {
return 'undefined';
}
if (val === null) {
return 'null';
}
if (val === true || val === false || val instanceof Boolean) {
return 'boolean';
}
if (typeof val === 'string' || val instanceof String) {
return 'string';
}
if (typeof val === 'number' || val instanceof Number) {
return 'number';
}
// functions
if (typeof val === 'function' || val instanceof Function) {
return 'function';
}
// array
if (typeof Array.isArray !== 'undefined' && Array.isArray(val)) {
return 'array';
}
// check for instances of RegExp and Date before calling `toString`
if (val instanceof RegExp) {
return 'regexp';
}
if (val instanceof Date) {
return 'date';
}
// other objects
var type = toString.call(val);
if (type === '[object RegExp]') {
return 'regexp';
}
if (type === '[object Date]') {
return 'date';
}
if (type === '[object Arguments]') {
return 'arguments';
}
if (type === '[object Error]') {
return 'error';
}
// buffer
if (isBuffer(val)) {
return 'buffer';
}
// es6: Map, WeakMap, Set, WeakSet
if (type === '[object Set]') {
return 'set';
}
if (type === '[object WeakSet]') {
return 'weakset';
}
if (type === '[object Map]') {
return 'map';
}
if (type === '[object WeakMap]') {
return 'weakmap';
}
if (type === '[object Symbol]') {
return 'symbol';
}
// typed arrays
if (type === '[object Int8Array]') {
return 'int8array';
}
if (type === '[object Uint8Array]') {
return 'uint8array';
}
if (type === '[object Uint8ClampedArray]') {
return 'uint8clampedarray';
}
if (type === '[object Int16Array]') {
return 'int16array';
}
if (type === '[object Uint16Array]') {
return 'uint16array';
}
if (type === '[object Int32Array]') {
return 'int32array';
}
if (type === '[object Uint32Array]') {
return 'uint32array';
}
if (type === '[object Float32Array]') {
return 'float32array';
}
if (type === '[object Float64Array]') {
return 'float64array';
}
// must be a plain object
return 'object';
};

142
node_modules/align-text/node_modules/kind-of/package.json generated vendored Normal file
View File

@@ -0,0 +1,142 @@
{
"_args": [
[
"kind-of@3.2.2",
"/Users/lijianzhao/github/ChineseBQB"
]
],
"_from": "kind-of@3.2.2",
"_id": "kind-of@3.2.2",
"_inBundle": false,
"_integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=",
"_location": "/align-text/kind-of",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "kind-of@3.2.2",
"name": "kind-of",
"escapedName": "kind-of",
"rawSpec": "3.2.2",
"saveSpec": null,
"fetchSpec": "3.2.2"
},
"_requiredBy": [
"/align-text"
],
"_resolved": "http://registry.npm.taobao.org/kind-of/download/kind-of-3.2.2.tgz",
"_spec": "3.2.2",
"_where": "/Users/lijianzhao/github/ChineseBQB",
"author": {
"name": "Jon Schlinkert",
"url": "https://github.com/jonschlinkert"
},
"bugs": {
"url": "https://github.com/jonschlinkert/kind-of/issues"
},
"contributors": [
{
"name": "David Fox-Powell",
"url": "https://dtothefp.github.io/me"
},
{
"name": "Jon Schlinkert",
"url": "http://twitter.com/jonschlinkert"
},
{
"name": "Ken Sheedlo",
"url": "kensheedlo.com"
},
{
"name": "laggingreflex",
"url": "https://github.com/laggingreflex"
},
{
"name": "Miguel Mota",
"url": "https://miguelmota.com"
},
{
"name": "Peter deHaan",
"url": "http://about.me/peterdehaan"
}
],
"dependencies": {
"is-buffer": "^1.1.5"
},
"description": "Get the native type of a value.",
"devDependencies": {
"ansi-bold": "^0.1.1",
"benchmarked": "^1.0.0",
"browserify": "^14.3.0",
"glob": "^7.1.1",
"gulp-format-md": "^0.1.12",
"mocha": "^3.3.0",
"type-of": "^2.0.1",
"typeof": "^1.0.0"
},
"engines": {
"node": ">=0.10.0"
},
"files": [
"index.js"
],
"homepage": "https://github.com/jonschlinkert/kind-of",
"keywords": [
"arguments",
"array",
"boolean",
"check",
"date",
"function",
"is",
"is-type",
"is-type-of",
"kind",
"kind-of",
"number",
"object",
"of",
"regexp",
"string",
"test",
"type",
"type-of",
"typeof",
"types"
],
"license": "MIT",
"main": "index.js",
"name": "kind-of",
"repository": {
"type": "git",
"url": "git+https://github.com/jonschlinkert/kind-of.git"
},
"scripts": {
"prepublish": "browserify -o browser.js -e index.js -s index --bare",
"test": "mocha"
},
"verb": {
"related": {
"list": [
"is-glob",
"is-number",
"is-primitive"
]
},
"toc": false,
"layout": "default",
"tasks": [
"readme"
],
"plugins": [
"gulp-format-md"
],
"lint": {
"reflinks": true
},
"reflinks": [
"verb"
]
},
"version": "3.2.2"
}

83
node_modules/align-text/package.json generated vendored Normal file
View File

@@ -0,0 +1,83 @@
{
"_args": [
[
"align-text@0.1.4",
"/Users/lijianzhao/github/ChineseBQB"
]
],
"_from": "align-text@0.1.4",
"_id": "align-text@0.1.4",
"_inBundle": false,
"_integrity": "sha1-DNkKVhCT810KmSVsIrcGlDP60Rc=",
"_location": "/align-text",
"_phantomChildren": {
"is-buffer": "1.1.6"
},
"_requested": {
"type": "version",
"registry": true,
"raw": "align-text@0.1.4",
"name": "align-text",
"escapedName": "align-text",
"rawSpec": "0.1.4",
"saveSpec": null,
"fetchSpec": "0.1.4"
},
"_requiredBy": [
"/center-align",
"/right-align"
],
"_resolved": "http://registry.npm.taobao.org/align-text/download/align-text-0.1.4.tgz",
"_spec": "0.1.4",
"_where": "/Users/lijianzhao/github/ChineseBQB",
"author": {
"name": "Jon Schlinkert",
"url": "https://github.com/jonschlinkert"
},
"bugs": {
"url": "https://github.com/jonschlinkert/align-text/issues"
},
"dependencies": {
"kind-of": "^3.0.2",
"longest": "^1.0.1",
"repeat-string": "^1.5.2"
},
"description": "Align the text in a string.",
"devDependencies": {
"mocha": "*",
"should": "*",
"word-wrap": "^1.0.3"
},
"engines": {
"node": ">=0.10.0"
},
"files": [
"index.js"
],
"homepage": "https://github.com/jonschlinkert/align-text",
"keywords": [
"align",
"align-center",
"alignment",
"center",
"center-align",
"indent",
"pad",
"padding",
"right",
"right-align",
"text",
"typography"
],
"license": "MIT",
"main": "index.js",
"name": "align-text",
"repository": {
"type": "git",
"url": "git://github.com/jonschlinkert/align-text.git"
},
"scripts": {
"test": "mocha"
},
"version": "0.1.4"
}

58
node_modules/amdefine/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,58 @@
amdefine is released under two licenses: new BSD, and MIT. You may pick the
license that best suits your development needs. The text of both licenses are
provided below.
The "New" BSD License:
----------------------
Copyright (c) 2011-2016, The Dojo Foundation
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the Dojo Foundation nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
MIT License
-----------
Copyright (c) 2011-2016, The Dojo Foundation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

171
node_modules/amdefine/README.md generated vendored Normal file
View File

@@ -0,0 +1,171 @@
# amdefine
A module that can be used to implement AMD's define() in Node. This allows you
to code to the AMD API and have the module work in node programs without
requiring those other programs to use AMD.
## Usage
**1)** Update your package.json to indicate amdefine as a dependency:
```javascript
"dependencies": {
"amdefine": ">=0.1.0"
}
```
Then run `npm install` to get amdefine into your project.
**2)** At the top of each module that uses define(), place this code:
```javascript
if (typeof define !== 'function') { var define = require('amdefine')(module) }
```
**Only use these snippets** when loading amdefine. If you preserve the basic structure,
with the braces, it will be stripped out when using the [RequireJS optimizer](#optimizer).
You can add spaces, line breaks and even require amdefine with a local path, but
keep the rest of the structure to get the stripping behavior.
As you may know, because `if` statements in JavaScript don't have their own scope, the var
declaration in the above snippet is made whether the `if` expression is truthy or not. If
RequireJS is loaded then the declaration is superfluous because `define` is already
declared in the same scope in RequireJS. Fortunately JavaScript handles multiple `var`
declarations of the same variable in the same scope gracefully.
If you want to deliver amdefine.js with your code rather than specifying it as a dependency
with npm, then just download the latest release and refer to it using a relative path:
[Latest Version](https://github.com/jrburke/amdefine/raw/latest/amdefine.js)
### amdefine/intercept
Consider this very experimental.
Instead of pasting the piece of text for the amdefine setup of a `define`
variable in each module you create or consume, you can use `amdefine/intercept`
instead. It will automatically insert the above snippet in each .js file loaded
by Node.
**Warning**: you should only use this if you are creating an application that
is consuming AMD-style define()'d modules that are distributed via npm and want
to run that code in Node.
For library code where you are not sure if it will be used by others in Node or
in the browser, then explicitly depending on amdefine and placing the code
snippet above is the suggested path, instead of using `amdefine/intercept`. The
intercept module affects all .js files loaded in the Node app, and it is
inconsiderate to modify global state like that unless you are also controlling
the top level app.
#### Why distribute AMD-style modules via npm?
npm has a lot of weaknesses for front-end use (installed layout is not great,
should have better support for the `baseUrl + moduleID + '.js'` style of loading,
single file JS installs), but some people want a JS package manager and are
willing to live with those constraints. If that is you, but still want to author
in AMD style modules to get dynamic require([]), better direct source usage and
powerful loader plugin support in the browser, then this tool can help.
#### amdefine/intercept usage
Just require it in your top level app module (for example index.js, server.js):
```javascript
require('amdefine/intercept');
```
The module does not return a value, so no need to assign the result to a local
variable.
Then just require() code as you normally would with Node's require(). Any .js
loaded after the intercept require will have the amdefine check injected in
the .js source as it is loaded. It does not modify the source on disk, just
prepends some content to the text of the module as it is loaded by Node.
#### How amdefine/intercept works
It overrides the `Module._extensions['.js']` in Node to automatically prepend
the amdefine snippet above. So, it will affect any .js file loaded by your
app.
## define() usage
It is best if you use the anonymous forms of define() in your module:
```javascript
define(function (require) {
var dependency = require('dependency');
});
```
or
```javascript
define(['dependency'], function (dependency) {
});
```
## RequireJS optimizer integration. <a name="optimizer"></a>
Version 1.0.3 of the [RequireJS optimizer](http://requirejs.org/docs/optimization.html)
will have support for stripping the `if (typeof define !== 'function')` check
mentioned above, so you can include this snippet for code that runs in the
browser, but avoid taking the cost of the if() statement once the code is
optimized for deployment.
## Node 0.4 Support
If you want to support Node 0.4, then add `require` as the second parameter to amdefine:
```javascript
//Only if you want Node 0.4. If using 0.5 or later, use the above snippet.
if (typeof define !== 'function') { var define = require('amdefine')(module, require) }
```
## Limitations
### Synchronous vs Asynchronous
amdefine creates a define() function that is callable by your code. It will
execute and trace dependencies and call the factory function *synchronously*,
to keep the behavior in line with Node's synchronous dependency tracing.
The exception: calling AMD's callback-style require() from inside a factory
function. The require callback is called on process.nextTick():
```javascript
define(function (require) {
require(['a'], function(a) {
//'a' is loaded synchronously, but
//this callback is called on process.nextTick().
});
});
```
### Loader Plugins
Loader plugins are supported as long as they call their load() callbacks
synchronously. So ones that do network requests will not work. However plugins
like [text](http://requirejs.org/docs/api.html#text) can load text files locally.
The plugin API's `load.fromText()` is **not supported** in amdefine, so this means
transpiler plugins like the [CoffeeScript loader plugin](https://github.com/jrburke/require-cs)
will not work. This may be fixable, but it is a bit complex, and I do not have
enough node-fu to figure it out yet. See the source for amdefine.js if you want
to get an idea of the issues involved.
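For illustration only, here is a hedged sketch (not from the upstream readme) of the shape a plugin needs in order to work with amdefine; the file name and parameter names are hypothetical:
```javascript
// plugin-sketch.js (hypothetical): a loader plugin amdefine can handle must
// call its load() callback before returning, i.e. resolve synchronously.
if (typeof define !== 'function') { var define = require('amdefine')(module) }

define({
  load: function (name, parentRequire, onload, config) {
    // Resolve `name` synchronously and hand the value back; anything
    // asynchronous here would leave the dependency unresolved in amdefine.
    onload(parentRequire(name));
  }
});
```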
## Tests
To run the tests, cd to **tests** and run:
```
node all.js
node all-intercept.js
```
## License
New BSD and MIT. Check the LICENSE file for all the details.

301
node_modules/amdefine/amdefine.js generated vendored Normal file
View File

@@ -0,0 +1,301 @@
/** vim: et:ts=4:sw=4:sts=4
* @license amdefine 1.0.1 Copyright (c) 2011-2016, The Dojo Foundation All Rights Reserved.
* Available via the MIT or new BSD license.
* see: http://github.com/jrburke/amdefine for details
*/
/*jslint node: true */
/*global module, process */
'use strict';
/**
* Creates a define for node.
* @param {Object} module the "module" object that is defined by Node for the
* current module.
* @param {Function} [requireFn]. Node's require function for the current module.
* It only needs to be passed in Node versions before 0.5, when module.require
* did not exist.
* @returns {Function} a define function that is usable for the current node
* module.
*/
function amdefine(module, requireFn) {
'use strict';
var defineCache = {},
loaderCache = {},
alreadyCalled = false,
path = require('path'),
makeRequire, stringRequire;
/**
* Trims the . and .. from an array of path segments.
* It will keep a leading path segment if a .. will become
* the first path segment, to help with module name lookups,
* which act like paths, but can be remapped. But the end result,
* all paths that use this function should look normalized.
* NOTE: this method MODIFIES the input array.
* @param {Array} ary the array of path segments.
*/
function trimDots(ary) {
var i, part;
for (i = 0; ary[i]; i+= 1) {
part = ary[i];
if (part === '.') {
ary.splice(i, 1);
i -= 1;
} else if (part === '..') {
if (i === 1 && (ary[2] === '..' || ary[0] === '..')) {
//End of the line. Keep at least one non-dot
//path segment at the front so it can be mapped
//correctly to disk. Otherwise, there is likely
//no path mapping for a path starting with '..'.
//This can still fail, but catches the most reasonable
//uses of ..
break;
} else if (i > 0) {
ary.splice(i - 1, 2);
i -= 2;
}
}
}
}
function normalize(name, baseName) {
var baseParts;
//Adjust any relative paths.
if (name && name.charAt(0) === '.') {
//If have a base name, try to normalize against it,
//otherwise, assume it is a top-level require that will
//be relative to baseUrl in the end.
if (baseName) {
baseParts = baseName.split('/');
baseParts = baseParts.slice(0, baseParts.length - 1);
baseParts = baseParts.concat(name.split('/'));
trimDots(baseParts);
name = baseParts.join('/');
}
}
return name;
}
/**
* Create the normalize() function passed to a loader plugin's
* normalize method.
*/
function makeNormalize(relName) {
return function (name) {
return normalize(name, relName);
};
}
function makeLoad(id) {
function load(value) {
loaderCache[id] = value;
}
load.fromText = function (id, text) {
//This one is difficult because the text can, and probably does, use
//define, and any relative paths and requires should be relative
//to that id as it would be found on disk. But this would require
//bootstrapping a module/require fairly deeply from node core.
//Not sure how best to go about that yet.
throw new Error('amdefine does not implement load.fromText');
};
return load;
}
makeRequire = function (systemRequire, exports, module, relId) {
function amdRequire(deps, callback) {
if (typeof deps === 'string') {
//Synchronous, single module require('')
return stringRequire(systemRequire, exports, module, deps, relId);
} else {
//Array of dependencies with a callback.
//Convert the dependencies to modules.
deps = deps.map(function (depName) {
return stringRequire(systemRequire, exports, module, depName, relId);
});
//Wait for next tick to call back the require call.
if (callback) {
process.nextTick(function () {
callback.apply(null, deps);
});
}
}
}
amdRequire.toUrl = function (filePath) {
if (filePath.indexOf('.') === 0) {
return normalize(filePath, path.dirname(module.filename));
} else {
return filePath;
}
};
return amdRequire;
};
//Favor explicit value, passed in if the module wants to support Node 0.4.
requireFn = requireFn || function req() {
return module.require.apply(module, arguments);
};
function runFactory(id, deps, factory) {
var r, e, m, result;
if (id) {
e = loaderCache[id] = {};
m = {
id: id,
uri: __filename,
exports: e
};
r = makeRequire(requireFn, e, m, id);
} else {
//Only support one define call per file
if (alreadyCalled) {
throw new Error('amdefine with no module ID cannot be called more than once per file.');
}
alreadyCalled = true;
//Use the real variables from node
//Use module.exports for exports, since
//the exports in here is amdefine exports.
e = module.exports;
m = module;
r = makeRequire(requireFn, e, m, module.id);
}
//If there are dependencies, they are strings, so need
//to convert them to dependency values.
if (deps) {
deps = deps.map(function (depName) {
return r(depName);
});
}
//Call the factory with the right dependencies.
if (typeof factory === 'function') {
result = factory.apply(m.exports, deps);
} else {
result = factory;
}
if (result !== undefined) {
m.exports = result;
if (id) {
loaderCache[id] = m.exports;
}
}
}
stringRequire = function (systemRequire, exports, module, id, relId) {
//Split the ID by a ! so that
var index = id.indexOf('!'),
originalId = id,
prefix, plugin;
if (index === -1) {
id = normalize(id, relId);
//Straight module lookup. If it is one of the special dependencies,
//deal with it, otherwise, delegate to node.
if (id === 'require') {
return makeRequire(systemRequire, exports, module, relId);
} else if (id === 'exports') {
return exports;
} else if (id === 'module') {
return module;
} else if (loaderCache.hasOwnProperty(id)) {
return loaderCache[id];
} else if (defineCache[id]) {
runFactory.apply(null, defineCache[id]);
return loaderCache[id];
} else {
if(systemRequire) {
return systemRequire(originalId);
} else {
throw new Error('No module with ID: ' + id);
}
}
} else {
//There is a plugin in play.
prefix = id.substring(0, index);
id = id.substring(index + 1, id.length);
plugin = stringRequire(systemRequire, exports, module, prefix, relId);
if (plugin.normalize) {
id = plugin.normalize(id, makeNormalize(relId));
} else {
//Normalize the ID normally.
id = normalize(id, relId);
}
if (loaderCache[id]) {
return loaderCache[id];
} else {
plugin.load(id, makeRequire(systemRequire, exports, module, relId), makeLoad(id), {});
return loaderCache[id];
}
}
};
//Create a define function specific to the module asking for amdefine.
function define(id, deps, factory) {
if (Array.isArray(id)) {
factory = deps;
deps = id;
id = undefined;
} else if (typeof id !== 'string') {
factory = id;
id = deps = undefined;
}
if (deps && !Array.isArray(deps)) {
factory = deps;
deps = undefined;
}
if (!deps) {
deps = ['require', 'exports', 'module'];
}
//Set up properties for this module. If an ID, then use
//internal cache. If no ID, then use the external variables
//for this node module.
if (id) {
//Put the module in deep freeze until there is a
//require call for it.
defineCache[id] = [id, deps, factory];
} else {
runFactory(id, deps, factory);
}
}
//define.require, which has access to all the values in the
//cache. Useful for AMD modules that all have IDs in the file,
//but need to finally export a value to node based on one of those
//IDs.
define.require = function (id) {
if (loaderCache[id]) {
return loaderCache[id];
}
if (defineCache[id]) {
runFactory.apply(null, defineCache[id]);
return loaderCache[id];
}
};
define.amd = {};
return define;
}
module.exports = amdefine;
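An editorial usage sketch (not part of the vendored file): named define() calls are parked in the internal cache until requested, and define.require() lets a file that only uses IDs pick one of them as its Node export.
```javascript
// sketch.js (hypothetical file): several named modules in one file.
if (typeof define !== 'function') { var define = require('amdefine')(module) }

define('greeting', [], function () {
  return 'hello';
});

define('shout', ['greeting'], function (greeting) {
  return greeting.toUpperCase();
});

// Export one of the cached IDs to Node.
module.exports = define.require('shout'); //=> 'HELLO'
```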

36
node_modules/amdefine/intercept.js generated vendored Normal file
View File

@@ -0,0 +1,36 @@
/*jshint node: true */
var inserted,
Module = require('module'),
fs = require('fs'),
existingExtFn = Module._extensions['.js'],
amdefineRegExp = /amdefine\.js/;
inserted = "if (typeof define !== 'function') {var define = require('amdefine')(module)}";
//From the node/lib/module.js source:
function stripBOM(content) {
// Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
// because the buffer-to-string conversion in `fs.readFileSync()`
// translates it to FEFF, the UTF-16 BOM.
if (content.charCodeAt(0) === 0xFEFF) {
content = content.slice(1);
}
return content;
}
//Also adapted from the node/lib/module.js source:
function intercept(module, filename) {
var content = stripBOM(fs.readFileSync(filename, 'utf8'));
if (!amdefineRegExp.test(module.id)) {
content = inserted + content;
}
module._compile(content, filename);
}
intercept._id = 'amdefine/intercept';
if (!existingExtFn._id || existingExtFn._id !== intercept._id) {
Module._extensions['.js'] = intercept;
}

51
node_modules/amdefine/package.json generated vendored Normal file
View File

@@ -0,0 +1,51 @@
{
"_args": [
[
"amdefine@1.0.1",
"/Users/lijianzhao/github/ChineseBQB"
]
],
"_from": "amdefine@1.0.1",
"_id": "amdefine@1.0.1",
"_inBundle": false,
"_integrity": "sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU=",
"_location": "/amdefine",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "amdefine@1.0.1",
"name": "amdefine",
"escapedName": "amdefine",
"rawSpec": "1.0.1",
"saveSpec": null,
"fetchSpec": "1.0.1"
},
"_requiredBy": [
"/stylus/source-map"
],
"_resolved": "http://registry.npm.taobao.org/amdefine/download/amdefine-1.0.1.tgz",
"_spec": "1.0.1",
"_where": "/Users/lijianzhao/github/ChineseBQB",
"author": {
"name": "James Burke",
"email": "jrburke@gmail.com",
"url": "http://github.com/jrburke"
},
"bugs": {
"url": "https://github.com/jrburke/amdefine/issues"
},
"description": "Provide AMD's define() API for declaring modules in the AMD format",
"engines": {
"node": ">=0.4.2"
},
"homepage": "http://github.com/jrburke/amdefine",
"license": "BSD-3-Clause OR MIT",
"main": "./amdefine.js",
"name": "amdefine",
"repository": {
"type": "git",
"url": "git+https://github.com/jrburke/amdefine.git"
},
"version": "1.0.1"
}

4
node_modules/ansi-regex/index.js generated vendored Normal file
View File

@@ -0,0 +1,4 @@
'use strict';
module.exports = function () {
return /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nqry=><]/g;
};

21
node_modules/ansi-regex/license generated vendored Normal file
View File

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

116
node_modules/ansi-regex/package.json generated vendored Normal file
View File

@@ -0,0 +1,116 @@
{
"_args": [
[
"ansi-regex@2.1.1",
"/Users/lijianzhao/github/ChineseBQB"
]
],
"_from": "ansi-regex@2.1.1",
"_id": "ansi-regex@2.1.1",
"_inBundle": false,
"_integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=",
"_location": "/ansi-regex",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "ansi-regex@2.1.1",
"name": "ansi-regex",
"escapedName": "ansi-regex",
"rawSpec": "2.1.1",
"saveSpec": null,
"fetchSpec": "2.1.1"
},
"_requiredBy": [
"/cliui/strip-ansi",
"/has-ansi",
"/hexo-log/strip-ansi",
"/hexo-server/strip-ansi",
"/string-width/strip-ansi",
"/wrap-ansi/strip-ansi"
],
"_resolved": "http://registry.npm.taobao.org/ansi-regex/download/ansi-regex-2.1.1.tgz",
"_spec": "2.1.1",
"_where": "/Users/lijianzhao/github/ChineseBQB",
"author": {
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "sindresorhus.com"
},
"bugs": {
"url": "https://github.com/chalk/ansi-regex/issues"
},
"description": "Regular expression for matching ANSI escape codes",
"devDependencies": {
"ava": "0.17.0",
"xo": "0.16.0"
},
"engines": {
"node": ">=0.10.0"
},
"files": [
"index.js"
],
"homepage": "https://github.com/chalk/ansi-regex#readme",
"keywords": [
"ansi",
"styles",
"color",
"colour",
"colors",
"terminal",
"console",
"cli",
"string",
"tty",
"escape",
"formatting",
"rgb",
"256",
"shell",
"xterm",
"command-line",
"text",
"regex",
"regexp",
"re",
"match",
"test",
"find",
"pattern"
],
"license": "MIT",
"maintainers": [
{
"name": "Sindre Sorhus",
"email": "sindresorhus@gmail.com",
"url": "sindresorhus.com"
},
{
"name": "Joshua Appelman",
"email": "jappelman@xebia.com",
"url": "jbnicolai.com"
},
{
"name": "JD Ballard",
"email": "i.am.qix@gmail.com",
"url": "github.com/qix-"
}
],
"name": "ansi-regex",
"repository": {
"type": "git",
"url": "git+https://github.com/chalk/ansi-regex.git"
},
"scripts": {
"test": "xo && ava --verbose",
"view-supported": "node fixtures/view-codes.js"
},
"version": "2.1.1",
"xo": {
"rules": {
"guard-for-in": 0,
"no-loop-func": 0
}
}
}

39
node_modules/ansi-regex/readme.md generated vendored Normal file
View File

@@ -0,0 +1,39 @@
# ansi-regex [![Build Status](https://travis-ci.org/chalk/ansi-regex.svg?branch=master)](https://travis-ci.org/chalk/ansi-regex)
> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
## Install
```
$ npm install --save ansi-regex
```
## Usage
```js
const ansiRegex = require('ansi-regex');
ansiRegex().test('\u001b[4mcake\u001b[0m');
//=> true
ansiRegex().test('cake');
//=> false
'\u001b[4mcake\u001b[0m'.match(ansiRegex());
//=> ['\u001b[4m', '\u001b[0m']
```
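A further usage sketch (editorial, not from the upstream readme): because the regex is global, it also works for stripping codes with `String.prototype.replace`:
```js
'\u001b[4mcake\u001b[0m'.replace(ansiRegex(), '');
//=> 'cake'
```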
## FAQ
### Why do you test for codes not in the ECMA 48 standard?
Some of the codes we run as a test are codes that we acquired finding various lists of non-standard or manufacturer specific codes. If I recall correctly, we test for both standard and non-standard codes, as most of them follow the same or similar format and can be safely matched in strings without the risk of removing actual string content. There are a few non-standard control codes that do not follow the traditional format (i.e. they end in numbers) thus forcing us to exclude them from the test because we cannot reliably match them.
On the historical side, those ECMA standards were established in the early 90's whereas the VT100, for example, was designed in the mid/late 70's. At that point in time, control codes were still pretty ungoverned and engineers used them for a multitude of things, namely to activate hardware ports that may have been proprietary. Another place you see a similar 'anarchy' of codes is the x86 architecture for processors; there are a ton of "interrupts" that can mean different things on certain brands of processors, most of which have been phased out.
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)

Some files were not shown because too many files have changed in this diff