Commit 5ac04cbf authored by Dmitry Nikulin, committed by Dmitry

First commit; the clipPath case-sensitivity issue is still present


Too many changes to show.

To preserve performance only 1000 of 1000+ files are displayed.

../acorn/bin/acorn
\ No newline at end of file
../escodegen/bin/escodegen.js
\ No newline at end of file
../escodegen/bin/esgenerate.js
\ No newline at end of file
../esprima/bin/esparse.js
\ No newline at end of file
../esprima/bin/esvalidate.js
\ No newline at end of file
../har-validator/bin/har-validator
\ No newline at end of file
../sshpk/bin/sshpk-conv
\ No newline at end of file
../sshpk/bin/sshpk-sign
\ No newline at end of file
../sshpk/bin/sshpk-verify
\ No newline at end of file
../node-uuid/bin/uuid
\ No newline at end of file
## 1.0.3
- Replaced `let` with `var` in `lib/btoa.js`
- Follow up from `1.0.2`
- Resolves https://github.com/jsdom/abab/issues/18
## 1.0.2
- Replaced `const` with `var` in `index.js`
- Allows use of `abab` in the browser without a transpilation step
- Resolves https://github.com/jsdom/abab/issues/15
# abab
[![npm version](https://badge.fury.io/js/abab.svg)](https://www.npmjs.com/package/abab) [![Build Status](https://travis-ci.org/jsdom/abab.svg?branch=master)](https://travis-ci.org/jsdom/abab)
A module that implements `window.atob` and `window.btoa` according to the [WHATWG spec](https://html.spec.whatwg.org/multipage/webappapis.html#atob). The code is originally from [w3c/web-platform-tests](https://github.com/w3c/web-platform-tests/blob/master/html/webappapis/atob/base64.html).
Compatibility: Node.js version 3+ and all major browsers (using browserify or webpack)
Install with `npm`:
```sh
npm install abab
```
## API
### `btoa` (base64 encode)
```js
const btoa = require('abab').btoa;
btoa('Hello, world!'); // 'SGVsbG8sIHdvcmxkIQ=='
```
### `atob` (base64 decode)
```js
const atob = require('abab').atob;
atob('SGVsbG8sIHdvcmxkIQ=='); // 'Hello, world!'
```
#### Valid characters
[Per the spec](https://html.spec.whatwg.org/multipage/webappapis.html#atob:dom-windowbase64-btoa-3), `btoa` will accept strings "containing only characters in the range `U+0000` to `U+00FF`." If passed a string with characters above `U+00FF`, `btoa` will return `null`. If `atob` is passed a string that is not base64-valid, it will also return `null`. In both cases when `null` is returned, the spec calls for throwing a `DOMException` of type `InvalidCharacterError`.
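To illustrate those failure modes, here is a small sketch (the inputs are just examples; any character above `U+00FF`, or any string that is not valid base64, behaves the same way):
```js
const atob = require('abab').atob;
const btoa = require('abab').btoa;

btoa('Snowman: ☃');        // null -- '☃' (U+2603) is above U+00FF
atob('not valid base64!'); // null -- '!' is not in the base64 alphabet
```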
## Browsers
If you want to include just one of the methods to save bytes in your client-side code, you can `require` the desired module directly.
```js
var atob = require('abab/lib/atob');
var btoa = require('abab/lib/btoa');
```
-----
### Checklists
If you're **submitting a PR** or **deploying to npm**, please use the [checklists in CONTRIBUTING.md](https://github.com/jsdom/abab/blob/master/CONTRIBUTING.md#checklists)
### Remembering `atob` vs. `btoa`
Here's a mnemonic that might be useful: if you have a plain string and want to base64 encode it, then decode it, `btoa` is what you run before (**b**efore - **b**toa), and `atob` is what you run after (**a**fter - **a**tob).
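A quick round trip illustrates the mnemonic, using the same example string as the API section above:
```js
const abab = require('abab');

const encoded = abab.btoa('Hello, world!'); // before: 'SGVsbG8sIHdvcmxkIQ=='
const decoded = abab.atob(encoded);         // after:  'Hello, world!'
```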
'use strict';
var atob = require('./lib/atob');
var btoa = require('./lib/btoa');
module.exports = {
atob: atob,
btoa: btoa
};
'use strict';
/**
* Implementation of atob() according to the HTML spec, except that instead of
* throwing INVALID_CHARACTER_ERR we return null.
*/
function atob(input) {
// WebIDL requires DOMStrings to just be converted using ECMAScript
// ToString, which in our case amounts to calling String().
input = String(input);
// "Remove all space characters from input."
input = input.replace(/[ \t\n\f\r]/g, '');
// "If the length of input divides by 4 leaving no remainder, then: if
// input ends with one or two U+003D EQUALS SIGN (=) characters, remove
// them from input."
if (input.length % 4 == 0 && /==?$/.test(input)) {
input = input.replace(/==?$/, '');
}
// "If the length of input divides by 4 leaving a remainder of 1, throw an
// INVALID_CHARACTER_ERR exception and abort these steps."
//
// "If input contains a character that is not in the following list of
// characters and character ranges, throw an INVALID_CHARACTER_ERR
// exception and abort these steps:
//
// U+002B PLUS SIGN (+)
// U+002F SOLIDUS (/)
// U+0030 DIGIT ZERO (0) to U+0039 DIGIT NINE (9)
// U+0041 LATIN CAPITAL LETTER A to U+005A LATIN CAPITAL LETTER Z
// U+0061 LATIN SMALL LETTER A to U+007A LATIN SMALL LETTER Z"
if (input.length % 4 == 1 || !/^[+/0-9A-Za-z]*$/.test(input)) {
return null;
}
// "Let output be a string, initially empty."
var output = '';
// "Let buffer be a buffer that can have bits appended to it, initially
// empty."
//
// We append bits via left-shift and or. accumulatedBits is used to track
// when we've gotten to 24 bits.
var buffer = 0;
var accumulatedBits = 0;
// "While position does not point past the end of input, run these
// substeps:"
for (var i = 0; i < input.length; i++) {
// "Find the character pointed to by position in the first column of
// the following table. Let n be the number given in the second cell of
// the same row."
//
// "Append to buffer the six bits corresponding to number, most
// significant bit first."
//
// atobLookup() implements the table from the spec.
buffer <<= 6;
buffer |= atobLookup(input[i]);
// "If buffer has accumulated 24 bits, interpret them as three 8-bit
// big-endian numbers. Append the three characters with code points
// equal to those numbers to output, in the same order, and then empty
// buffer."
accumulatedBits += 6;
if (accumulatedBits == 24) {
output += String.fromCharCode((buffer & 0xff0000) >> 16);
output += String.fromCharCode((buffer & 0xff00) >> 8);
output += String.fromCharCode(buffer & 0xff);
buffer = accumulatedBits = 0;
}
// "Advance position by one character."
}
// "If buffer is not empty, it contains either 12 or 18 bits. If it
// contains 12 bits, discard the last four and interpret the remaining
// eight as an 8-bit big-endian number. If it contains 18 bits, discard the
// last two and interpret the remaining 16 as two 8-bit big-endian numbers.
// Append the one or two characters with code points equal to those one or
// two numbers to output, in the same order."
if (accumulatedBits == 12) {
buffer >>= 4;
output += String.fromCharCode(buffer);
} else if (accumulatedBits == 18) {
buffer >>= 2;
output += String.fromCharCode((buffer & 0xff00) >> 8);
output += String.fromCharCode(buffer & 0xff);
}
// "Return output."
return output;
}
/**
* A lookup table for atob(), which converts an ASCII character to the
* corresponding six-bit number.
*/
function atobLookup(chr) {
if (/[A-Z]/.test(chr)) {
return chr.charCodeAt(0) - 'A'.charCodeAt(0);
}
if (/[a-z]/.test(chr)) {
return chr.charCodeAt(0) - 'a'.charCodeAt(0) + 26;
}
if (/[0-9]/.test(chr)) {
return chr.charCodeAt(0) - '0'.charCodeAt(0) + 52;
}
if (chr == '+') {
return 62;
}
if (chr == '/') {
return 63;
}
// Throw exception; should not be hit in tests
}
module.exports = atob;
'use strict';
/**
* btoa() as defined by the HTML5 spec, which mostly just references RFC4648.
*/
function btoa(s) {
var i;
// String conversion as required by WebIDL.
s = String(s);
// "The btoa() method must throw an INVALID_CHARACTER_ERR exception if the
// method's first argument contains any character whose code point is
// greater than U+00FF."
for (i = 0; i < s.length; i++) {
if (s.charCodeAt(i) > 255) {
return null;
}
}
var out = '';
for (i = 0; i < s.length; i += 3) {
var groupsOfSix = [undefined, undefined, undefined, undefined];
groupsOfSix[0] = s.charCodeAt(i) >> 2;
groupsOfSix[1] = (s.charCodeAt(i) & 0x03) << 4;
if (s.length > i + 1) {
groupsOfSix[1] |= s.charCodeAt(i + 1) >> 4;
groupsOfSix[2] = (s.charCodeAt(i + 1) & 0x0f) << 2;
}
if (s.length > i + 2) {
groupsOfSix[2] |= s.charCodeAt(i + 2) >> 6;
groupsOfSix[3] = s.charCodeAt(i + 2) & 0x3f;
}
for (var j = 0; j < groupsOfSix.length; j++) {
if (typeof groupsOfSix[j] == 'undefined') {
out += '=';
} else {
out += btoaLookup(groupsOfSix[j]);
}
}
}
return out;
}
/**
* Lookup table for btoa(), which converts a six-bit number into the
* corresponding ASCII character.
*/
function btoaLookup(idx) {
if (idx < 26) {
return String.fromCharCode(idx + 'A'.charCodeAt(0));
}
if (idx < 52) {
return String.fromCharCode(idx - 26 + 'a'.charCodeAt(0));
}
if (idx < 62) {
return String.fromCharCode(idx - 52 + '0'.charCodeAt(0));
}
if (idx == 62) {
return '+';
}
if (idx == 63) {
return '/';
}
// Throw INVALID_CHARACTER_ERR exception here -- won't be hit in the tests.
}
module.exports = btoa;
{
"_args": [
[
"abab@^1.0.0",
"/home/theowl/projects/tmp/test_task/node_modules/jsdom"
]
],
"_from": "abab@>=1.0.0 <2.0.0",
"_id": "abab@1.0.3",
"_inCache": true,
"_installable": true,
"_location": "/abab",
"_nodeVersion": "4.1.2",
"_npmUser": {
"email": "gcarpenterv@gmail.com",
"name": "jeffcarp"
},
"_npmVersion": "2.14.4",
"_phantomChildren": {},
"_requested": {
"name": "abab",
"raw": "abab@^1.0.0",
"rawSpec": "^1.0.0",
"scope": null,
"spec": ">=1.0.0 <2.0.0",
"type": "range"
},
"_requiredBy": [
"/jsdom"
],
"_resolved": "https://registry.npmjs.org/abab/-/abab-1.0.3.tgz",
"_shasum": "b81de5f7274ec4e756d797cd834f303642724e5d",
"_shrinkwrap": null,
"_spec": "abab@^1.0.0",
"_where": "/home/theowl/projects/tmp/test_task/node_modules/jsdom",
"author": {
"email": "gcarpenterv@gmail.com",
"name": "Jeff Carpenter"
},
"bugs": {
"url": "https://github.com/jsdom/abab/issues"
},
"dependencies": {},
"description": "WHATWG spec-compliant implementations of window.atob and window.btoa.",
"devDependencies": {
"babel-core": "^6.1.4",
"babel-loader": "^6.1.0",
"babel-preset-es2015": "^6.1.4",
"eslint": "^1.3.1",
"jscs": "^2.1.1",
"karma": "^0.13.10",
"karma-cli": "^0.1.1",
"karma-firefox-launcher": "^0.1.6",
"karma-mocha": "^0.2.0",
"karma-sauce-launcher": "^0.2.14",
"karma-webpack": "^1.7.0",
"mocha": "^2.2.5",
"webpack": "^1.12.2"
},
"directories": {},
"dist": {
"shasum": "b81de5f7274ec4e756d797cd834f303642724e5d",
"tarball": "http://registry.npmjs.org/abab/-/abab-1.0.3.tgz"
},
"files": [
"index.js",
"lib/"
],
"gitHead": "223c06f29e0e4d4f3bc11164f762898474158c3a",
"homepage": "https://github.com/jsdom/abab#readme",
"keywords": [
"atob",
"browser",
"btoa"
],
"license": "ISC",
"main": "index.js",
"maintainers": [
{
"name": "jeffcarp",
"email": "gcarpenterv@gmail.com"
}
],
"name": "abab",
"optionalDependencies": {},
"readme": "ERROR: No README data found!",
"repository": {
"type": "git",
"url": "git+https://github.com/jsdom/abab.git"
},
"scripts": {
"karma": "karma start",
"lint": "jscs . && eslint .",
"mocha": "mocha test/node",
"test": "npm run lint && npm run mocha && npm run karma"
},
"version": "1.0.3"
}
Copyright (c) 2014 Forbes Lindesay
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
\ No newline at end of file
# acorn-globals
Detect global variables in JavaScript using acorn
[![Build Status](https://img.shields.io/travis/ForbesLindesay/acorn-globals/master.svg)](https://travis-ci.org/ForbesLindesay/acorn-globals)
[![Dependency Status](https://img.shields.io/david/ForbesLindesay/acorn-globals.svg)](https://david-dm.org/ForbesLindesay/acorn-globals)
[![NPM version](https://img.shields.io/npm/v/acorn-globals.svg)](https://www.npmjs.org/package/acorn-globals)
## Installation
npm install acorn-globals
## Usage
detect.js
```js
var fs = require('fs');
var detect = require('acorn-globals');
var src = fs.readFileSync(__dirname + '/input.js', 'utf8');
var scope = detect(src);
console.dir(scope);
```
input.js
```js
var x = 5;
var y = 3, z = 2;
w.foo();
w = 2;
RAWR=444;
RAWR.foo();
BLARG=3;
foo(function () {
var BAR = 3;
process.nextTick(function (ZZZZZZZZZZZZ) {
console.log('beep boop');
var xyz = 4;
x += 10;
x.zzzzzz;
ZZZ=6;
});
function doom () {
}
ZZZ.foo();
});
console.log(xyz);
```
output:
```
$ node example/detect.js
[ { name: 'BLARG', nodes: [ [Object] ] },
{ name: 'RAWR', nodes: [ [Object], [Object] ] },
{ name: 'ZZZ', nodes: [ [Object], [Object] ] },
{ name: 'console', nodes: [ [Object], [Object] ] },
{ name: 'foo', nodes: [ [Object] ] },
{ name: 'process', nodes: [ [Object] ] },
{ name: 'w', nodes: [ [Object], [Object] ] },
{ name: 'xyz', nodes: [ [Object] ] } ]
```
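Each `[Object]` in that output is the acorn AST node for one reference to the global (the module also attaches a `parents` array to each node), so character offsets are available. A short sketch, continuing the `detect.js` example above:
```js
// Continuing detect.js: print where each global is referenced.
scope.forEach(function (global) {
  global.nodes.forEach(function (node) {
    // node.start/node.end are character offsets into the source string
    console.log(global.name + ' used at offsets ' + node.start + '-' + node.end);
  });
});
```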
## License
MIT
'use strict';
var acorn = require('acorn');
var walk = require('acorn/dist/walk');
function isScope(node) {
return node.type === 'FunctionExpression' || node.type === 'FunctionDeclaration' || node.type === 'ArrowFunctionExpression' || node.type === 'Program';
}
function isBlockScope(node) {
return node.type === 'BlockStatement' || isScope(node);
}
function declaresArguments(node) {
return node.type === 'FunctionExpression' || node.type === 'FunctionDeclaration';
}
function declaresThis(node) {
return node.type === 'FunctionExpression' || node.type === 'FunctionDeclaration';
}
function reallyParse(source) {
try {
return acorn.parse(source, {
ecmaVersion: 6,
allowReturnOutsideFunction: true,
allowImportExportEverywhere: true,
allowHashBang: true
});
} catch (ex) {
return acorn.parse(source, {
ecmaVersion: 5,
allowReturnOutsideFunction: true,
allowImportExportEverywhere: true,
allowHashBang: true
});
}
}
module.exports = findGlobals;
module.exports.parse = reallyParse;
function findGlobals(source) {
var globals = [];
var ast;
// istanbul ignore else
if (typeof source === 'string') {
ast = reallyParse(source);
} else {
ast = source;
}
// istanbul ignore if
if (!(ast && typeof ast === 'object' && ast.type === 'Program')) {
throw new TypeError('Source must be either a string of JavaScript or an acorn AST');
}
var declareFunction = function (node) {
var fn = node;
fn.locals = fn.locals || {};
node.params.forEach(function (node) {
declarePattern(node, fn);
});
if (node.id) {
fn.locals[node.id.name] = true;
}
}
var declarePattern = function (node, parent) {
switch (node.type) {
case 'Identifier':
parent.locals[node.name] = true;
break;
case 'ObjectPattern':
node.properties.forEach(function (node) {
declarePattern(node.value, parent);
});
break;
case 'ArrayPattern':
node.elements.forEach(function (node) {
if (node) declarePattern(node, parent);
});
break;
case 'RestElement':
declarePattern(node.argument, parent);
break;
case 'AssignmentPattern':
declarePattern(node.left, parent);
break;
// istanbul ignore next
default:
throw new Error('Unrecognized pattern type: ' + node.type);
}
}
var declareModuleSpecifier = function (node, parents) {
ast.locals = ast.locals || {};
ast.locals[node.local.name] = true;
}
walk.ancestor(ast, {
'VariableDeclaration': function (node, parents) {
var parent = null;
for (var i = parents.length - 1; i >= 0 && parent === null; i--) {
if (node.kind === 'var' ? isScope(parents[i]) : isBlockScope(parents[i])) {
parent = parents[i];
}
}
parent.locals = parent.locals || {};
node.declarations.forEach(function (declaration) {
declarePattern(declaration.id, parent);
});
},
'FunctionDeclaration': function (node, parents) {
var parent = null;
for (var i = parents.length - 2; i >= 0 && parent === null; i--) {
if (isScope(parents[i])) {
parent = parents[i];
}
}
parent.locals = parent.locals || {};
parent.locals[node.id.name] = true;
declareFunction(node);
},
'Function': declareFunction,
'ClassDeclaration': function (node, parents) {
var parent = null;
for (var i = parents.length - 2; i >= 0 && parent === null; i--) {
if (isScope(parents[i])) {
parent = parents[i];
}
}
parent.locals = parent.locals || {};
parent.locals[node.id.name] = true;
},
'TryStatement': function (node) {
if (node.handler === null) return;
node.handler.body.locals = node.handler.body.locals || {};
node.handler.body.locals[node.handler.param.name] = true;
},
'ImportDefaultSpecifier': declareModuleSpecifier,
'ImportSpecifier': declareModuleSpecifier,
'ImportNamespaceSpecifier': declareModuleSpecifier
});
function identifier(node, parents) {
var name = node.name;
if (name === 'undefined') return;
for (var i = 0; i < parents.length; i++) {
if (name === 'arguments' && declaresArguments(parents[i])) {
return;
}
if (parents[i].locals && name in parents[i].locals) {
return;
}
}
if (
parents[parents.length - 2] &&
parents[parents.length - 2].type === 'TryStatement' &&
parents[parents.length - 2].handler &&
node === parents[parents.length - 2].handler.param
) {
return;
}
node.parents = parents;
globals.push(node);
}
walk.ancestor(ast, {
'VariablePattern': identifier,
'Identifier': identifier,
'ThisExpression': function (node, parents) {
for (var i = 0; i < parents.length; i++) {
if (declaresThis(parents[i])) {
return;
}
}
node.parents = parents;
globals.push(node);
}
});
var groupedGlobals = {};
globals.forEach(function (node) {
groupedGlobals[node.name] = (groupedGlobals[node.name] || []);
groupedGlobals[node.name].push(node);
});
return Object.keys(groupedGlobals).sort().map(function (name) {
return {name: name, nodes: groupedGlobals[name]};
});
}
{
"_args": [
[
"acorn-globals@^1.0.4",
"/home/theowl/projects/tmp/test_task/node_modules/jsdom"
]
],
"_from": "acorn-globals@>=1.0.4 <2.0.0",
"_id": "acorn-globals@1.0.9",
"_inCache": true,
"_installable": true,
"_location": "/acorn-globals",
"_nodeVersion": "1.6.2",
"_npmUser": {
"email": "forbes@lindesay.co.uk",
"name": "forbeslindesay"
},
"_npmVersion": "2.7.1",
"_phantomChildren": {},
"_requested": {
"name": "acorn-globals",
"raw": "acorn-globals@^1.0.4",
"rawSpec": "^1.0.4",
"scope": null,
"spec": ">=1.0.4 <2.0.0",
"type": "range"
},
"_requiredBy": [
"/jsdom"
],
"_resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-1.0.9.tgz",
"_shasum": "55bb5e98691507b74579d0513413217c380c54cf",
"_shrinkwrap": null,
"_spec": "acorn-globals@^1.0.4",
"_where": "/home/theowl/projects/tmp/test_task/node_modules/jsdom",
"author": {
"name": "ForbesLindesay"
},
"bugs": {
"url": "https://github.com/ForbesLindesay/acorn-globals/issues"
},
"dependencies": {
"acorn": "^2.1.0"
},
"description": "Detect global variables in JavaScript using acorn",
"devDependencies": {
"testit": "^2.0.2"
},
"directories": {},
"dist": {
"shasum": "55bb5e98691507b74579d0513413217c380c54cf",
"tarball": "http://registry.npmjs.org/acorn-globals/-/acorn-globals-1.0.9.tgz"
},
"files": [
"LICENSE",
"index.js"
],
"gitHead": "808cab09764b63679138b012602ca1bb51657f97",
"homepage": "https://github.com/ForbesLindesay/acorn-globals",
"keywords": [
"ast",
"global",
"implicit",
"lexical",
"local",
"name",
"scope",
"variable"
],
"license": "MIT",
"maintainers": [
{
"name": "forbeslindesay",
"email": "forbes@lindesay.co.uk"
},
{
"name": "timothygu",
"email": "timothygu99@gmail.com"
}
],
"name": "acorn-globals",
"optionalDependencies": {},
"readme": "ERROR: No README data found!",
"repository": {
"type": "git",
"url": "git+https://github.com/ForbesLindesay/acorn-globals.git"
},
"scripts": {
"test": "node test"
},
"version": "1.0.9"
}
root = true
[*]
indent_style = space
indent_size = 2
end_of_line = lf
insert_final_newline = true
/.tern-port
/test
/local
{
"plugins": {
"node": true,
"es_modules": true
}
}
\ No newline at end of file
language: node_js
sudo: false
node_js:
- '0.10'
- '0.12'
- '4'
List of Acorn contributors. Updated before every release.
Adrian Rakovsky
Alistair Braidwood
Andres Suarez
Aparajita Fishman
Arian Stolwijk
Artem Govorov
Brandon Mills
Charles Hughes
Conrad Irwin
David Bonnet
ForbesLindesay
Forbes Lindesay
Gilad Peleg
impinball
Ingvar Stepanyan
Jesse McCarthy
Jiaxing Wang
Joel Kemp
Johannes Herr
Jürg Lehni
keeyipchan
Kevin Kwok
krator
Marijn Haverbeke
Martin Carlberg
Mathias Bynens
Mathieu 'p01' Henri
Max Schaefer
Max Zerzouri
Mihai Bazon
Mike Rennie
Nick Fitzgerald
Oskar Schöldström
Paul Harper
Peter Rust
PlNG
r-e-d
Rich Harris
Sebastian McKenzie
Timothy Gu
zsjforcn
Copyright (C) 2012-2014 by various contributors (see AUTHORS)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
#!/usr/bin/env node
"use strict";
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj["default"] = obj; return newObj; } }
var _path = require("path");
var _fs = require("fs");
var _distAcornJs = require("../dist/acorn.js");
var acorn = _interopRequireWildcard(_distAcornJs);
var infile = undefined,
forceFile = undefined,
silent = false,
compact = false,
tokenize = false;
var options = {};
function help(status) {
var print = status == 0 ? console.log : console.error;
print("usage: " + (0, _path.basename)(process.argv[1]) + " [--ecma3|--ecma5|--ecma6]");
print(" [--tokenize] [--locations] [---allow-hash-bang] [--compact] [--silent] [--module] [--help] [--] [infile]");
process.exit(status);
}
for (var i = 2; i < process.argv.length; ++i) {
var arg = process.argv[i];
if ((arg == "-" || arg[0] != "-") && !infile) infile = arg;else if (arg == "--" && !infile && i + 2 == process.argv.length) forceFile = infile = process.argv[++i];else if (arg == "--ecma3") options.ecmaVersion = 3;else if (arg == "--ecma5") options.ecmaVersion = 5;else if (arg == "--ecma6") options.ecmaVersion = 6;else if (arg == "--locations") options.locations = true;else if (arg == "--allow-hash-bang") options.allowHashBang = true;else if (arg == "--silent") silent = true;else if (arg == "--compact") compact = true;else if (arg == "--help") help(0);else if (arg == "--tokenize") tokenize = true;else if (arg == "--module") options.sourceType = 'module';else help(1);
}
function run(code) {
var result = undefined;
if (!tokenize) {
try {
result = acorn.parse(code, options);
} catch (e) {
console.error(e.message);process.exit(1);
}
} else {
result = [];
var tokenizer = acorn.tokenizer(code, options),
token = undefined;
while (true) {
try {
token = tokenizer.getToken();
} catch (e) {
console.error(e.message);process.exit(1);
}
result.push(token);
if (token.type == acorn.tokTypes.eof) break;
}
}
if (!silent) console.log(JSON.stringify(result, null, compact ? null : 2));
}
if (forceFile || infile && infile != "-") {
run((0, _fs.readFileSync)(infile, "utf8"));
} else {
(function () {
var code = "";
process.stdin.resume();
process.stdin.on("data", function (chunk) {
return code += chunk;
});
process.stdin.on("end", function () {
return run(code);
});
})();
}
\ No newline at end of file
var fs = require("fs"), path = require("path")
var stream = require("stream")
var browserify = require("browserify")
var babel = require('babel-core')
var babelify = require("babelify").configure({loose: "all"})
process.chdir(path.resolve(__dirname, ".."))
browserify({standalone: "acorn"})
.plugin(require('browserify-derequire'))
.transform(babelify)
.require("./src/index.js", {entry: true})
.bundle()
.on("error", function (err) { console.log("Error: " + err.message) })
.pipe(fs.createWriteStream("dist/acorn.js"))
var ACORN_PLACEHOLDER = "this_function_call_should_be_replaced_with_a_call_to_load_acorn()";
function acornShimPrepare(file) {
var tr = new stream.Transform
if (file == path.resolve(__dirname, "../src/index.js")) {
var sent = false
tr._transform = function(chunk, _, callback) {
if (!sent) {
sent = true
callback(null, ACORN_PLACEHOLDER);
} else {
callback()
}
}
} else {
tr._transform = function(chunk, _, callback) { callback(null, chunk) }
}
return tr
}
function acornShimComplete() {
var tr = new stream.Transform
var buffer = "";
tr._transform = function(chunk, _, callback) {
buffer += chunk.toString("utf8");
callback();
};
tr._flush = function (callback) {
tr.push(buffer.replace(ACORN_PLACEHOLDER, "module.exports = typeof acorn != 'undefined' ? acorn : require(\"./acorn\")"));
callback(null);
};
return tr;
}
browserify({standalone: "acorn.loose"})
.plugin(require('browserify-derequire'))
.transform(acornShimPrepare)
.transform(babelify)
.require("./src/loose/index.js", {entry: true})
.bundle()
.on("error", function (err) { console.log("Error: " + err.message) })
.pipe(acornShimComplete())
.pipe(fs.createWriteStream("dist/acorn_loose.js"))
browserify({standalone: "acorn.walk"})
.plugin(require('browserify-derequire'))
.transform(acornShimPrepare)
.transform(babelify)
.require("./src/walk/index.js", {entry: true})
.bundle()
.on("error", function (err) { console.log("Error: " + err.message) })
.pipe(acornShimComplete())
.pipe(fs.createWriteStream("dist/walk.js"))
babel.transformFile("./src/bin/acorn.js", function (err, result) {
if (err) return console.log("Error: " + err.message)
fs.writeFile("bin/acorn", result.code, function (err) {
if (err) return console.log("Error: " + err.message)
// Make bin/acorn executable
if (process.platform === 'win32')
return
var stat = fs.statSync("bin/acorn")
var newPerm = stat.mode | parseInt('111', 8)
fs.chmodSync("bin/acorn", newPerm)
})
})
// Note: run `npm install unicode-7.0.0` first.
// Which Unicode version should be used?
var version = '7.0.0';
var start = require('unicode-' + version + '/properties/ID_Start/code-points')
.filter(function(ch) { return ch > 127; });
var cont = [0x200c, 0x200d].concat(require('unicode-' + version + '/properties/ID_Continue/code-points')
.filter(function(ch) { return ch > 127 && start.indexOf(ch) == -1; }));
function pad(str, width) {
while (str.length < width) str = "0" + str;
return str;
}
function esc(code) {
var hex = code.toString(16);
if (hex.length <= 2) return "\\x" + pad(hex, 2);
else return "\\u" + pad(hex, 4);
}
function generate(chars) {
var astral = [], re = "";
for (var i = 0, at = 0x10000; i < chars.length; i++) {
var from = chars[i], to = from;
while (i < chars.length - 1 && chars[i + 1] == to + 1) {
i++;
to++;
}
if (to <= 0xffff) {
if (from == to) re += esc(from);
else if (from + 1 == to) re += esc(from) + esc(to);
else re += esc(from) + "-" + esc(to);
} else {
astral.push(from - at, to - from);
at = to;
}
}
return {nonASCII: re, astral: astral};
}
var startData = generate(start), contData = generate(cont);
console.log(" var nonASCIIidentifierStartChars = \"" + startData.nonASCII + "\";");
console.log(" var nonASCIIidentifierChars = \"" + contData.nonASCII + "\";");
console.log(" var astralIdentifierStartCodes = " + JSON.stringify(startData.astral) + ";");
console.log(" var astralIdentifierCodes = " + JSON.stringify(contData.astral) + ";");
# Combine existing list of authors with everyone known in git, sort, add header.
tail --lines=+3 AUTHORS > AUTHORS.tmp
git log --format='%aN' | grep -v abraidwood >> AUTHORS.tmp
echo -e "List of Acorn contributors. Updated before every release.\n" > AUTHORS
sort -u AUTHORS.tmp >> AUTHORS
rm -f AUTHORS.tmp
This source diff could not be displayed because it is too large.
{
"_args": [
[
"acorn@^2.4.0",
"/home/theowl/projects/tmp/test_task/node_modules/jsdom"
]
],
"_from": "acorn@>=2.4.0 <3.0.0",
"_id": "acorn@2.7.0",
"_inCache": true,
"_installable": true,
"_location": "/acorn",
"_nodeVersion": "4.2.2",
"_npmUser": {
"email": "marijnh@gmail.com",
"name": "marijn"
},
"_npmVersion": "2.14.7",
"_phantomChildren": {},
"_requested": {
"name": "acorn",
"raw": "acorn@^2.4.0",
"rawSpec": "^2.4.0",
"scope": null,
"spec": ">=2.4.0 <3.0.0",
"type": "range"
},
"_requiredBy": [
"/acorn-globals",
"/jsdom"
],
"_resolved": "https://registry.npmjs.org/acorn/-/acorn-2.7.0.tgz",
"_shasum": "ab6e7d9d886aaca8b085bc3312b79a198433f0e7",
"_shrinkwrap": null,
"_spec": "acorn@^2.4.0",
"_where": "/home/theowl/projects/tmp/test_task/node_modules/jsdom",
"bin": {
"acorn": "./bin/acorn"
},
"bugs": {
"url": "https://github.com/ternjs/acorn/issues"
},
"contributors": [
{
"name": "keeyipchan"
},
{
"name": "List of Acorn contributors. Updated before every release."
},
{
"name": "Alistair Braidwood"
},
{
"name": "Andres Suarez"
},
{
"name": "Aparajita Fishman"
},
{
"name": "Arian Stolwijk"
},
{
"name": "Artem Govorov"
},
{
"name": "Brandon Mills"
},
{
"name": "Charles Hughes"
},
{
"name": "Conrad Irwin"
},
{
"name": "David Bonnet"
},
{
"name": "ForbesLindesay"
},
{
"name": "Forbes Lindesay"
},
{
"name": "Gilad Peleg"
},
{
"name": "impinball"
},
{
"name": "Ingvar Stepanyan"
},
{
"name": "Jesse McCarthy"
},
{
"name": "Jiaxing Wang"
},
{
"name": "Joel Kemp"
},
{
"name": "Johannes Herr"
},
{
"name": "Jürg Lehni"
},
{
"name": "Adrian Rakovsky"
},
{
"name": "Kevin Kwok"
},
{
"name": "krator"
},
{
"name": "Marijn Haverbeke"
},
{
"name": "Martin Carlberg"
},
{
"name": "Mathias Bynens"
},
{
"name": "Mathieu 'p01' Henri"
},
{
"name": "Max Schaefer"
},
{
"name": "Max Zerzouri"
},
{
"name": "Mihai Bazon"
},
{
"name": "Mike Rennie"
},
{
"name": "Nick Fitzgerald"
},
{
"name": "Oskar Schöldström"
},
{
"name": "Paul Harper"
},
{
"name": "Peter Rust"
},
{
"name": "PlNG"
},
{
"name": "r-e-d"
},
{
"name": "Rich Harris"
},
{
"name": "Sebastian McKenzie"
},
{
"name": "Timothy Gu"
},
{
"name": "zsjforcn"
}
],
"dependencies": {},
"description": "ECMAScript parser",
"devDependencies": {
"babel-core": "^5.6.15",
"babelify": "^6.1.2",
"browserify": "^10.2.4",
"browserify-derequire": "^0.9.4",
"unicode-7.0.0": "~0.1.5"
},
"directories": {},
"dist": {
"shasum": "ab6e7d9d886aaca8b085bc3312b79a198433f0e7",
"tarball": "http://registry.npmjs.org/acorn/-/acorn-2.7.0.tgz"
},
"engines": {
"node": ">=0.4.0"
},
"gitHead": "1405436064bff087f14af55a763396aa5c0ca148",
"homepage": "https://github.com/ternjs/acorn",
"license": "MIT",
"main": "dist/acorn.js",
"maintainers": [
{
"name": "marijn",
"email": "marijnh@gmail.com"
},
{
"name": "rreverser",
"email": "me@rreverser.com"
}
],
"name": "acorn",
"optionalDependencies": {},
"readme": "ERROR: No README data found!",
"repository": {
"type": "git",
"url": "git+https://github.com/ternjs/acorn.git"
},
"scripts": {
"prepublish": "node bin/build-acorn.js",
"test": "node test/run.js"
},
"version": "2.7.0"
}
#!/usr/bin/env node
import {basename} from "path"
import {readFileSync as readFile} from "fs"
import * as acorn from "../dist/acorn.js"
let infile, forceFile, silent = false, compact = false, tokenize = false
const options = {}
function help(status) {
const print = (status == 0) ? console.log : console.error
print("usage: " + basename(process.argv[1]) + " [--ecma3|--ecma5|--ecma6]")
print(" [--tokenize] [--locations] [---allow-hash-bang] [--compact] [--silent] [--module] [--help] [--] [infile]")
process.exit(status)
}
for (let i = 2; i < process.argv.length; ++i) {
const arg = process.argv[i]
if ((arg == "-" || arg[0] != "-") && !infile) infile = arg
else if (arg == "--" && !infile && i + 2 == process.argv.length) forceFile = infile = process.argv[++i]
else if (arg == "--ecma3") options.ecmaVersion = 3
else if (arg == "--ecma5") options.ecmaVersion = 5
else if (arg == "--ecma6") options.ecmaVersion = 6
else if (arg == "--locations") options.locations = true
else if (arg == "--allow-hash-bang") options.allowHashBang = true
else if (arg == "--silent") silent = true
else if (arg == "--compact") compact = true
else if (arg == "--help") help(0)
else if (arg == "--tokenize") tokenize = true
else if (arg == "--module") options.sourceType = 'module'
else help(1)
}
function run(code) {
let result
if (!tokenize) {
try { result = acorn.parse(code, options) }
catch(e) { console.error(e.message); process.exit(1) }
} else {
result = []
let tokenizer = acorn.tokenizer(code, options), token
while (true) {
try { token = tokenizer.getToken() }
catch(e) { console.error(e.message); process.exit(1) }
result.push(token)
if (token.type == acorn.tokTypes.eof) break
}
}
if (!silent) console.log(JSON.stringify(result, null, compact ? null : 2))
}
if (forceFile || infile && infile != "-") {
run(readFile(infile, "utf8"))
} else {
let code = ""
process.stdin.resume()
process.stdin.on("data", chunk => code += chunk)
process.stdin.on("end", () => run(code))
}
// Acorn is a tiny, fast JavaScript parser written in JavaScript.
//
// Acorn was written by Marijn Haverbeke, Ingvar Stepanyan, and
// various contributors and released under an MIT license.
//
// Git repositories for Acorn are available at
//
// http://marijnhaverbeke.nl/git/acorn
// https://github.com/ternjs/acorn.git
//
// Please use the [github bug tracker][ghbt] to report issues.
//
// [ghbt]: https://github.com/ternjs/acorn/issues
//
// This file defines the main parser interface. The library also comes
// with an [error-tolerant parser][dammit] and an
// [abstract syntax tree walker][walk], defined in other files.
//
// [dammit]: acorn_loose.js
// [walk]: util/walk.js
import {Parser} from "./state"
import "./parseutil"
import "./statement"
import "./lval"
import "./expression"
import "./location"
export {Parser, plugins} from "./state"
export {defaultOptions} from "./options"
export {Position, SourceLocation, getLineInfo} from "./locutil"
export {Node} from "./node"
export {TokenType, types as tokTypes} from "./tokentype"
export {TokContext, types as tokContexts} from "./tokencontext"
export {isIdentifierChar, isIdentifierStart} from "./identifier"
export {Token} from "./tokenize"
export {isNewLine, lineBreak, lineBreakG} from "./whitespace"
export const version = "2.7.0"
// The main exported interface (under `self.acorn` when in the
// browser) is a `parse` function that takes a code string and
// returns an abstract syntax tree as specified by [Mozilla parser
// API][api].
//
// [api]: https://developer.mozilla.org/en-US/docs/SpiderMonkey/Parser_API
export function parse(input, options) {
return new Parser(options, input).parse()
}
// This function tries to parse a single expression at a given
// offset in a string. Useful for parsing mixed-language formats
// that embed JavaScript expressions.
export function parseExpressionAt(input, pos, options) {
let p = new Parser(options, input, pos)
p.nextToken()
return p.parseExpression()
}
// Acorn is organized as a tokenizer and a recursive-descent parser.
// The `tokenizer` export provides an interface to the tokenizer.
export function tokenizer(input, options) {
return new Parser(options, input)
}
import {Parser} from "./state"
import {Position, getLineInfo} from "./locutil"
const pp = Parser.prototype
// This function is used to raise exceptions on parse errors. It
// takes an offset integer (into the current `input`) to indicate
// the location of the error, attaches the position to the end
// of the error message, and then raises a `SyntaxError` with that
// message.
pp.raise = function(pos, message) {
let loc = getLineInfo(this.input, pos)
message += " (" + loc.line + ":" + loc.column + ")"
let err = new SyntaxError(message)
err.pos = pos; err.loc = loc; err.raisedAt = this.pos
throw err
}
pp.curPosition = function() {
if (this.options.locations) {
return new Position(this.curLine, this.pos - this.lineStart)
}
}
import {lineBreakG} from "./whitespace"
// These are used when `options.locations` is on, for the
// `startLoc` and `endLoc` properties.
export class Position {
constructor(line, col) {
this.line = line
this.column = col
}
offset(n) {
return new Position(this.line, this.column + n)
}
}
export class SourceLocation {
constructor(p, start, end) {
this.start = start
this.end = end
if (p.sourceFile !== null) this.source = p.sourceFile
}
}
// The `getLineInfo` function is mostly useful when the
// `locations` option is off (for performance reasons) and you
// want to find the line/column position for a given character
// offset. `input` should be the code string that the offset refers
// into.
export function getLineInfo(input, offset) {
for (let line = 1, cur = 0;;) {
lineBreakG.lastIndex = cur
let match = lineBreakG.exec(input)
if (match && match.index < offset) {
++line
cur = match.index + match[0].length
} else {
return new Position(line, offset - cur)
}
}
}
// Acorn: Loose parser
//
// This module provides an alternative parser (`parse_dammit`) that
// exposes the same interface as `parse`, but will try to parse
// anything as JavaScript, repairing syntax errors as best it can.
// There are circumstances in which it will raise an error and give
// up, but they are very rare. The resulting AST will be a mostly
// valid JavaScript AST (as per the [Mozilla parser API][api]), except
// that:
//
// - Return outside functions is allowed
//
// - Label consistency (no conflicts, break only to existing labels)
// is not enforced.
//
// - Bogus Identifier nodes with a name of `"✖"` are inserted whenever
// the parser got too confused to return anything meaningful.
//
// [api]: https://developer.mozilla.org/en-US/docs/SpiderMonkey/Parser_API
//
// The expected use for this is to *first* try `acorn.parse`, and only
// if that fails switch to `parse_dammit`. The loose parser might
// parse badly indented code incorrectly, so **don't** use it as
// your default parser.
//
// Quite a lot of acorn.js is duplicated here. The alternative was to
// add a *lot* of extra cruft to that file, making it less readable
// and slower. Copying and editing the code allowed me to make
// invasive changes and simplifications without creating a complicated
// tangle.
import * as acorn from ".."
import {LooseParser, pluginsLoose} from "./state"
import "./tokenize"
import "./statement"
import "./expression"
export {LooseParser, pluginsLoose} from "./state"
acorn.defaultOptions.tabSize = 4
export function parse_dammit(input, options) {
let p = new LooseParser(input, options)
p.next()
return p.parseTopLevel()
}
acorn.parse_dammit = parse_dammit
acorn.LooseParser = LooseParser
acorn.pluginsLoose = pluginsLoose
export function isDummy(node) { return node.name == "✖" }
\ No newline at end of file
import {tokenizer, SourceLocation, tokTypes as tt, Node, lineBreak, isNewLine} from ".."
// Registered plugins
export const pluginsLoose = {}
export class LooseParser {
constructor(input, options) {
this.toks = tokenizer(input, options)
this.options = this.toks.options
this.input = this.toks.input
this.tok = this.last = {type: tt.eof, start: 0, end: 0}
if (this.options.locations) {
let here = this.toks.curPosition()
this.tok.loc = new SourceLocation(this.toks, here, here)
}
this.ahead = []; // Tokens ahead
this.context = []; // Indentation contexts
this.curIndent = 0
this.curLineStart = 0
this.nextLineStart = this.lineEnd(this.curLineStart) + 1
// Load plugins
this.options.pluginsLoose = options.pluginsLoose || {}
this.loadPlugins(this.options.pluginsLoose)
}
startNode() {
return new Node(this.toks, this.tok.start, this.options.locations ? this.tok.loc.start : null)
}
storeCurrentPos() {
return this.options.locations ? [this.tok.start, this.tok.loc.start] : this.tok.start
}
startNodeAt(pos) {
if (this.options.locations) {
return new Node(this.toks, pos[0], pos[1])
} else {
return new Node(this.toks, pos)
}
}
finishNode(node, type) {
node.type = type
node.end = this.last.end
if (this.options.locations)
node.loc.end = this.last.loc.end
if (this.options.ranges)
node.range[1] = this.last.end
return node
}
dummyNode(type) {
let dummy = this.startNode()
dummy.type = type
dummy.end = dummy.start
if (this.options.locations)
dummy.loc.end = dummy.loc.start
if (this.options.ranges)
dummy.range[1] = dummy.start
this.last = {type: tt.name, start: dummy.start, end: dummy.start, loc: dummy.loc}
return dummy
}
dummyIdent() {
let dummy = this.dummyNode("Identifier")
dummy.name = "✖"
return dummy
}
dummyString() {
let dummy = this.dummyNode("Literal")
dummy.value = dummy.raw = "✖"
return dummy
}
eat(type) {
if (this.tok.type === type) {
this.next()
return true
} else {
return false
}
}
isContextual(name) {
return this.tok.type === tt.name && this.tok.value === name
}
eatContextual(name) {
return this.tok.value === name && this.eat(tt.name)
}
canInsertSemicolon() {
return this.tok.type === tt.eof || this.tok.type === tt.braceR ||
lineBreak.test(this.input.slice(this.last.end, this.tok.start))
}
semicolon() {
return this.eat(tt.semi)
}
expect(type) {
if (this.eat(type)) return true
for (let i = 1; i <= 2; i++) {
if (this.lookAhead(i).type == type) {
for (let j = 0; j < i; j++) this.next()
return true
}
}
}
pushCx() {
this.context.push(this.curIndent)
}
popCx() {
this.curIndent = this.context.pop()
}
lineEnd(pos) {
while (pos < this.input.length && !isNewLine(this.input.charCodeAt(pos))) ++pos
return pos
}
indentationAfter(pos) {
for (let count = 0;; ++pos) {
let ch = this.input.charCodeAt(pos)
if (ch === 32) ++count
else if (ch === 9) count += this.options.tabSize
else return count
}
}
closes(closeTok, indent, line, blockHeuristic) {
if (this.tok.type === closeTok || this.tok.type === tt.eof) return true
return line != this.curLineStart && this.curIndent < indent && this.tokenStartsLine() &&
(!blockHeuristic || this.nextLineStart >= this.input.length ||
this.indentationAfter(this.nextLineStart) < indent)
}
tokenStartsLine() {
for (let p = this.tok.start - 1; p >= this.curLineStart; --p) {
let ch = this.input.charCodeAt(p)
if (ch !== 9 && ch !== 32) return false
}
return true
}
extend(name, f) {
this[name] = f(this[name])
}
loadPlugins(pluginConfigs) {
for (let name in pluginConfigs) {
let plugin = pluginsLoose[name]
if (!plugin) throw new Error("Plugin '" + name + "' not found")
plugin(this, pluginConfigs[name])
}
}
}
import {tokTypes as tt, Token, isNewLine, SourceLocation, getLineInfo, lineBreakG} from ".."
import {LooseParser} from "./state"
const lp = LooseParser.prototype
function isSpace(ch) {
return (ch < 14 && ch > 8) || ch === 32 || ch === 160 || isNewLine(ch)
}
lp.next = function() {
this.last = this.tok
if (this.ahead.length)
this.tok = this.ahead.shift()
else
this.tok = this.readToken()
if (this.tok.start >= this.nextLineStart) {
while (this.tok.start >= this.nextLineStart) {
this.curLineStart = this.nextLineStart
this.nextLineStart = this.lineEnd(this.curLineStart) + 1
}
this.curIndent = this.indentationAfter(this.curLineStart)
}
}
lp.readToken = function() {
for (;;) {
try {
this.toks.next()
if (this.toks.type === tt.dot &&
this.input.substr(this.toks.end, 1) === "." &&
this.options.ecmaVersion >= 6) {
this.toks.end++
this.toks.type = tt.ellipsis
}
return new Token(this.toks)
} catch(e) {
if (!(e instanceof SyntaxError)) throw e
// Try to skip some text, based on the error message, and then continue
let msg = e.message, pos = e.raisedAt, replace = true
if (/unterminated/i.test(msg)) {
pos = this.lineEnd(e.pos + 1)
if (/string/.test(msg)) {
replace = {start: e.pos, end: pos, type: tt.string, value: this.input.slice(e.pos + 1, pos)}
} else if (/regular expr/i.test(msg)) {
let re = this.input.slice(e.pos, pos)
try { re = new RegExp(re) } catch(e) {}
replace = {start: e.pos, end: pos, type: tt.regexp, value: re}
} else if (/template/.test(msg)) {
replace = {start: e.pos, end: pos,
type: tt.template,
value: this.input.slice(e.pos, pos)}
} else {
replace = false
}
} else if (/invalid (unicode|regexp|number)|expecting unicode|octal literal|is reserved|directly after number|expected number in radix/i.test(msg)) {
while (pos < this.input.length && !isSpace(this.input.charCodeAt(pos))) ++pos
} else if (/character escape|expected hexadecimal/i.test(msg)) {
while (pos < this.input.length) {
let ch = this.input.charCodeAt(pos++)
if (ch === 34 || ch === 39 || isNewLine(ch)) break
}
} else if (/unexpected character/i.test(msg)) {
pos++
replace = false
} else if (/regular expression/i.test(msg)) {
replace = true
} else {
throw e
}
this.resetTo(pos)
if (replace === true) replace = {start: pos, end: pos, type: tt.name, value: "✖"}
if (replace) {
if (this.options.locations)
replace.loc = new SourceLocation(
this.toks,
getLineInfo(this.input, replace.start),
getLineInfo(this.input, replace.end))
return replace
}
}
}
}
lp.resetTo = function(pos) {
this.toks.pos = pos
let ch = this.input.charAt(pos - 1)
this.toks.exprAllowed = !ch || /[\[\{\(,;:?\/*=+\-~!|&%^<>]/.test(ch) ||
/[enwfd]/.test(ch) &&
/\b(keywords|case|else|return|throw|new|in|(instance|type)of|delete|void)$/.test(this.input.slice(pos - 10, pos))
if (this.options.locations) {
this.toks.curLine = 1
this.toks.lineStart = lineBreakG.lastIndex = 0
let match
while ((match = lineBreakG.exec(this.input)) && match.index < pos) {
++this.toks.curLine
this.toks.lineStart = match.index + match[0].length
}
}
}
lp.lookAhead = function(n) {
while (n > this.ahead.length)
this.ahead.push(this.readToken())
return this.ahead[n - 1]
}
import {types as tt} from "./tokentype"
import {Parser} from "./state"
import {has} from "./util"
const pp = Parser.prototype
// Convert existing expression atom to assignable pattern
// if possible.
pp.toAssignable = function(node, isBinding) {
if (this.options.ecmaVersion >= 6 && node) {
switch (node.type) {
case "Identifier":
case "ObjectPattern":
case "ArrayPattern":
break
case "ObjectExpression":
node.type = "ObjectPattern"
for (let i = 0; i < node.properties.length; i++) {
let prop = node.properties[i]
if (prop.kind !== "init") this.raise(prop.key.start, "Object pattern can't contain getter or setter")
this.toAssignable(prop.value, isBinding)
}
break
case "ArrayExpression":
node.type = "ArrayPattern"
this.toAssignableList(node.elements, isBinding)
break
case "AssignmentExpression":
if (node.operator === "=") {
node.type = "AssignmentPattern"
delete node.operator
// falls through to AssignmentPattern
} else {
this.raise(node.left.end, "Only '=' operator can be used for specifying default value.")
break;
}
case "AssignmentPattern":
if (node.right.type === "YieldExpression")
this.raise(node.right.start, "Yield expression cannot be a default value")
break;
case "ParenthesizedExpression":
node.expression = this.toAssignable(node.expression, isBinding)
break
case "MemberExpression":
if (!isBinding) break
default:
this.raise(node.start, "Assigning to rvalue")
}
}
return node
}
// Convert list of expression atoms to binding list.
pp.toAssignableList = function(exprList, isBinding) {
let end = exprList.length
if (end) {
let last = exprList[end - 1]
if (last && last.type == "RestElement") {
--end
} else if (last && last.type == "SpreadElement") {
last.type = "RestElement"
let arg = last.argument
this.toAssignable(arg, isBinding)
if (arg.type !== "Identifier" && arg.type !== "MemberExpression" && arg.type !== "ArrayPattern")
this.unexpected(arg.start)
--end
}
if (isBinding && last.type === "RestElement" && last.argument.type !== "Identifier")
this.unexpected(last.argument.start);
}
for (let i = 0; i < end; i++) {
let elt = exprList[i]
if (elt) this.toAssignable(elt, isBinding)
}
return exprList
}
// Parses spread element.
pp.parseSpread = function(refDestructuringErrors) {
let node = this.startNode()
this.next()
node.argument = this.parseMaybeAssign(refDestructuringErrors)
return this.finishNode(node, "SpreadElement")
}
pp.parseRest = function(allowNonIdent) {
let node = this.startNode()
this.next()
// RestElement inside of a function parameter must be an identifier
if (allowNonIdent) node.argument = this.type === tt.name ? this.parseIdent() : this.unexpected()
else node.argument = this.type === tt.name || this.type === tt.bracketL ? this.parseBindingAtom() : this.unexpected()
return this.finishNode(node, "RestElement")
}
// Parses lvalue (assignable) atom.
pp.parseBindingAtom = function() {
if (this.options.ecmaVersion < 6) return this.parseIdent()
switch (this.type) {
case tt.name:
return this.parseIdent()
case tt.bracketL:
let node = this.startNode()
this.next()
node.elements = this.parseBindingList(tt.bracketR, true, true)
return this.finishNode(node, "ArrayPattern")
case tt.braceL:
return this.parseObj(true)
default:
this.unexpected()
}
}
pp.parseBindingList = function(close, allowEmpty, allowTrailingComma, allowNonIdent) {
let elts = [], first = true
while (!this.eat(close)) {
if (first) first = false
else this.expect(tt.comma)
if (allowEmpty && this.type === tt.comma) {
elts.push(null)
} else if (allowTrailingComma && this.afterTrailingComma(close)) {
break
} else if (this.type === tt.ellipsis) {
let rest = this.parseRest(allowNonIdent)
this.parseBindingListItem(rest)
elts.push(rest)
this.expect(close)
break
} else {
let elem = this.parseMaybeDefault(this.start, this.startLoc)
this.parseBindingListItem(elem)
elts.push(elem)
}
}
return elts
}
pp.parseBindingListItem = function(param) {
return param
}
// Parses assignment pattern around given atom if possible.
pp.parseMaybeDefault = function(startPos, startLoc, left) {
left = left || this.parseBindingAtom()
if (this.options.ecmaVersion < 6 || !this.eat(tt.eq)) return left
let node = this.startNodeAt(startPos, startLoc)
node.left = left
node.right = this.parseMaybeAssign()
return this.finishNode(node, "AssignmentPattern")
}
// Verify that a node is an lval — something that can be assigned
// to.
pp.checkLVal = function(expr, isBinding, checkClashes) {
switch (expr.type) {
case "Identifier":
if (this.strict && this.reservedWordsStrictBind.test(expr.name))
this.raise(expr.start, (isBinding ? "Binding " : "Assigning to ") + expr.name + " in strict mode")
if (checkClashes) {
if (has(checkClashes, expr.name))
this.raise(expr.start, "Argument name clash")
checkClashes[expr.name] = true
}
break
case "MemberExpression":
if (isBinding) this.raise(expr.start, (isBinding ? "Binding" : "Assigning to") + " member expression")
break
case "ObjectPattern":
for (let i = 0; i < expr.properties.length; i++)
this.checkLVal(expr.properties[i].value, isBinding, checkClashes)
break
case "ArrayPattern":
for (let i = 0; i < expr.elements.length; i++) {
let elem = expr.elements[i]
if (elem) this.checkLVal(elem, isBinding, checkClashes)
}
break
case "AssignmentPattern":
this.checkLVal(expr.left, isBinding, checkClashes)
break
case "RestElement":
this.checkLVal(expr.argument, isBinding, checkClashes)
break
case "ParenthesizedExpression":
this.checkLVal(expr.expression, isBinding, checkClashes)
break
default:
this.raise(expr.start, (isBinding ? "Binding" : "Assigning to") + " rvalue")
}
}
import {Parser} from "./state"
import {SourceLocation} from "./locutil"
export class Node {
constructor(parser, pos, loc) {
this.type = ""
this.start = pos
this.end = 0
if (parser.options.locations)
this.loc = new SourceLocation(parser, loc)
if (parser.options.directSourceFile)
this.sourceFile = parser.options.directSourceFile
if (parser.options.ranges)
this.range = [pos, 0]
}
}
// Start an AST node, attaching a start offset.
const pp = Parser.prototype
pp.startNode = function() {
return new Node(this, this.start, this.startLoc)
}
pp.startNodeAt = function(pos, loc) {
return new Node(this, pos, loc)
}
// Finish an AST node, adding `type` and `end` properties.
function finishNodeAt(node, type, pos, loc) {
node.type = type
node.end = pos
if (this.options.locations)
node.loc.end = loc
if (this.options.ranges)
node.range[1] = pos
return node
}
pp.finishNode = function(node, type) {
return finishNodeAt.call(this, node, type, this.lastTokEnd, this.lastTokEndLoc)
}
// Finish node at given position
pp.finishNodeAt = function(node, type, pos, loc) {
return finishNodeAt.call(this, node, type, pos, loc)
}
import {has, isArray} from "./util"
import {SourceLocation} from "./locutil"
// A second optional argument can be given to further configure
// the parser process. These options are recognized:
export const defaultOptions = {
// `ecmaVersion` indicates the ECMAScript version to parse. Must
// be either 3, or 5, or 6. This influences support for strict
// mode, the set of reserved words, support for getters and
// setters and other features.
ecmaVersion: 5,
// Source type ("script" or "module") for different semantics
sourceType: "script",
// `onInsertedSemicolon` can be a callback that will be called
// when a semicolon is automatically inserted. It will be passed
// the position of the inserted semicolon as an offset, and if `locations` is
// enabled, it is given the location as a `{line, column}` object
// as second argument.
onInsertedSemicolon: null,
// `onTrailingComma` is similar to `onInsertedSemicolon`, but for
// trailing commas.
onTrailingComma: null,
// By default, reserved words are only enforced if ecmaVersion >= 5.
// Set `allowReserved` to a boolean value to explicitly turn this on
// or off. When this option has the value "never", reserved words
// and keywords can also not be used as property names.
allowReserved: null,
// When enabled, a return at the top level is not considered an
// error.
allowReturnOutsideFunction: false,
// When enabled, import/export statements are not constrained to
// appearing at the top of the program.
allowImportExportEverywhere: false,
// When enabled, hashbang directive in the beginning of file
// is allowed and treated as a line comment.
allowHashBang: false,
// When `locations` is on, `loc` properties holding objects with
// `start` and `end` properties in `{line, column}` form (with
// line being 1-based and column 0-based) will be attached to the
// nodes.
locations: false,
// A function can be passed as `onToken` option, which will
// cause Acorn to call that function with object in the same
// format as tokens returned from `tokenizer().getToken()`. Note
// that you are not allowed to call the parser from the
// callback—that will corrupt its internal state.
onToken: null,
// A function can be passed as `onComment` option, which will
// cause Acorn to call that function with `(block, text, start,
// end)` parameters whenever a comment is skipped. `block` is a
// boolean indicating whether this is a block (`/* */`) comment,
// `text` is the content of the comment, and `start` and `end` are
// character offsets that denote the start and end of the comment.
// When the `locations` option is on, two more parameters are
// passed, the full `{line, column}` locations of the start and
// end of the comments. Note that you are not allowed to call the
// parser from the callback—that will corrupt its internal state.
onComment: null,
// Nodes have their start and end character offsets recorded in
// `start` and `end` properties (directly on the node, rather than
// the `loc` object, which holds line/column data). To also add a
// [semi-standardized][range] `range` property holding a `[start,
// end]` array with the same numbers, set the `ranges` option to
// `true`.
//
// [range]: https://bugzilla.mozilla.org/show_bug.cgi?id=745678
ranges: false,
// It is possible to parse multiple files into a single AST by
// passing the tree produced by parsing the first file as
// `program` option in subsequent parses. This will add the
// toplevel forms of the parsed file to the `Program` (top) node
// of an existing parse tree.
program: null,
// When `locations` is on, you can pass this to record the source
// file in every node's `loc` object.
sourceFile: null,
// This value, if given, is stored in every node, whether
// `locations` is on or off.
directSourceFile: null,
// When enabled, parenthesized expressions are represented by
// (non-standard) ParenthesizedExpression nodes
preserveParens: false,
plugins: {}
}
// Interpret and default an options object
export function getOptions(opts) {
let options = {}
for (let opt in defaultOptions)
options[opt] = opts && has(opts, opt) ? opts[opt] : defaultOptions[opt]
if (options.allowReserved == null)
options.allowReserved = options.ecmaVersion < 5
if (isArray(options.onToken)) {
let tokens = options.onToken
options.onToken = (token) => tokens.push(token)
}
if (isArray(options.onComment))
options.onComment = pushComment(options, options.onComment)
return options
}
function pushComment(options, array) {
return function (block, text, start, end, startLoc, endLoc) {
let comment = {
type: block ? 'Block' : 'Line',
value: text,
start: start,
end: end
}
if (options.locations)
comment.loc = new SourceLocation(this, startLoc, endLoc)
if (options.ranges)
comment.range = [start, end]
array.push(comment)
}
}
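For illustration, a minimal sketch of how the array forms of `onToken` and `onComment` behave once `getOptions` has wrapped them in push callbacks (this assumes the usual `acorn` entry point; it is not part of the source above):

```js
// Hedged sketch only, not part of the parser source.
var acorn = require('acorn');

var comments = [];
var tokens = [];

acorn.parse('/* hi */ var x = 1;', {
  onComment: comments, // an array is wrapped into a push callback by getOptions
  onToken: tokens
});

console.log(comments[0].type);  // 'Block'
console.log(comments[0].value); // ' hi '
console.log(tokens.length > 0); // true
```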
import {types as tt} from "./tokentype"
import {Parser} from "./state"
import {lineBreak} from "./whitespace"
const pp = Parser.prototype
// ## Parser utilities
// Test whether a statement node is the string literal `"use strict"`.
pp.isUseStrict = function(stmt) {
return this.options.ecmaVersion >= 5 && stmt.type === "ExpressionStatement" &&
stmt.expression.type === "Literal" &&
stmt.expression.raw.slice(1, -1) === "use strict"
}
// Predicate that tests whether the next token is of the given
// type, and if yes, consumes it as a side effect.
pp.eat = function(type) {
if (this.type === type) {
this.next()
return true
} else {
return false
}
}
// Tests whether parsed token is a contextual keyword.
pp.isContextual = function(name) {
return this.type === tt.name && this.value === name
}
// Consumes contextual keyword if possible.
pp.eatContextual = function(name) {
return this.value === name && this.eat(tt.name)
}
// Asserts that following token is given contextual keyword.
pp.expectContextual = function(name) {
if (!this.eatContextual(name)) this.unexpected()
}
// Test whether a semicolon can be inserted at the current position.
pp.canInsertSemicolon = function() {
return this.type === tt.eof ||
this.type === tt.braceR ||
lineBreak.test(this.input.slice(this.lastTokEnd, this.start))
}
pp.insertSemicolon = function() {
if (this.canInsertSemicolon()) {
if (this.options.onInsertedSemicolon)
this.options.onInsertedSemicolon(this.lastTokEnd, this.lastTokEndLoc)
return true
}
}
// Consume a semicolon, or, failing that, see if we are allowed to
// pretend that there is a semicolon at this position.
pp.semicolon = function() {
if (!this.eat(tt.semi) && !this.insertSemicolon()) this.unexpected()
}
pp.afterTrailingComma = function(tokType) {
if (this.type == tokType) {
if (this.options.onTrailingComma)
this.options.onTrailingComma(this.lastTokStart, this.lastTokStartLoc)
this.next()
return true
}
}
// Expect a token of a given type. If found, consume it, otherwise,
// raise an unexpected token error.
pp.expect = function(type) {
this.eat(type) || this.unexpected()
}
// Raise an unexpected token error.
pp.unexpected = function(pos) {
this.raise(pos != null ? pos : this.start, "Unexpected token")
}
pp.checkPatternErrors = function(refDestructuringErrors, andThrow) {
let pos = refDestructuringErrors && refDestructuringErrors.trailingComma
if (!andThrow) return !!pos
if (pos) this.raise(pos, "Trailing comma is not permitted in destructuring patterns")
}
pp.checkExpressionErrors = function(refDestructuringErrors, andThrow) {
let pos = refDestructuringErrors && refDestructuringErrors.shorthandAssign
if (!andThrow) return !!pos
if (pos) this.raise(pos, "Shorthand property assignments are valid only in destructuring patterns")
}
import {reservedWords, keywords} from "./identifier"
import {types as tt} from "./tokentype"
import {lineBreak} from "./whitespace"
import {getOptions} from "./options"
// Registered plugins
export const plugins = {}
function keywordRegexp(words) {
return new RegExp("^(" + words.replace(/ /g, "|") + ")$")
}
export class Parser {
constructor(options, input, startPos) {
this.options = options = getOptions(options)
this.sourceFile = options.sourceFile
this.keywords = keywordRegexp(keywords[options.ecmaVersion >= 6 ? 6 : 5])
let reserved = options.allowReserved ? "" :
reservedWords[options.ecmaVersion] + (options.sourceType == "module" ? " await" : "")
this.reservedWords = keywordRegexp(reserved)
let reservedStrict = (reserved ? reserved + " " : "") + reservedWords.strict
this.reservedWordsStrict = keywordRegexp(reservedStrict)
this.reservedWordsStrictBind = keywordRegexp(reservedStrict + " " + reservedWords.strictBind)
this.input = String(input)
// Used to signal to callers of `readWord1` whether the word
// contained any escape sequences. This is needed because words with
// escape sequences must not be interpreted as keywords.
this.containsEsc = false;
// Load plugins
this.loadPlugins(options.plugins)
// Set up token state
// The current position of the tokenizer in the input.
if (startPos) {
this.pos = startPos
this.lineStart = Math.max(0, this.input.lastIndexOf("\n", startPos))
this.curLine = this.input.slice(0, this.lineStart).split(lineBreak).length
} else {
this.pos = this.lineStart = 0
this.curLine = 1
}
// Properties of the current token:
// Its type
this.type = tt.eof
// For tokens that include more information than their type, the value
this.value = null
// Its start and end offset
this.start = this.end = this.pos
// And, if locations are used, the {line, column} object
// corresponding to those offsets
this.startLoc = this.endLoc = this.curPosition()
// Position information for the previous token
this.lastTokEndLoc = this.lastTokStartLoc = null
this.lastTokStart = this.lastTokEnd = this.pos
// The context stack is used to superficially track syntactic
// context to predict whether a regular expression is allowed in a
// given position.
this.context = this.initialContext()
this.exprAllowed = true
// Figure out whether this is module code.
this.strict = this.inModule = options.sourceType === "module"
// Used to signify the start of a potential arrow function
this.potentialArrowAt = -1
// Flags to track whether we are in a function or a generator.
this.inFunction = this.inGenerator = false
// Labels in scope.
this.labels = []
// If enabled, skip leading hashbang line.
if (this.pos === 0 && options.allowHashBang && this.input.slice(0, 2) === '#!')
this.skipLineComment(2)
}
// DEPRECATED Kept for backwards compatibility until 3.0 in case a plugin uses them
isKeyword(word) { return this.keywords.test(word) }
isReservedWord(word) { return this.reservedWords.test(word) }
extend(name, f) {
this[name] = f(this[name])
}
loadPlugins(pluginConfigs) {
for (let name in pluginConfigs) {
let plugin = plugins[name]
if (!plugin) throw new Error("Plugin '" + name + "' not found")
plugin(this, pluginConfigs[name])
}
}
parse() {
let node = this.options.program || this.startNode()
this.nextToken()
return this.parseTopLevel(node)
}
}
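As a rough illustration of the plugin hook above, here is a hedged sketch that registers a toy plugin (the plugin name is hypothetical, and this assumes the bundled `acorn` entry point re-exports `plugins`):

```js
// Hedged sketch only: wrap parseTopLevel through Parser#extend.
var acorn = require('acorn');

acorn.plugins.countTopLevel = function (parser) {
  parser.extend('parseTopLevel', function (inner) {
    return function (node) {
      var program = inner.call(this, node);
      console.log('top-level statements:', program.body.length);
      return program;
    };
  });
};

acorn.parse('var a = 1; var b = 2;', {plugins: {countTopLevel: true}});
// logs: top-level statements: 2
```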
// The algorithm used to determine whether a regexp can appear at a
// given point in the program is loosely based on sweet.js' approach.
// See https://github.com/mozilla/sweet.js/wiki/design
import {Parser} from "./state"
import {types as tt} from "./tokentype"
import {lineBreak} from "./whitespace"
export class TokContext {
constructor(token, isExpr, preserveSpace, override) {
this.token = token
this.isExpr = !!isExpr
this.preserveSpace = !!preserveSpace
this.override = override
}
}
export const types = {
b_stat: new TokContext("{", false),
b_expr: new TokContext("{", true),
b_tmpl: new TokContext("${", true),
p_stat: new TokContext("(", false),
p_expr: new TokContext("(", true),
q_tmpl: new TokContext("`", true, true, p => p.readTmplToken()),
f_expr: new TokContext("function", true)
}
const pp = Parser.prototype
pp.initialContext = function() {
return [types.b_stat]
}
pp.braceIsBlock = function(prevType) {
if (prevType === tt.colon) {
let parent = this.curContext()
if (parent === types.b_stat || parent === types.b_expr)
return !parent.isExpr
}
if (prevType === tt._return)
return lineBreak.test(this.input.slice(this.lastTokEnd, this.start))
if (prevType === tt._else || prevType === tt.semi || prevType === tt.eof || prevType === tt.parenR)
return true
if (prevType == tt.braceL)
return this.curContext() === types.b_stat
return !this.exprAllowed
}
pp.updateContext = function(prevType) {
let update, type = this.type
if (type.keyword && prevType == tt.dot)
this.exprAllowed = false
else if (update = type.updateContext)
update.call(this, prevType)
else
this.exprAllowed = type.beforeExpr
}
// Token-specific context update code
tt.parenR.updateContext = tt.braceR.updateContext = function() {
if (this.context.length == 1) {
this.exprAllowed = true
return
}
let out = this.context.pop()
if (out === types.b_stat && this.curContext() === types.f_expr) {
this.context.pop()
this.exprAllowed = false
} else if (out === types.b_tmpl) {
this.exprAllowed = true
} else {
this.exprAllowed = !out.isExpr
}
}
tt.braceL.updateContext = function(prevType) {
this.context.push(this.braceIsBlock(prevType) ? types.b_stat : types.b_expr)
this.exprAllowed = true
}
tt.dollarBraceL.updateContext = function() {
this.context.push(types.b_tmpl)
this.exprAllowed = true
}
tt.parenL.updateContext = function(prevType) {
let statementParens = prevType === tt._if || prevType === tt._for || prevType === tt._with || prevType === tt._while
this.context.push(statementParens ? types.p_stat : types.p_expr)
this.exprAllowed = true
}
tt.incDec.updateContext = function() {
// tokExprAllowed stays unchanged
}
tt._function.updateContext = function() {
if (this.curContext() !== types.b_stat)
this.context.push(types.f_expr)
this.exprAllowed = false
}
tt.backQuote.updateContext = function() {
if (this.curContext() === types.q_tmpl)
this.context.pop()
else
this.context.push(types.q_tmpl)
this.exprAllowed = false
}
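To see why this context tracking matters, consider how the same `/` character is read in two positions (a small illustration, assuming the usual `acorn` entry point; not part of the source above):

```js
// Illustration only.
var acorn = require('acorn');

// Here `/` is the division operator.
var div = acorn.parse('a / b');
console.log(div.body[0].expression.type); // 'BinaryExpression'

// Here `/b/` must be read as a regular expression literal.
var re = acorn.parse('if (a) /b/.test(c)');
console.log(re.body[0].consequent.expression.callee.object.type); // 'Literal'
```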
// ## Token types
// The assignment of fine-grained, information-carrying type objects
// allows the tokenizer to store the information it has about a
// token in a way that is very cheap for the parser to look up.
// All token type variables start with an underscore, to make them
// easy to recognize.
// The `beforeExpr` property is used to disambiguate between regular
// expressions and divisions. It is set on all token types that can
// be followed by an expression (thus, a slash after them would be a
// regular expression).
//
// The `startsExpr` property is used to check if the token ends a
// `yield` expression. It is set on all token types that either can
// directly start an expression (like a quotation mark) or can
// continue an expression (like the body of a string).
//
// `isLoop` marks a keyword as starting a loop, which is important
// to know when parsing a label, in order to allow or disallow
// continue jumps to that label.
export class TokenType {
constructor(label, conf = {}) {
this.label = label
this.keyword = conf.keyword
this.beforeExpr = !!conf.beforeExpr
this.startsExpr = !!conf.startsExpr
this.isLoop = !!conf.isLoop
this.isAssign = !!conf.isAssign
this.prefix = !!conf.prefix
this.postfix = !!conf.postfix
this.binop = conf.binop || null
this.updateContext = null
}
}
function binop(name, prec) {
return new TokenType(name, {beforeExpr: true, binop: prec})
}
const beforeExpr = {beforeExpr: true}, startsExpr = {startsExpr: true}
export const types = {
num: new TokenType("num", startsExpr),
regexp: new TokenType("regexp", startsExpr),
string: new TokenType("string", startsExpr),
name: new TokenType("name", startsExpr),
eof: new TokenType("eof"),
// Punctuation token types.
bracketL: new TokenType("[", {beforeExpr: true, startsExpr: true}),
bracketR: new TokenType("]"),
braceL: new TokenType("{", {beforeExpr: true, startsExpr: true}),
braceR: new TokenType("}"),
parenL: new TokenType("(", {beforeExpr: true, startsExpr: true}),
parenR: new TokenType(")"),
comma: new TokenType(",", beforeExpr),
semi: new TokenType(";", beforeExpr),
colon: new TokenType(":", beforeExpr),
dot: new TokenType("."),
question: new TokenType("?", beforeExpr),
arrow: new TokenType("=>", beforeExpr),
template: new TokenType("template"),
ellipsis: new TokenType("...", beforeExpr),
backQuote: new TokenType("`", startsExpr),
dollarBraceL: new TokenType("${", {beforeExpr: true, startsExpr: true}),
// Operators. These carry several kinds of properties to help the
// parser use them properly (the presence of these properties is
// what categorizes them as operators).
//
// `binop`, when present, specifies that this operator is a binary
// operator, and will refer to its precedence.
//
// `prefix` and `postfix` mark the operator as a prefix or postfix
// unary operator.
//
// `isAssign` marks all of `=`, `+=`, `-=` etcetera, which act as
// binary operators with a very low precedence, that should result
// in AssignmentExpression nodes.
eq: new TokenType("=", {beforeExpr: true, isAssign: true}),
assign: new TokenType("_=", {beforeExpr: true, isAssign: true}),
incDec: new TokenType("++/--", {prefix: true, postfix: true, startsExpr: true}),
prefix: new TokenType("prefix", {beforeExpr: true, prefix: true, startsExpr: true}),
logicalOR: binop("||", 1),
logicalAND: binop("&&", 2),
bitwiseOR: binop("|", 3),
bitwiseXOR: binop("^", 4),
bitwiseAND: binop("&", 5),
equality: binop("==/!=", 6),
relational: binop("</>", 7),
bitShift: binop("<</>>", 8),
plusMin: new TokenType("+/-", {beforeExpr: true, binop: 9, prefix: true, startsExpr: true}),
modulo: binop("%", 10),
star: binop("*", 10),
slash: binop("/", 10)
}
// Map keyword names to token types.
export const keywords = {}
// Succinct definitions of keyword token types
function kw(name, options = {}) {
options.keyword = name
keywords[name] = types["_" + name] = new TokenType(name, options)
}
kw("break")
kw("case", beforeExpr)
kw("catch")
kw("continue")
kw("debugger")
kw("default", beforeExpr)
kw("do", {isLoop: true, beforeExpr: true})
kw("else", beforeExpr)
kw("finally")
kw("for", {isLoop: true})
kw("function", startsExpr)
kw("if")
kw("return", beforeExpr)
kw("switch")
kw("throw", beforeExpr)
kw("try")
kw("var")
kw("let")
kw("const")
kw("while", {isLoop: true})
kw("with")
kw("new", {beforeExpr: true, startsExpr: true})
kw("this", startsExpr)
kw("super", startsExpr)
kw("class")
kw("extends", beforeExpr)
kw("export")
kw("import")
kw("yield", {beforeExpr: true, startsExpr: true})
kw("null", startsExpr)
kw("true", startsExpr)
kw("false", startsExpr)
kw("in", {beforeExpr: true, binop: 7})
kw("instanceof", {beforeExpr: true, binop: 7})
kw("typeof", {beforeExpr: true, prefix: true, startsExpr: true})
kw("void", {beforeExpr: true, prefix: true, startsExpr: true})
kw("delete", {beforeExpr: true, prefix: true, startsExpr: true})
export function isArray(obj) {
return Object.prototype.toString.call(obj) === "[object Array]"
}
// Checks if an object has a property.
export function has(obj, propName) {
return Object.prototype.hasOwnProperty.call(obj, propName)
}
// Matches a whole line break (where CRLF is considered a single
// line break). Used to count lines.
export const lineBreak = /\r\n?|\n|\u2028|\u2029/
export const lineBreakG = new RegExp(lineBreak.source, "g")
export function isNewLine(code) {
return code === 10 || code === 13 || code === 0x2028 || code == 0x2029
}
export const nonASCIIwhitespace = /[\u1680\u180e\u2000-\u200a\u202f\u205f\u3000\ufeff]/
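As a quick illustration of the line-break handling above (CRLF counts as a single break, mirroring how `curLine` is derived in the parser state):

```js
// Illustration only; uses the same pattern as `lineBreak` above.
var lineBreak = /\r\n?|\n|\u2028|\u2029/;
var src = 'first\r\nsecond\nthird';
console.log(src.split(lineBreak).length); // 3
```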
amdefine is released under two licenses: new BSD, and MIT. You may pick the
license that best suits your development needs. The text of both licenses are
provided below.
The "New" BSD License:
----------------------
Copyright (c) 2011-2015, The Dojo Foundation
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the Dojo Foundation nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
MIT License
-----------
Copyright (c) 2011-2015, The Dojo Foundation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
# amdefine
A module that can be used to implement AMD's define() in Node. This allows you
to code to the AMD API and have the module work in node programs without
requiring those other programs to use AMD.
## Usage
**1)** Update your package.json to indicate amdefine as a dependency:
```javascript
"dependencies": {
"amdefine": ">=0.1.0"
}
```
Then run `npm install` to get amdefine into your project.
**2)** At the top of each module that uses define(), place this code:
```javascript
if (typeof define !== 'function') { var define = require('amdefine')(module) }
```
**Only use these snippets** when loading amdefine. If you preserve the basic structure,
with the braces, it will be stripped out when using the [RequireJS optimizer](#optimizer).
You can add spaces, line breaks and even require amdefine with a local path, but
keep the rest of the structure to get the stripping behavior.
As you may know, because `if` statements in JavaScript don't have their own scope, the var
declaration in the above snippet is made whether the `if` expression is truthy or not. If
RequireJS is loaded then the declaration is superfluous because `define` is already
declared in the same scope in RequireJS. Fortunately JavaScript handles multiple `var`
declarations of the same variable in the same scope gracefully.
If you want to deliver amdefine.js with your code rather than specifying it as a dependency
with npm, then just download the latest release and refer to it using a relative path:
[Latest Version](https://github.com/jrburke/amdefine/raw/latest/amdefine.js)
### amdefine/intercept
Consider this very experimental.
Instead of pasting the piece of text for the amdefine setup of a `define`
variable in each module you create or consume, you can use `amdefine/intercept`
instead. It will automatically insert the above snippet in each .js file loaded
by Node.
**Warning**: you should only use this if you are creating an application that
consumes AMD-style define()'d modules distributed via npm and you want
to run that code in Node.
For library code, where you are not sure whether it will be used by others in Node or
in the browser, explicitly depending on amdefine and placing the code
snippet above is the suggested path, instead of using `amdefine/intercept`. The
intercept module affects all .js files loaded in the Node app, and it is
inconsiderate to modify global state like that unless you also control
the top-level app.
#### Why distribute AMD-style modules via npm?
npm has a lot of weaknesses for front-end use (installed layout is not great,
should have better support for the `baseUrl + moduleID + '.js'` style of loading,
single file JS installs), but some people want a JS package manager and are
willing to live with those constraints. If that is you, but still want to author
in AMD style modules to get dynamic require([]), better direct source usage and
powerful loader plugin support in the browser, then this tool can help.
#### amdefine/intercept usage
Just require it in your top level app module (for example index.js, server.js):
```javascript
require('amdefine/intercept');
```
The module does not return a value, so no need to assign the result to a local
variable.
Then just require() code as you normally would with Node's require(). Any .js
loaded after the intercept require will have the amdefine check injected in
the .js source as it is loaded. It does not modify the source on disk, just
prepends some content to the text of the module as it is loaded by Node.
#### How amdefine/intercept works
It overrides the `Module._extensions['.js']` in Node to automatically prepend
the amdefine snippet above. So, it will affect any .js file loaded by your
app.
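A simplified sketch of that idea (illustrative only; the real implementation is this package's intercept.js, which additionally strips BOMs and skips amdefine's own files):

```javascript
// Simplified sketch; see intercept.js in this package for the actual code.
var Module = require('module');
var fs = require('fs');

var snippet = "if (typeof define !== 'function') {var define = require('amdefine')(module)}";

Module._extensions['.js'] = function (module, filename) {
  var content = fs.readFileSync(filename, 'utf8');
  module._compile(snippet + content, filename);
};
```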
## define() usage
It is best if you use the anonymous forms of define() in your module:
```javascript
define(function (require) {
var dependency = require('dependency');
});
```
or
```javascript
define(['dependency'], function (dependency) {
});
```
## RequireJS optimizer integration <a name="optimizer"></a>
Version 1.0.3 of the [RequireJS optimizer](http://requirejs.org/docs/optimization.html)
will have support for stripping the `if (typeof define !== 'function')` check
mentioned above, so you can include this snippet for code that runs in the
browser, but avoid taking the cost of the if() statement once the code is
optimized for deployment.
## Node 0.4 Support
If you want to support Node 0.4, then add `require` as the second parameter to amdefine:
```javascript
//Only if you want Node 0.4. If using 0.5 or later, use the above snippet.
if (typeof define !== 'function') { var define = require('amdefine')(module, require) }
```
## Limitations
### Synchronous vs Asynchronous
amdefine creates a define() function that is callable by your code. It will
execute and trace dependencies and call the factory function *synchronously*,
to keep the behavior in line with Node's synchronous dependency tracing.
The exception: calling AMD's callback-style require() from inside a factory
function. The require callback is called on process.nextTick():
```javascript
define(function (require) {
require(['a'], function(a) {
//'a' is loaded synchronously, but
//this callback is called on process.nextTick().
});
});
```
### Loader Plugins
Loader plugins are supported as long as they call their load() callbacks
synchronously. So ones that do network requests will not work. However plugins
like [text](http://requirejs.org/docs/api.html#text) can load text files locally.
The plugin API's `load.fromText()` is **not supported** in amdefine, so this means
transpiler plugins like the [CoffeeScript loader plugin](https://github.com/jrburke/require-cs)
will not work. This may be fixable, but it is a bit complex, and I do not have
enough node-fu to figure it out yet. See the source for amdefine.js if you want
to get an idea of the issues involved.
## Tests
To run the tests, cd to **tests** and run:
```
node all.js
node all-intercept.js
```
## License
New BSD and MIT. Check the LICENSE file for all the details.
/*jshint node: true */
var inserted,
Module = require('module'),
fs = require('fs'),
existingExtFn = Module._extensions['.js'],
amdefineRegExp = /amdefine\.js/;
inserted = "if (typeof define !== 'function') {var define = require('amdefine')(module)}";
//From the node/lib/module.js source:
function stripBOM(content) {
// Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
// because the buffer-to-string conversion in `fs.readFileSync()`
// translates it to FEFF, the UTF-16 BOM.
if (content.charCodeAt(0) === 0xFEFF) {
content = content.slice(1);
}
return content;
}
//Also adapted from the node/lib/module.js source:
function intercept(module, filename) {
var content = stripBOM(fs.readFileSync(filename, 'utf8'));
if (!amdefineRegExp.test(module.id)) {
content = inserted + content;
}
module._compile(content, filename);
}
intercept._id = 'amdefine/intercept';
if (!existingExtFn._id || existingExtFn._id !== intercept._id) {
Module._extensions['.js'] = intercept;
}
{
"_args": [
[
"amdefine@>=0.0.4",
"/home/theowl/projects/tmp/test_task/node_modules/source-map"
]
],
"_from": "amdefine@>=0.0.4",
"_id": "amdefine@1.0.0",
"_inCache": true,
"_installable": true,
"_location": "/amdefine",
"_nodeVersion": "0.10.36",
"_npmUser": {
"email": "jrburke@gmail.com",
"name": "jrburke"
},
"_npmVersion": "2.12.1",
"_phantomChildren": {},
"_requested": {
"name": "amdefine",
"raw": "amdefine@>=0.0.4",
"rawSpec": ">=0.0.4",
"scope": null,
"spec": ">=0.0.4",
"type": "range"
},
"_requiredBy": [
"/source-map"
],
"_resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.0.tgz",
"_shasum": "fd17474700cb5cc9c2b709f0be9d23ce3c198c33",
"_shrinkwrap": null,
"_spec": "amdefine@>=0.0.4",
"_where": "/home/theowl/projects/tmp/test_task/node_modules/source-map",
"author": {
"email": "jrburke@gmail.com",
"name": "James Burke",
"url": "http://github.com/jrburke"
},
"bugs": {
"url": "https://github.com/jrburke/amdefine/issues"
},
"dependencies": {},
"description": "Provide AMD's define() API for declaring modules in the AMD format",
"devDependencies": {},
"directories": {},
"dist": {
"shasum": "fd17474700cb5cc9c2b709f0be9d23ce3c198c33",
"tarball": "http://registry.npmjs.org/amdefine/-/amdefine-1.0.0.tgz"
},
"engines": {
"node": ">=0.4.2"
},
"gitHead": "578bc4a3f7dede33f3f3e10edde0c1607005d761",
"homepage": "http://github.com/jrburke/amdefine",
"license": "BSD-3-Clause AND MIT",
"main": "./amdefine.js",
"maintainers": [
{
"name": "jrburke",
"email": "jrburke@gmail.com"
}
],
"name": "amdefine",
"optionalDependencies": {},
"readme": "ERROR: No README data found!",
"repository": {
"type": "git",
"url": "git+https://github.com/jrburke/amdefine.git"
},
"scripts": {},
"version": "1.0.0"
}
'use strict';
module.exports = function () {
return /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g;
};
The MIT License (MIT)
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
{
"_args": [
[
"ansi-regex@^2.0.0",
"/home/theowl/projects/tmp/test_task/node_modules/has-ansi"
]
],
"_from": "ansi-regex@>=2.0.0 <3.0.0",
"_id": "ansi-regex@2.0.0",
"_inCache": true,
"_installable": true,
"_location": "/ansi-regex",
"_nodeVersion": "0.12.5",
"_npmUser": {
"email": "sindresorhus@gmail.com",
"name": "sindresorhus"
},
"_npmVersion": "2.11.2",
"_phantomChildren": {},
"_requested": {
"name": "ansi-regex",
"raw": "ansi-regex@^2.0.0",
"rawSpec": "^2.0.0",
"scope": null,
"spec": ">=2.0.0 <3.0.0",
"type": "range"
},
"_requiredBy": [
"/has-ansi",
"/strip-ansi"
],
"_resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz",
"_shasum": "c5061b6e0ef8a81775e50f5d66151bf6bf371107",
"_shrinkwrap": null,
"_spec": "ansi-regex@^2.0.0",
"_where": "/home/theowl/projects/tmp/test_task/node_modules/has-ansi",
"author": {
"email": "sindresorhus@gmail.com",
"name": "Sindre Sorhus",
"url": "sindresorhus.com"
},
"bugs": {
"url": "https://github.com/sindresorhus/ansi-regex/issues"
},
"dependencies": {},
"description": "Regular expression for matching ANSI escape codes",
"devDependencies": {
"mocha": "*"
},
"directories": {},
"dist": {
"shasum": "c5061b6e0ef8a81775e50f5d66151bf6bf371107",
"tarball": "http://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz"
},
"engines": {
"node": ">=0.10.0"
},
"files": [
"index.js"
],
"gitHead": "57c3f2941a73079fa8b081e02a522e3d29913e2f",
"homepage": "https://github.com/sindresorhus/ansi-regex",
"keywords": [
"256",
"ansi",
"cli",
"color",
"colors",
"colour",
"command-line",
"console",
"escape",
"find",
"formatting",
"match",
"pattern",
"re",
"regex",
"regexp",
"rgb",
"shell",
"string",
"styles",
"terminal",
"test",
"text",
"tty",
"xterm"
],
"license": "MIT",
"maintainers": [
{
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
},
{
"name": "jbnicolai",
"email": "jappelman@xebia.com"
}
],
"name": "ansi-regex",
"optionalDependencies": {},
"readme": "ERROR: No README data found!",
"repository": {
"type": "git",
"url": "git+https://github.com/sindresorhus/ansi-regex.git"
},
"scripts": {
"test": "mocha test/test.js",
"view-supported": "node test/viewCodes.js"
},
"version": "2.0.0"
}
# ansi-regex [![Build Status](https://travis-ci.org/sindresorhus/ansi-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/ansi-regex)
> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
## Install
```
$ npm install --save ansi-regex
```
## Usage
```js
var ansiRegex = require('ansi-regex');
ansiRegex().test('\u001b[4mcake\u001b[0m');
//=> true
ansiRegex().test('cake');
//=> false
'\u001b[4mcake\u001b[0m'.match(ansiRegex());
//=> ['\u001b[4m', '\u001b[0m']
```
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)
'use strict';
function assembleStyles () {
var styles = {
modifiers: {
reset: [0, 0],
bold: [1, 22], // 21 isn't widely supported and 22 does the same thing
dim: [2, 22],
italic: [3, 23],
underline: [4, 24],
inverse: [7, 27],
hidden: [8, 28],
strikethrough: [9, 29]
},
colors: {
black: [30, 39],
red: [31, 39],
green: [32, 39],
yellow: [33, 39],
blue: [34, 39],
magenta: [35, 39],
cyan: [36, 39],
white: [37, 39],
gray: [90, 39]
},
bgColors: {
bgBlack: [40, 49],
bgRed: [41, 49],
bgGreen: [42, 49],
bgYellow: [43, 49],
bgBlue: [44, 49],
bgMagenta: [45, 49],
bgCyan: [46, 49],
bgWhite: [47, 49]
}
};
// fix humans
styles.colors.grey = styles.colors.gray;
Object.keys(styles).forEach(function (groupName) {
var group = styles[groupName];
Object.keys(group).forEach(function (styleName) {
var style = group[styleName];
styles[styleName] = group[styleName] = {
open: '\u001b[' + style[0] + 'm',
close: '\u001b[' + style[1] + 'm'
};
});
Object.defineProperty(styles, groupName, {
value: group,
enumerable: false
});
});
return styles;
}
Object.defineProperty(module, 'exports', {
enumerable: true,
get: assembleStyles
});
The MIT License (MIT)
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
{
"_args": [
[
"ansi-styles@^2.1.0",
"/home/theowl/projects/tmp/test_task/node_modules/chalk"
]
],
"_from": "ansi-styles@>=2.1.0 <3.0.0",
"_id": "ansi-styles@2.1.0",
"_inCache": true,
"_installable": true,
"_location": "/ansi-styles",
"_nodeVersion": "0.12.4",
"_npmUser": {
"email": "jappelman@xebia.com",
"name": "jbnicolai"
},
"_npmVersion": "2.10.1",
"_phantomChildren": {},
"_requested": {
"name": "ansi-styles",
"raw": "ansi-styles@^2.1.0",
"rawSpec": "^2.1.0",
"scope": null,
"spec": ">=2.1.0 <3.0.0",
"type": "range"
},
"_requiredBy": [
"/chalk"
],
"_resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.1.0.tgz",
"_shasum": "990f747146927b559a932bf92959163d60c0d0e2",
"_shrinkwrap": null,
"_spec": "ansi-styles@^2.1.0",
"_where": "/home/theowl/projects/tmp/test_task/node_modules/chalk",
"author": {
"email": "sindresorhus@gmail.com",
"name": "Sindre Sorhus",
"url": "sindresorhus.com"
},
"bugs": {
"url": "https://github.com/chalk/ansi-styles/issues"
},
"dependencies": {},
"description": "ANSI escape codes for styling strings in the terminal",
"devDependencies": {
"mocha": "*"
},
"directories": {},
"dist": {
"shasum": "990f747146927b559a932bf92959163d60c0d0e2",
"tarball": "http://registry.npmjs.org/ansi-styles/-/ansi-styles-2.1.0.tgz"
},
"engines": {
"node": ">=0.10.0"
},
"files": [
"index.js"
],
"gitHead": "18421cbe4a2d93359ec2599a894f704be126d066",
"homepage": "https://github.com/chalk/ansi-styles",
"keywords": [
"256",
"ansi",
"cli",
"color",
"colors",
"colour",
"command-line",
"console",
"escape",
"formatting",
"log",
"logging",
"rgb",
"shell",
"string",
"styles",
"terminal",
"text",
"tty",
"xterm"
],
"license": "MIT",
"maintainers": [
{
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
},
{
"name": "jbnicolai",
"email": "jappelman@xebia.com"
}
],
"name": "ansi-styles",
"optionalDependencies": {},
"readme": "ERROR: No README data found!",
"repository": {
"type": "git",
"url": "git+https://github.com/chalk/ansi-styles.git"
},
"scripts": {
"test": "mocha"
},
"version": "2.1.0"
}
# ansi-styles [![Build Status](https://travis-ci.org/chalk/ansi-styles.svg?branch=master)](https://travis-ci.org/chalk/ansi-styles)
> [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal
You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings.
![](screenshot.png)
## Install
```
$ npm install --save ansi-styles
```
## Usage
```js
var ansi = require('ansi-styles');
console.log(ansi.green.open + 'Hello world!' + ansi.green.close);
```
## API
Each style has an `open` and `close` property.
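For example, one way to build a small wrapper around those properties (a sketch, not part of the module itself):

```js
// Small helper sketch, not part of ansi-styles.
var ansi = require('ansi-styles');

function wrap(style, text) {
  return style.open + text + style.close;
}

console.log(wrap(ansi.red, 'error') + ' ' + wrap(ansi.bold, 'important'));
```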
## Styles
### Modifiers
- `reset`
- `bold`
- `dim`
- `italic` *(not widely supported)*
- `underline`
- `inverse`
- `hidden`
- `strikethrough` *(not widely supported)*
### Colors
- `black`
- `red`
- `green`
- `yellow`
- `blue`
- `magenta`
- `cyan`
- `white`
- `gray`
### Background colors
- `bgBlack`
- `bgRed`
- `bgGreen`
- `bgYellow`
- `bgBlue`
- `bgMagenta`
- `bgCyan`
- `bgWhite`
## Advanced usage
By default you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module.
- `ansi.modifiers`
- `ansi.colors`
- `ansi.bgColors`
###### Example
```js
console.log(ansi.colors.green.open);
```
## License
MIT © [Sindre Sorhus](http://sindresorhus.com)
The MIT License (MIT)
Copyright (c) 2014 Jonathan Ong me@jongleberry.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
# Array Equal
Check if two arrays are equal:
```js
var equals = require('array-equal')
assert(equals([1, 2, 3], [1, 2, 3])) // => true
assert(equals([1, 2, 3], [1, 2, 3, 4])) // => false
```
{
"name": "array-equal",
"description": "check if two arrays are equal",
"version": "1.0.0",
"license": "MIT",
"repository": "component/array-equal",
"scripts": [
"index.js"
]
}
module.exports = function equal(arr1, arr2) {
var length = arr1.length
if (length !== arr2.length) return false
for (var i = 0; i < length; i++)
if (arr1[i] !== arr2[i])
return false
return true
}
{
"_args": [
[
"array-equal@^1.0.0",
"/home/theowl/projects/tmp/test_task/node_modules/jsdom"
]
],
"_from": "array-equal@>=1.0.0 <2.0.0",
"_id": "array-equal@1.0.0",
"_inCache": true,
"_installable": true,
"_location": "/array-equal",
"_npmUser": {
"email": "jonathanrichardong@gmail.com",
"name": "jongleberry"
},
"_npmVersion": "1.4.6",
"_phantomChildren": {},
"_requested": {
"name": "array-equal",
"raw": "array-equal@^1.0.0",
"rawSpec": "^1.0.0",
"scope": null,
"spec": ">=1.0.0 <2.0.0",
"type": "range"
},
"_requiredBy": [
"/jsdom"
],
"_resolved": "https://registry.npmjs.org/array-equal/-/array-equal-1.0.0.tgz",
"_shasum": "8c2a5ef2472fd9ea742b04c77a75093ba2757c93",
"_shrinkwrap": null,
"_spec": "array-equal@^1.0.0",
"_where": "/home/theowl/projects/tmp/test_task/node_modules/jsdom",
"author": {
"email": "me@jongleberry.com",
"name": "Jonathan Ong",
"url": "http://jongleberry.com"
},
"bugs": {
"url": "https://github.com/component/array-equal/issues"
},
"dependencies": {},
"description": "check if two arrays are equal",
"devDependencies": {},
"directories": {},
"dist": {
"shasum": "8c2a5ef2472fd9ea742b04c77a75093ba2757c93",
"tarball": "http://registry.npmjs.org/array-equal/-/array-equal-1.0.0.tgz"
},
"homepage": "https://github.com/component/array-equal",
"license": "MIT",
"maintainers": [
{
"name": "jongleberry",
"email": "jonathanrichardong@gmail.com"
}
],
"name": "array-equal",
"optionalDependencies": {},
"readme": "ERROR: No README data found!",
"repository": {
"type": "git",
"url": "git://github.com/component/array-equal.git"
},
"version": "1.0.0"
}
language: node_js
node_js:
- 0.8
- 0.10
Copyright (c) 2011 Mark Cavage, All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE
node-asn1 is a library for encoding and decoding ASN.1 datatypes in pure JS.
Currently BER encoding is supported; at some point I'll likely have to do DER.
## Usage
Mostly, if you *actually* need to read and write ASN.1, you probably don't
need this readme to explain what and why. If you have no idea what ASN.1 is,
see this: ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc
The source is pretty much self-explanatory, and has read/write methods for the
common types out there.
### Decoding
The following reads an ASN.1 sequence with a boolean.
    var Ber = require('asn1').Ber;

    var reader = new Ber.Reader(new Buffer([0x30, 0x03, 0x01, 0x01, 0xff]));

    reader.readSequence();
    console.log('Sequence len: ' + reader.length);

    if (reader.peek() === Ber.Boolean)
      console.log(reader.readBoolean());
### Encoding
The following generates the same payload as above.
    var Ber = require('asn1').Ber;

    var writer = new Ber.Writer();

    writer.startSequence();
    writer.writeBoolean(true);
    writer.endSequence();

    console.log(writer.buffer);
## Installation
npm install asn1
## License
MIT.
## Bugs
See <https://github.com/mcavage/node-asn1/issues>.
// Copyright 2011 Mark Cavage <mcavage@gmail.com> All rights reserved.
module.exports = {
newInvalidAsn1Error: function(msg) {
var e = new Error();
e.name = 'InvalidAsn1Error';
e.message = msg || '';
return e;
}
};
// Copyright 2011 Mark Cavage <mcavage@gmail.com> All rights reserved.
var errors = require('./errors');
var types = require('./types');
var Reader = require('./reader');
var Writer = require('./writer');
///--- Exports
module.exports = {
Reader: Reader,
Writer: Writer
};
for (var t in types) {
if (types.hasOwnProperty(t))
module.exports[t] = types[t];
}
for (var e in errors) {
if (errors.hasOwnProperty(e))
module.exports[e] = errors[e];
}
// Copyright 2011 Mark Cavage <mcavage@gmail.com> All rights reserved.
var assert = require('assert');
var ASN1 = require('./types');
var errors = require('./errors');
///--- Globals
var newInvalidAsn1Error = errors.newInvalidAsn1Error;
///--- API
function Reader(data) {
if (!data || !Buffer.isBuffer(data))
throw new TypeError('data must be a node Buffer');
this._buf = data;
this._size = data.length;
// These hold the "current" state
this._len = 0;
this._offset = 0;
}
Object.defineProperty(Reader.prototype, 'length', {
enumerable: true,
get: function () { return (this._len); }
});
Object.defineProperty(Reader.prototype, 'offset', {
enumerable: true,
get: function () { return (this._offset); }
});
Object.defineProperty(Reader.prototype, 'remain', {
get: function () { return (this._size - this._offset); }
});
Object.defineProperty(Reader.prototype, 'buffer', {
get: function () { return (this._buf.slice(this._offset)); }
});
/**
* Reads a single byte and advances offset; you can pass in `true` to make this
* a "peek" operation (i.e., get the byte, but don't advance the offset).
*
* @param {Boolean} peek true means don't move offset.
* @return {Number} the next byte, null if not enough data.
*/
Reader.prototype.readByte = function(peek) {
if (this._size - this._offset < 1)
return null;
var b = this._buf[this._offset] & 0xff;
if (!peek)
this._offset += 1;
return b;
};
Reader.prototype.peek = function() {
return this.readByte(true);
};
/**
* Reads a (potentially) variable length off the BER buffer. This call is
* not really meant to be called directly, as callers have to manipulate
* the internal buffer afterwards.
*
* As a result of this call, you can call `Reader.length`, until the
* next thing called that does a readLength.
*
* @return {Number} the amount of offset to advance the buffer.
* @throws {InvalidAsn1Error} on bad ASN.1
*/
Reader.prototype.readLength = function(offset) {
if (offset === undefined)
offset = this._offset;
if (offset >= this._size)
return null;
var lenB = this._buf[offset++] & 0xff;
if (lenB === null)
return null;
if ((lenB & 0x80) == 0x80) {
lenB &= 0x7f;
if (lenB == 0)
throw newInvalidAsn1Error('Indefinite length not supported');
if (lenB > 4)
throw newInvalidAsn1Error('encoding too long');
if (this._size - offset < lenB)
return null;
this._len = 0;
for (var i = 0; i < lenB; i++)
this._len = (this._len << 8) + (this._buf[offset++] & 0xff);
} else {
// Wasn't a variable length
this._len = lenB;
}
return offset;
};
/**
* Parses the next sequence in this BER buffer.
*
* To get the length of the sequence, call `Reader.length`.
*
* @return {Number} the sequence's tag.
*/
Reader.prototype.readSequence = function(tag) {
var seq = this.peek();
if (seq === null)
return null;
if (tag !== undefined && tag !== seq)
throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) +
': got 0x' + seq.toString(16));
var o = this.readLength(this._offset + 1); // stored in `length`
if (o === null)
return null;
this._offset = o;
return seq;
};
Reader.prototype.readInt = function() {
return this._readTag(ASN1.Integer);
};
Reader.prototype.readBoolean = function() {
return (this._readTag(ASN1.Boolean) === 0 ? false : true);
};
Reader.prototype.readEnumeration = function() {
return this._readTag(ASN1.Enumeration);
};
Reader.prototype.readString = function(tag, retbuf) {
if (!tag)
tag = ASN1.OctetString;
var b = this.peek();
if (b === null)
return null;
if (b !== tag)
throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) +
': got 0x' + b.toString(16));
var o = this.readLength(this._offset + 1); // stored in `length`
if (o === null)
return null;
if (this.length > this._size - o)
return null;
this._offset = o;
if (this.length === 0)
return retbuf ? new Buffer(0) : '';
var str = this._buf.slice(this._offset, this._offset + this.length);
this._offset += this.length;
return retbuf ? str : str.toString('utf8');
};
Reader.prototype.readOID = function(tag) {
if (!tag)
tag = ASN1.OID;
var b = this.readString(tag, true);
if (b === null)
return null;
var values = [];
var value = 0;
for (var i = 0; i < b.length; i++) {
var byte = b[i] & 0xff;
value <<= 7;
value += byte & 0x7f;
if ((byte & 0x80) == 0) {
values.push(value);
value = 0;
}
}
value = values.shift();
values.unshift(value % 40);
values.unshift((value / 40) >> 0);
return values.join('.');
};
Reader.prototype._readTag = function(tag) {
assert.ok(tag !== undefined);
var b = this.peek();
if (b === null)
return null;
if (b !== tag)
throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) +
': got 0x' + b.toString(16));
var o = this.readLength(this._offset + 1); // stored in `length`
if (o === null)
return null;
if (this.length > 4)
throw newInvalidAsn1Error('Integer too long: ' + this.length);
if (this.length > this._size - o)
return null;
this._offset = o;
var fb = this._buf[this._offset];
var value = 0;
for (var i = 0; i < this.length; i++) {
value <<= 8;
value |= (this._buf[this._offset++] & 0xff);
}
if ((fb & 0x80) == 0x80 && i !== 4)
value -= (1 << (i * 8));
return value >> 0;
};
///--- Exported API
module.exports = Reader;
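For a quick sanity check of the reader above, here is a short illustrative example (not part of the library source) that decodes the BER encoding of the OID 1.2.840.113549:

```js
// Illustration only: decode a BER-encoded OID.
var BerReader = require('asn1').BerReader;

var reader = new BerReader(new Buffer([0x06, 0x06, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d]));
console.log(reader.readOID()); // '1.2.840.113549'
```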
// Copyright 2011 Mark Cavage <mcavage@gmail.com> All rights reserved.
module.exports = {
EOC: 0,
Boolean: 1,
Integer: 2,
BitString: 3,
OctetString: 4,
Null: 5,
OID: 6,
ObjectDescriptor: 7,
External: 8,
Real: 9, // float
Enumeration: 10,
PDV: 11,
Utf8String: 12,
RelativeOID: 13,
Sequence: 16,
Set: 17,
NumericString: 18,
PrintableString: 19,
T61String: 20,
VideotexString: 21,
IA5String: 22,
UTCTime: 23,
GeneralizedTime: 24,
GraphicString: 25,
VisibleString: 26,
GeneralString: 28,
UniversalString: 29,
CharacterString: 30,
BMPString: 31,
Constructor: 32,
Context: 128
};
// Copyright 2011 Mark Cavage <mcavage@gmail.com> All rights reserved.
var assert = require('assert');
var ASN1 = require('./types');
var errors = require('./errors');
///--- Globals
var newInvalidAsn1Error = errors.newInvalidAsn1Error;
var DEFAULT_OPTS = {
size: 1024,
growthFactor: 8
};
///--- Helpers
function merge(from, to) {
assert.ok(from);
assert.equal(typeof(from), 'object');
assert.ok(to);
assert.equal(typeof(to), 'object');
var keys = Object.getOwnPropertyNames(from);
keys.forEach(function(key) {
if (to[key])
return;
var value = Object.getOwnPropertyDescriptor(from, key);
Object.defineProperty(to, key, value);
});
return to;
}
///--- API
function Writer(options) {
options = merge(DEFAULT_OPTS, options || {});
this._buf = new Buffer(options.size || 1024);
this._size = this._buf.length;
this._offset = 0;
this._options = options;
// A list of offsets in the buffer where we need to insert
// sequence tag/len pairs.
this._seq = [];
}
Object.defineProperty(Writer.prototype, 'buffer', {
get: function () {
if (this._seq.length)
throw newInvalidAsn1Error(this._seq.length + ' unended sequence(s)');
return (this._buf.slice(0, this._offset));
}
});
Writer.prototype.writeByte = function(b) {
if (typeof(b) !== 'number')
throw new TypeError('argument must be a Number');
this._ensure(1);
this._buf[this._offset++] = b;
};
Writer.prototype.writeInt = function(i, tag) {
if (typeof(i) !== 'number')
throw new TypeError('argument must be a Number');
if (typeof(tag) !== 'number')
tag = ASN1.Integer;
var sz = 4;
while ((((i & 0xff800000) === 0) || ((i & 0xff800000) === 0xff800000 >> 0)) &&
(sz > 1)) {
sz--;
i <<= 8;
}
if (sz > 4)
throw newInvalidAsn1Error('BER ints cannot be > 0xffffffff');
this._ensure(2 + sz);
this._buf[this._offset++] = tag;
this._buf[this._offset++] = sz;
while (sz-- > 0) {
this._buf[this._offset++] = ((i & 0xff000000) >>> 24);
i <<= 8;
}
};
Writer.prototype.writeNull = function() {
this.writeByte(ASN1.Null);
this.writeByte(0x00);
};
Writer.prototype.writeEnumeration = function(i, tag) {
if (typeof(i) !== 'number')
throw new TypeError('argument must be a Number');
if (typeof(tag) !== 'number')
tag = ASN1.Enumeration;
return this.writeInt(i, tag);
};
Writer.prototype.writeBoolean = function(b, tag) {
if (typeof(b) !== 'boolean')
throw new TypeError('argument must be a Boolean');
if (typeof(tag) !== 'number')
tag = ASN1.Boolean;
this._ensure(3);
this._buf[this._offset++] = tag;
this._buf[this._offset++] = 0x01;
this._buf[this._offset++] = b ? 0xff : 0x00;
};
Writer.prototype.writeString = function(s, tag) {
if (typeof(s) !== 'string')
throw new TypeError('argument must be a string (was: ' + typeof(s) + ')');
if (typeof(tag) !== 'number')
tag = ASN1.OctetString;
var len = Buffer.byteLength(s);
this.writeByte(tag);
this.writeLength(len);
if (len) {
this._ensure(len);
this._buf.write(s, this._offset);
this._offset += len;
}
};
Writer.prototype.writeBuffer = function(buf, tag) {
if (typeof(tag) !== 'number')
throw new TypeError('tag must be a number');
if (!Buffer.isBuffer(buf))
throw new TypeError('argument must be a buffer');
this.writeByte(tag);
this.writeLength(buf.length);
this._ensure(buf.length);
buf.copy(this._buf, this._offset, 0, buf.length);
this._offset += buf.length;
};
Writer.prototype.writeStringArray = function(strings) {
if (!(strings instanceof Array))
throw new TypeError('argument must be an Array[String]');
var self = this;
strings.forEach(function(s) {
self.writeString(s);
});
};
// This is really to solve DER cases, but whatever for now
Writer.prototype.writeOID = function(s, tag) {
if (typeof(s) !== 'string')
throw new TypeError('argument must be a string');
if (typeof(tag) !== 'number')
tag = ASN1.OID;
if (!/^([0-9]+\.){3,}[0-9]+$/.test(s))
throw new Error('argument is not a valid OID string');
function encodeOctet(bytes, octet) {
if (octet < 128) {
bytes.push(octet);
} else if (octet < 16384) {
bytes.push((octet >>> 7) | 0x80);
bytes.push(octet & 0x7F);
} else if (octet < 2097152) {
bytes.push((octet >>> 14) | 0x80);
bytes.push(((octet >>> 7) | 0x80) & 0xFF);
bytes.push(octet & 0x7F);
} else if (octet < 268435456) {
bytes.push((octet >>> 21) | 0x80);
bytes.push(((octet >>> 14) | 0x80) & 0xFF);
bytes.push(((octet >>> 7) | 0x80) & 0xFF);
bytes.push(octet & 0x7F);
} else {
bytes.push(((octet >>> 28) | 0x80) & 0xFF);
bytes.push(((octet >>> 21) | 0x80) & 0xFF);
bytes.push(((octet >>> 14) | 0x80) & 0xFF);
bytes.push(((octet >>> 7) | 0x80) & 0xFF);
bytes.push(octet & 0x7F);
}
}
var tmp = s.split('.');
var bytes = [];
bytes.push(parseInt(tmp[0], 10) * 40 + parseInt(tmp[1], 10));
tmp.slice(2).forEach(function(b) {
encodeOctet(bytes, parseInt(b, 10));
});
var self = this;
this._ensure(2 + bytes.length);
this.writeByte(tag);
this.writeLength(bytes.length);
bytes.forEach(function(b) {
self.writeByte(b);
});
};
Writer.prototype.writeLength = function(len) {
if (typeof(len) !== 'number')
throw new TypeError('argument must be a Number');
this._ensure(4);
if (len <= 0x7f) {
this._buf[this._offset++] = len;
} else if (len <= 0xff) {
this._buf[this._offset++] = 0x81;
this._buf[this._offset++] = len;
} else if (len <= 0xffff) {
this._buf[this._offset++] = 0x82;
this._buf[this._offset++] = len >> 8;
this._buf[this._offset++] = len;
} else if (len <= 0xffffff) {
this._buf[this._offset++] = 0x83;
this._buf[this._offset++] = len >> 16;
this._buf[this._offset++] = len >> 8;
this._buf[this._offset++] = len;
} else {
throw newInvalidAsn1Error('Length too long (> 4 bytes)');
}
};
Writer.prototype.startSequence = function(tag) {
if (typeof(tag) !== 'number')
tag = ASN1.Sequence | ASN1.Constructor;
this.writeByte(tag);
this._seq.push(this._offset);
this._ensure(3);
this._offset += 3;
};
Writer.prototype.endSequence = function() {
var seq = this._seq.pop();
var start = seq + 3;
var len = this._offset - start;
if (len <= 0x7f) {
this._shift(start, len, -2);
this._buf[seq] = len;
} else if (len <= 0xff) {
this._shift(start, len, -1);
this._buf[seq] = 0x81;
this._buf[seq + 1] = len;
} else if (len <= 0xffff) {
this._buf[seq] = 0x82;
this._buf[seq + 1] = len >> 8;
this._buf[seq + 2] = len;
} else if (len <= 0xffffff) {
this._shift(start, len, 1);
this._buf[seq] = 0x83;
this._buf[seq + 1] = len >> 16;
this._buf[seq + 2] = len >> 8;
this._buf[seq + 3] = len;
} else {
throw newInvalidAsn1Error('Sequence too long');
}
};
Writer.prototype._shift = function(start, len, shift) {
assert.ok(start !== undefined);
assert.ok(len !== undefined);
assert.ok(shift);
this._buf.copy(this._buf, start + shift, start, start + len);
this._offset += shift;
};
Writer.prototype._ensure = function(len) {
assert.ok(len);
if (this._size - this._offset < len) {
var sz = this._size * this._options.growthFactor;
if (sz - this._offset < len)
sz += len;
var buf = new Buffer(sz);
this._buf.copy(buf, 0, 0, this._offset);
this._buf = buf;
this._size = sz;
}
};
///--- Exported API
module.exports = Writer;
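And the mirror-image check for the writer above, encoding the same OID (again an illustrative example, not part of the library source):

```js
// Illustration only: encode the same OID and inspect the resulting bytes.
var BerWriter = require('asn1').BerWriter;

var writer = new BerWriter();
writer.writeOID('1.2.840.113549');
console.log(writer.buffer); // <Buffer 06 06 2a 86 48 86 f7 0d>
```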
// Copyright 2011 Mark Cavage <mcavage@gmail.com> All rights reserved.
// If you have no idea what ASN.1 or BER is, see this:
// ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc
var Ber = require('./ber/index');
///--- Exported API
module.exports = {
Ber: Ber,
BerReader: Ber.Reader,
BerWriter: Ber.Writer
};
{
"_args": [
[
"asn1@>=0.2.3 <0.3.0",
"/home/theowl/projects/tmp/test_task/node_modules/sshpk"
]
],
"_from": "asn1@>=0.2.3 <0.3.0",
"_id": "asn1@0.2.3",
"_inCache": true,
"_installable": true,
"_location": "/asn1",
"_npmUser": {
"email": "patrick.f.mooney@gmail.com",
"name": "pfmooney"
},
"_npmVersion": "1.4.28",
"_phantomChildren": {},
"_requested": {
"name": "asn1",
"raw": "asn1@>=0.2.3 <0.3.0",
"rawSpec": ">=0.2.3 <0.3.0",
"scope": null,
"spec": ">=0.2.3 <0.3.0",
"type": "range"
},
"_requiredBy": [
"/sshpk"
],
"_resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz",
"_shasum": "dac8787713c9966849fc8180777ebe9c1ddf3b86",
"_shrinkwrap": null,
"_spec": "asn1@>=0.2.3 <0.3.0",
"_where": "/home/theowl/projects/tmp/test_task/node_modules/sshpk",
"author": {
"email": "mcavage@gmail.com",
"name": "Mark Cavage"
},
"bugs": {
"url": "https://github.com/mcavage/node-asn1/issues"
},
"contributors": [
{
"name": "David Gwynne",
"email": "loki@animata.net"
},
{
"name": "Yunong Xiao",
"email": "yunong@joyent.com"
},
{
"name": "Alex Wilson",
"email": "alex.wilson@joyent.com"
}
],
"dependencies": {},
"description": "Contains parsers and serializers for ASN.1 (currently BER only)",
"devDependencies": {
"tap": "0.4.8"
},
"directories": {},
"dist": {
"shasum": "dac8787713c9966849fc8180777ebe9c1ddf3b86",
"tarball": "http://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz"
},
"homepage": "https://github.com/mcavage/node-asn1",
"license": "MIT",
"main": "lib/index.js",
"maintainers": [
{
"name": "mcavage",
"email": "mcavage@gmail.com"
},
{
"name": "pfmooney",
"email": "patrick.f.mooney@gmail.com"
}
],
"name": "asn1",
"optionalDependencies": {},
"readme": "ERROR: No README data found!",
"repository": {
"type": "git",
"url": "git://github.com/mcavage/node-asn1.git"
},
"scripts": {
"test": "tap ./tst"
},
"version": "0.2.3"
}
// Copyright 2011 Mark Cavage <mcavage@gmail.com> All rights reserved.
var test = require('tap').test;
///--- Globals
var BerReader;
///--- Tests
test('load library', function(t) {
BerReader = require('../../lib/index').BerReader;
t.ok(BerReader);
try {
new BerReader();
t.fail('Should have thrown');
} catch (e) {
t.ok(e instanceof TypeError, 'Should have been a type error');
}
t.end();
});
test('read byte', function(t) {
var reader = new BerReader(new Buffer([0xde]));
t.ok(reader);
t.equal(reader.readByte(), 0xde, 'wrong value');
t.end();
});
test('read 1 byte int', function(t) {
var reader = new BerReader(new Buffer([0x02, 0x01, 0x03]));
t.ok(reader);
t.equal(reader.readInt(), 0x03, 'wrong value');
t.equal(reader.length, 0x01, 'wrong length');
t.end();
});
test('read 2 byte int', function(t) {
var reader = new BerReader(new Buffer([0x02, 0x02, 0x7e, 0xde]));
t.ok(reader);
t.equal(reader.readInt(), 0x7ede, 'wrong value');
t.equal(reader.length, 0x02, 'wrong length');
t.end();
});
test('read 3 byte int', function(t) {
var reader = new BerReader(new Buffer([0x02, 0x03, 0x7e, 0xde, 0x03]));
t.ok(reader);
t.equal(reader.readInt(), 0x7ede03, 'wrong value');
t.equal(reader.length, 0x03, 'wrong length');
t.end();
});
test('read 4 byte int', function(t) {
var reader = new BerReader(new Buffer([0x02, 0x04, 0x7e, 0xde, 0x03, 0x01]));
t.ok(reader);
t.equal(reader.readInt(), 0x7ede0301, 'wrong value');
t.equal(reader.length, 0x04, 'wrong length');
t.end();
});
test('read 1 byte negative int', function(t) {
var reader = new BerReader(new Buffer([0x02, 0x01, 0xdc]));
t.ok(reader);
t.equal(reader.readInt(), -36, 'wrong value');
t.equal(reader.length, 0x01, 'wrong length');
t.end();
});
test('read 2 byte negative int', function(t) {
var reader = new BerReader(new Buffer([0x02, 0x02, 0xc0, 0x4e]));
t.ok(reader);
t.equal(reader.readInt(), -16306, 'wrong value');
t.equal(reader.length, 0x02, 'wrong length');
t.end();
});
test('read 3 byte negative int', function(t) {
var reader = new BerReader(new Buffer([0x02, 0x03, 0xff, 0x00, 0x19]));
t.ok(reader);
t.equal(reader.readInt(), -65511, 'wrong value');
t.equal(reader.length, 0x03, 'wrong length');
t.end();
});
test('read 4 byte negative int', function(t) {
var reader = new BerReader(new Buffer([0x02, 0x04, 0x91, 0x7c, 0x22, 0x1f]));
t.ok(reader);
t.equal(reader.readInt(), -1854135777, 'wrong value');
t.equal(reader.length, 0x04, 'wrong length');
t.end();
});
test('read boolean true', function(t) {
var reader = new BerReader(new Buffer([0x01, 0x01, 0xff]));
t.ok(reader);
t.equal(reader.readBoolean(), true, 'wrong value');
t.equal(reader.length, 0x01, 'wrong length');
t.end();
});
test('read boolean false', function(t) {
var reader = new BerReader(new Buffer([0x01, 0x01, 0x00]));
t.ok(reader);
t.equal(reader.readBoolean(), false, 'wrong value');
t.equal(reader.length, 0x01, 'wrong length');
t.end();
});
test('read enumeration', function(t) {
var reader = new BerReader(new Buffer([0x0a, 0x01, 0x20]));
t.ok(reader);
t.equal(reader.readEnumeration(), 0x20, 'wrong value');
t.equal(reader.length, 0x01, 'wrong length');
t.end();
});
test('read string', function(t) {
var dn = 'cn=foo,ou=unit,o=test';
var buf = new Buffer(dn.length + 2);
buf[0] = 0x04;
buf[1] = Buffer.byteLength(dn);
buf.write(dn, 2);
var reader = new BerReader(buf);
t.ok(reader);
t.equal(reader.readString(), dn, 'wrong value');
t.equal(reader.length, dn.length, 'wrong length');
t.end();
});
test('read sequence', function(t) {
var reader = new BerReader(new Buffer([0x30, 0x03, 0x01, 0x01, 0xff]));
t.ok(reader);
t.equal(reader.readSequence(), 0x30, 'wrong value');
t.equal(reader.length, 0x03, 'wrong length');
t.equal(reader.readBoolean(), true, 'wrong value');
t.equal(reader.length, 0x01, 'wrong length');
t.end();
});
test('anonymous LDAPv3 bind', function(t) {
var BIND = new Buffer(14);
BIND[0] = 0x30; // Sequence
BIND[1] = 12; // len
BIND[2] = 0x02; // ASN.1 Integer
BIND[3] = 1; // len
BIND[4] = 0x04; // msgid (make up 4)
BIND[5] = 0x60; // Bind Request
BIND[6] = 7; // len
BIND[7] = 0x02; // ASN.1 Integer
BIND[8] = 1; // len
BIND[9] = 0x03; // v3
BIND[10] = 0x04; // String (bind dn)
BIND[11] = 0; // len
BIND[12] = 0x80; // ContextSpecific (choice)
BIND[13] = 0; // simple bind
// Start testing ^^
var ber = new BerReader(BIND);
t.equal(ber.readSequence(), 48, 'Not an ASN.1 Sequence');
t.equal(ber.length, 12, 'Message length should be 12');
t.equal(ber.readInt(), 4, 'Message id should have been 4');
t.equal(ber.readSequence(), 96, 'Bind Request should have been 96');
t.equal(ber.length, 7, 'Bind length should have been 7');
t.equal(ber.readInt(), 3, 'LDAP version should have been 3');
t.equal(ber.readString(), '', 'Bind DN should have been empty');
t.equal(ber.length, 0, 'string length should have been 0');
t.equal(ber.readByte(), 0x80, 'Should have been ContextSpecific (choice)');
t.equal(ber.readByte(), 0, 'Should have been simple bind');
t.equal(null, ber.readByte(), 'Should be out of data');
t.end();
});
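// For comparison, a sketch (illustrative, not one of the original tests) that
// produces the same fourteen bytes with the package's BerWriter, assuming
// writeInt/writeString and the buffer getter behave as in the published asn1 module.
test('anonymous LDAPv3 bind (writer sketch)', function(t) {
  var BerWriter = require('../../lib/index').BerWriter;
  var w = new BerWriter();
  w.startSequence();        // 0x30 - outer LDAP message
  w.writeInt(4);            // 02 01 04 - message id
  w.startSequence(0x60);    // application tag - Bind Request
  w.writeInt(3);            // 02 01 03 - LDAP v3
  w.writeString('');        // 04 00 - empty bind dn
  w.writeString('', 0x80);  // 80 00 - ContextSpecific (choice): simple bind
  w.endSequence();
  w.endSequence();
  t.equal(w.buffer.length, 14, 'writer output should be 14 bytes');
  t.equal(w.buffer[0], 0x30, 'should start with an ASN.1 Sequence');
  t.equal(w.buffer[1], 12, 'message length should be 12');
  t.end();
});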
test('long string', function(t) {
var buf = new Buffer(256);
var o;
var s =
'2;649;CN=Red Hat CS 71GA Demo,O=Red Hat CS 71GA Demo,C=US;' +
'CN=RHCS Agent - admin01,UID=admin01,O=redhat,C=US [1] This is ' +
'Teena Vradmin\'s description.';
buf[0] = 0x04;
buf[1] = 0x81;
buf[2] = 0x94;
buf.write(s, 3);
var ber = new BerReader(buf.slice(0, 3 + s.length));
t.equal(ber.readString(), s);
t.end();
});
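// Write-side counterpart (illustrative, not one of the original tests): a
// 148-byte (0x94) string should be emitted with the long-form length prefix
// 0x81 0x94, assuming writeString behaves as in the published asn1 module.
test('long string (writer sketch)', function(t) {
  var BerWriter = require('../../lib/index').BerWriter;
  var s = new Array(149).join('x');  // 148 characters, i.e. 0x94 bytes
  var w = new BerWriter();
  w.writeString(s);
  t.equal(w.buffer[0], 0x04, 'tag should be OCTET STRING');
  t.equal(w.buffer[1], 0x81, 'length should use the long form');
  t.equal(w.buffer[2], 0x94, 'length byte should be 0x94');
  t.end();
});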