Update code format and style

This is related to my last commit. I've updated all the JavaScript files to satisfy 'eslint-config-futagozaryuu', my ESLint configuration.

I'm sure I've probably missed something, but I've run all the npm scripts and Gulp tasks, fixed any bugs that cropped up, and updated a few things (mainly related to generated messages), so as far as I can tell, this conversion is done (I know I've probably jinxed it just by saying this ;P).
master
Futago-za Ryuu 7 years ago
parent 3c6523ff83
commit e6d018a88d
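
For context, here is a minimal sketch of what a project-level ESLint config looks like after this conversion, i.e. extending the shared preset and adding a couple of local rule overrides. The values mirror the first config hunk in the diff below; treat the snippet as an illustration rather than a complete file, and note that the file path and any rules not shown in the diff are not specified here.

// Illustrative sketch only: consuming the shared config from a local .eslintrc.js.
// "extends", "root" and the "prefer-rest-params" override are taken from the diff below.
module.exports = {
    "extends": "futagozaryuu/node-v4",
    "root": true,
    "rules": {
        "prefer-rest-params": 0,
    },
};
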

@ -3,6 +3,11 @@
module.exports = {
"extends": "futagozaryuu/node-v4",
"root": true
"root": true,
"rules": {
"prefer-rest-params": 0,
},
};

@ -1,15 +1,15 @@
"use strict";
let fs = require("fs");
let path = require("path");
let peg = require("../");
const fs = require( "fs" );
const path = require( "path" );
const peg = require( "../" );
// Options
let inputFile = null;
let outputFile = null;
let options = {
const options = {
"--": [],
"cache": false,
"dependencies": {},
@ -29,49 +29,68 @@ const OPTIMIZATION_GOALS = ["size", "speed"];
// Helpers
function abort( message ) {
console.error( message );
process.exit( 1 );
}
function addExtraOptions( json ) {
let extraOptions;
try {
extraOptions = JSON.parse( json );
} catch ( e ) {
if (!(e instanceof SyntaxError)) { throw e; }
if ( ! ( e instanceof SyntaxError ) ) throw e;
abort( "Error parsing JSON: " + e.message );
}
if ( typeof extraOptions !== "object" ) {
abort( "The JSON with extra options has to represent an object." );
}
Object
.keys( extraOptions )
.forEach( key => {
options[ key ] = extraOptions[ key ];
} );
}
function formatChoicesList( list ) {
list = list.map( entry => `"${ entry }"` );
let lastOption = list.pop();
const lastOption = list.pop();
return list.length === 0
? lastOption
: list.join( ", " ) + " or " + lastOption;
}
function updateList( list, string ) {
string
.split( "," )
.forEach( entry => {
entry = entry.trim();
if ( list.indexOf( entry ) === -1 ) {
list.push( entry );
}
} );
}
// Arguments
@ -79,23 +98,29 @@ function updateList(list, string) {
let args = process.argv.slice( 2 );
function nextArg( option ) {
if ( args.length === 0 ) {
abort( `Missing parameter of the ${ option } option.` );
}
}
return args.shift();
}
// Parse Arguments
while ( args.length > 0 ) {
let json, mod;
let argument = args.shift();
if ( argument.indexOf( "-" ) === 0 && argument.indexOf( "=" ) > 1 ) {
argument = argument.split( "=" );
args.unshift( argument.length > 2 ? argument.slice( 1 ) : argument[ 1 ] );
argument = argument[ 0 ];
}
switch ( argument ) {
@ -107,9 +132,7 @@ while (args.length > 0) {
case "-a":
case "--allowed-start-rules":
if (!options.allowedStartRules) {
options.allowedStartRules = [];
}
if ( ! options.allowedStartRules ) options.allowedStartRules = [];
updateList( options.allowedStartRules, nextArg( "--allowed-start-rules" ) );
break;
@ -124,14 +147,11 @@ while (args.length > 0) {
case "-d":
case "--dependency":
argument = nextArg( "-d/--dependency" );
if (argument.indexOf(":") === -1) {
mod = [argument, argument];
} else {
mod = argument.split( ":" );
if (mod.length > 2) {
mod[1] = mod.slice(1);
}
}
if ( mod.length === 1 ) mod = [ argument, argument ];
else if ( mod.length > 2 ) mod[ 1 ] = mod.slice( 1 );
options.dependencies[ mod[ 0 ] ] = mod[ 1 ];
break;
@ -149,9 +169,13 @@ while (args.length > 0) {
case "--extra-options-file":
argument = nextArg( "-c/--config/--extra-options-file" );
try {
json = fs.readFileSync( argument, "utf8" );
} catch ( e ) {
abort( `Can't read from file "${ argument }".` );
}
addExtraOptions( json );
break;
@ -160,7 +184,9 @@ while (args.length > 0) {
case "--format":
argument = nextArg( "-f/--format" );
if ( MODULE_FORMATS.indexOf( argument ) === -1 ) {
abort( `Module format must be either ${ formatChoicesList( MODULE_FORMATS ) }.` );
}
options.format = argument;
break;
@ -175,7 +201,9 @@ while (args.length > 0) {
case "--optimize":
argument = nextArg( "-O/--optimize" );
if ( OPTIMIZATION_GOALS.indexOf( argument ) === -1 ) {
abort( `Optimization goal must be either ${ formatChoicesList( OPTIMIZATION_GOALS ) }.` );
}
options.optimize = argument;
break;
@ -189,17 +217,23 @@ while (args.length > 0) {
case "--plugin":
argument = nextArg( "-p/--plugin" );
try {
mod = require( argument );
} catch ( ex1 ) {
if (ex1.code !== "MODULE_NOT_FOUND") { throw ex1; }
if ( ex1.code !== "MODULE_NOT_FOUND" ) throw ex1;
try {
mod = require( path.resolve( argument ) );
} catch ( ex2 ) {
if (ex2.code !== "MODULE_NOT_FOUND") { throw ex2; }
if ( ex2.code !== "MODULE_NOT_FOUND" ) throw ex2;
abort( `Can't load module "${ argument }".` );
}
}
options.plugins.push( mod );
break;
@ -220,36 +254,51 @@ while (args.length > 0) {
default:
if ( inputFile !== null ) {
abort( `Unknown option: "${ argument }".` );
}
inputFile = argument;
}
}
// Validation and defaults
if ( Object.keys( options.dependencies ).length > 0 ) {
if ( DEPENDENCY_FORMATS.indexOf( options.format ) === -1 ) {
abort( `Can't use the -d/--dependency option with the "${ options.format }" module format.` );
}
}
if ( options.exportVar !== null ) {
if ( EXPORT_VAR_FORMATS.indexOf( options.format ) === -1 ) {
abort( `Can't use the -e/--export-var option with the "${ options.format }" module format.` );
}
}
if (inputFile === null) {
inputFile = "-";
}
if ( inputFile === null ) inputFile = "-";
if ( outputFile === null ) {
if (inputFile === "-") {
outputFile = "-";
} else if (inputFile) {
outputFile = inputFile.substr(0, inputFile.length - path.extname(inputFile).length) + ".js";
if ( inputFile === "-" ) outputFile = "-";
else if ( inputFile ) {
outputFile = inputFile
.substr( 0, inputFile.length - path.extname( inputFile ).length )
+ ".js";
}
}
// Export

@ -2,21 +2,33 @@
"use strict";
let fs = require("fs");
let peg = require("../lib/peg");
let options = require("./options");
const fs = require( "fs" );
const peg = require( "../lib/peg" );
const options = require( "./options" );
// Helpers
function readStream( inputStream, callback ) {
let input = "";
inputStream.on("data", data => { input += data; });
inputStream.on("end", () => { callback(input); });
inputStream.on( "data", data => {
input += data;
} );
inputStream.on( "end", () => {
callback( input );
} );
}
function abort( message ) {
console.error( message );
process.exit( 1 );
}
// Main
@ -24,41 +36,66 @@ function abort(message) {
let inputStream, outputStream;
if ( options.inputFile === "-" ) {
process.stdin.resume();
inputStream = process.stdin;
inputStream.on( "error", () => {
abort( `Can't read from file "${ options.inputFile }".` );
} );
} else {
inputStream = fs.createReadStream( options.inputFile );
}
if ( options.outputFile === "-" ) {
outputStream = process.stdout;
} else {
outputStream = fs.createWriteStream( options.outputFile );
outputStream.on( "error", () => {
abort( `Can't write to file "${ options.outputFile }".` );
} );
}
readStream( inputStream, input => {
let location, source;
try {
source = peg.generate( input, options );
} catch ( e ) {
if (e.location !== undefined) {
if ( typeof e.location === "object" ) {
location = e.location.start;
abort(location.line + ":" + location.column + ": " + e.message);
} else {
abort(e.message);
if ( typeof location === "object" ) {
return abort( location.line + ":" + location.column + ": " + e.message );
}
}
return abort( e.message );
}
outputStream.write( source );
if ( outputStream !== process.stdout ) {
outputStream.end();
}
});
} );

@ -1,26 +1,32 @@
"use strict";
let babelify = require("babelify");
let browserify = require("browserify");
let buffer = require("vinyl-buffer");
let del = require("del");
let eslint = require("gulp-eslint");
let gulp = require("gulp");
let header = require("gulp-header");
let mocha = require("gulp-mocha");
let rename = require("gulp-rename");
let runSequence = require("run-sequence");
let source = require("vinyl-source-stream");
let spawn = require("child_process").spawn;
let uglify = require("gulp-uglify");
function execFile(args) {
const version = require( "./package" ).version;
const spawn = require( "child_process" ).spawn;
const gulp = require( "gulp" );
const task = gulp.task.bind( gulp );
const eslint = require( "gulp-eslint" );
const mocha = require( "gulp-mocha" );
const dedent = require( "dedent" );
const browserify = require( "browserify" );
const babelify = require( "babelify" );
const source = require( "vinyl-source-stream" );
const rename = require( "gulp-rename" );
const buffer = require( "vinyl-buffer" );
const uglify = require( "gulp-uglify" );
const header = require( "gulp-header" );
const del = require( "del" );
const runSequence = require( "run-sequence" );
function node( args ) {
return spawn( "node", args.split( " " ), { stdio: "inherit" } );
}
// Run ESLint on all JavaScript files.
gulp.task("lint", () =>
gulp.src([
task( "lint", () => gulp
.src( [
"**/.*rc.js",
"lib/**/*.js",
"!lib/parser.js",
"test/benchmark/**/*.js",
@ -31,36 +37,36 @@ gulp.task("lint", () =>
"bin/*.js",
"gulpfile.js"
] )
.pipe(eslint())
.pipe( eslint( { dotfiles: true } ) )
.pipe( eslint.format() )
.pipe( eslint.failAfterError() )
);
// Run tests.
gulp.task("test", () =>
gulp.src("test/spec/**/*.spec.js", { read: false })
task( "test", () => gulp
.src( "test/spec/**/*.spec.js", { read: false } )
.pipe( mocha() )
);
// Run benchmarks.
gulp.task("benchmark", () => execFile("test/benchmark/run"));
task( "benchmark", () => node( "test/benchmark/run" ) );
// Create the browser build.
gulp.task("browser:build", () => {
const HEADER = [
"//",
"// PEG.js v" + require("./package").version,
"// https://pegjs.org/",
"//",
"// Copyright (c) 2010-2016 David Majda",
"// Copyright (c) 2017+ Futago-za Ryuu",
"//",
"// Licensed under the MIT License.",
"//",
""
]
.map(line => `${line}\n`)
.join("");
task( "browser:build", () => {
const HEADER = dedent`
/**
* PEG.js v${ version }
* https://pegjs.org/
*
* Copyright (c) 2010-2016 David Majda
* Copyright (c) 2017+ Futago-za Ryuu
*
* Released under the MIT License.
*/\n\n
`;
return browserify( "lib/peg.js", { standalone: "peg" } )
.transform( babelify, { presets: "es2015", compact: false } )
@ -73,17 +79,18 @@ gulp.task("browser:build", () => {
.pipe( uglify() )
.pipe( header( HEADER ) )
.pipe( gulp.dest( "browser" ) );
} );
// Delete the browser build.
gulp.task("browser:clean", () => del("browser"));
task( "browser:clean", () => del( "browser" ) );
// Generate the grammar parser.
gulp.task("parser", () =>
execFile("bin/peg src/parser.pegjs -o lib/parser.js")
task( "parser", () =>
node( "bin/peg src/parser.pegjs -o lib/parser.js" )
);
// Default task.
gulp.task("default", cb =>
runSequence("lint", "test", cb)
task( "default", cb =>
runSequence( "benchmark", "test", cb )
);

@ -4,8 +4,16 @@ module.exports = {
"extends": "futagozaryuu/es2015",
"env": {
"commonjs": true
"commonjs": true,
},
"root": true,
"rules": {
"prefer-rest-params": 0,
"strict": 0,
},
"root": true
};

@ -1,49 +1,70 @@
"use strict";
let visitor = require("./visitor");
const visitor = require( "./visitor" );
// AST utilities.
let asts = {
const asts = {
findRule( ast, name ) {
for ( let i = 0; i < ast.rules.length; i++ ) {
if (ast.rules[i].name === name) {
return ast.rules[i];
}
if ( ast.rules[ i ].name === name ) return ast.rules[ i ];
}
return undefined;
return void 0;
},
indexOfRule( ast, name ) {
for ( let i = 0; i < ast.rules.length; i++ ) {
if (ast.rules[i].name === name) {
return i;
}
if ( ast.rules[ i ].name === name ) return i;
}
return -1;
},
alwaysConsumesOnSuccess( ast, node ) {
function consumesTrue() { return true; }
function consumesFalse() { return false; }
let consumes;
function consumesTrue() {
return true;
}
function consumesFalse() {
return false;
}
function consumesExpression( node ) {
return consumes( node.expression );
}
let consumes = visitor.build({
consumes = visitor.build( {
rule: consumesExpression,
named: consumesExpression,
choice( node ) {
return node.alternatives.every( consumes );
},
action: consumesExpression,
sequence( node ) {
return node.elements.some( consumes );
},
labeled: consumesExpression,
@ -58,11 +79,15 @@ let asts = {
semantic_not: consumesFalse,
rule_ref( node ) {
return consumes( asts.findRule( ast, node.name ) );
},
literal( node ) {
return node.value !== "";
},
class: consumesTrue,
@ -70,6 +95,7 @@ let asts = {
} );
return consumes( node );
}
};

@ -1,32 +1,40 @@
"use strict";
let generateBytecode = require("./passes/generate-bytecode");
let generateJS = require("./passes/generate-js");
let removeProxyRules = require("./passes/remove-proxy-rules");
let reportDuplicateLabels = require("./passes/report-duplicate-labels");
let reportDuplicateRules = require("./passes/report-duplicate-rules");
let reportInfiniteRecursion = require("./passes/report-infinite-recursion");
let reportInfiniteRepetition = require("./passes/report-infinite-repetition");
let reportUndefinedRules = require("./passes/report-undefined-rules");
let visitor = require("./visitor");
const generateBytecode = require( "./passes/generate-bytecode" );
const generateJS = require( "./passes/generate-js" );
const removeProxyRules = require( "./passes/remove-proxy-rules" );
const reportDuplicateLabels = require( "./passes/report-duplicate-labels" );
const reportDuplicateRules = require( "./passes/report-duplicate-rules" );
const reportInfiniteRecursion = require( "./passes/report-infinite-recursion" );
const reportInfiniteRepetition = require( "./passes/report-infinite-repetition" );
const reportUndefinedRules = require( "./passes/report-undefined-rules" );
const visitor = require( "./visitor" );
function processOptions( options, defaults ) {
let processedOptions = {};
const processedOptions = {};
Object.keys( options ).forEach( name => {
processedOptions[ name ] = options[ name ];
} );
Object.keys( defaults ).forEach( name => {
if ( ! Object.prototype.hasOwnProperty.call( processedOptions, name ) ) {
processedOptions[ name ] = defaults[ name ];
}
} );
return processedOptions;
}
let compiler = {
const compiler = {
// AST node visitor builder. Useful mainly for plugins which manipulate the
// AST.
visitor: visitor,
@ -58,7 +66,8 @@ let compiler = {
// during the generation and some may protrude to the generated parser and
// cause its malfunction.
compile( ast, passes, options ) {
options = options !== undefined ? options : {};
options = typeof options !== "undefined" ? options : {};
options = processOptions( options, {
allowedStartRules: [ ast.rules[ 0 ].name ],
@ -72,10 +81,17 @@ let compiler = {
} );
Object.keys( passes ).forEach( stage => {
passes[stage].forEach(p => { p(ast, options); });
passes[ stage ].forEach( pass => {
pass( ast, options );
} );
} );
switch ( options.output ) {
case "parser":
return eval( ast.code );
@ -83,8 +99,10 @@ let compiler = {
return ast.code;
default:
throw new Error("Invalid output format: " + options.output + ".");
throw new Error( `Invalid output format: ${ options.output }.` );
}
}
};

@ -1,10 +1,15 @@
"use strict";
function hex(ch) { return ch.charCodeAt(0).toString(16).toUpperCase(); }
function hex( ch ) {
return ch.charCodeAt( 0 ).toString( 16 ).toUpperCase();
}
// JavaScript code generation helpers.
let js = {
const js = {
stringEscape( s ) {
// ECMA-262, 5th ed., 7.8.4: All characters may appear literally in a string
// literal except for the closing quote character, backslash, carriage
// return, line separator, paragraph separator, and line feed. Any character
@ -25,9 +30,11 @@ let js = {
.replace( /[\x10-\x1F\x7F-\xFF]/g, ch => "\\x" + hex( ch ) )
.replace( /[\u0100-\u0FFF]/g, ch => "\\u0" + hex( ch ) )
.replace( /[\u1000-\uFFFF]/g, ch => "\\u" + hex( ch ) );
},
regexpClassEscape( s ) {
// Based on ECMA-262, 5th ed., 7.8.5 & 15.10.1.
//
// For portability, we also escape all control and non-ASCII characters.
@ -48,6 +55,7 @@ let js = {
.replace( /[\x10-\x1F\x7F-\xFF]/g, ch => "\\x" + hex( ch ) )
.replace( /[\u0100-\u0FFF]/g, ch => "\\u0" + hex( ch ) )
.replace( /[\u1000-\uFFFF]/g, ch => "\\u" + hex( ch ) );
}
};

@ -1,7 +1,8 @@
"use strict";
// Bytecode instruction opcodes.
let opcodes = {
const opcodes = {
// Stack Manipulation
PUSH: 0, // PUSH c
@ -49,6 +50,7 @@ let opcodes = {
SILENT_FAILS_ON: 28, // SILENT_FAILS_ON
SILENT_FAILS_OFF: 29 // SILENT_FAILS_OFF
};
module.exports = opcodes;

@ -1,9 +1,9 @@
"use strict";
let asts = require("../asts");
let js = require("../js");
let op = require("../opcodes");
let visitor = require("../visitor");
const asts = require( "../asts" );
const js = require( "../js" );
const op = require( "../opcodes" );
const visitor = require( "../visitor" );
// Generates bytecode.
//
@ -188,53 +188,68 @@ let visitor = require("../visitor");
//
// silentFails--;
function generateBytecode( ast ) {
let consts = [];
const consts = [];
let generate;
function addConst( value ) {
let index = consts.indexOf(value);
const index = consts.indexOf( value );
return index === -1 ? consts.push( value ) - 1 : index;
}
function addFunctionConst( params, code ) {
return addConst(
"function(" + params.join(", ") + ") {" + code + "}"
);
return addConst( `function(${ params.join( ", " ) }) {${ code }}` );
}
function cloneEnv( env ) {
let clone = {};
const clone = {};
Object.keys( env ).forEach( name => {
clone[ name ] = env[ name ];
} );
return clone;
}
function buildSequence() {
return Array.prototype.concat.apply( [], arguments );
}
function buildCondition( condCode, thenCode, elseCode ) {
return condCode.concat(
[ thenCode.length, elseCode.length ],
thenCode,
elseCode
);
}
function buildLoop( condCode, bodyCode ) {
return condCode.concat( [ bodyCode.length ], bodyCode );
}
function buildCall( functionIndex, delta, env, sp ) {
let params = Object.keys(env).map(name => sp - env[name]);
const params = Object.keys( env ).map( name => sp - env[ name ] );
return [ op.CALL, functionIndex, delta, params.length ].concat( params );
}
function buildSimplePredicate( expression, negative, context ) {
return buildSequence(
[ op.PUSH_CURR_POS ],
[ op.SILENT_FAILS_ON ],
@ -258,53 +273,56 @@ function generateBytecode(ast) {
)
)
);
}
function buildSemanticPredicate( code, negative, context ) {
let functionIndex = addFunctionConst(Object.keys(context.env), code);
const functionIndex = addFunctionConst( Object.keys( context.env ), code );
return buildSequence(
[ op.UPDATE_SAVED_POS ],
buildCall( functionIndex, 0, context.env, context.sp ),
buildCondition(
[ op.IF ],
buildSequence(
[op.POP],
negative ? [op.PUSH_FAILED] : [op.PUSH_UNDEFINED]
),
buildSequence(
[op.POP],
negative ? [op.PUSH_UNDEFINED] : [op.PUSH_FAILED]
)
buildSequence( [ op.POP ], negative ? [ op.PUSH_FAILED ] : [ op.PUSH_UNDEFINED ] ),
buildSequence( [ op.POP ], negative ? [ op.PUSH_UNDEFINED ] : [ op.PUSH_FAILED ] )
)
);
}
function buildAppendLoop( expressionCode ) {
return buildLoop(
[ op.WHILE_NOT_ERROR ],
buildSequence( [ op.APPEND ], expressionCode )
);
}
let generate = visitor.build({
generate = visitor.build( {
grammar( node ) {
node.rules.forEach(generate);
node.rules.forEach( generate );
node.consts = consts;
},
rule( node ) {
node.bytecode = generate( node.expression, {
sp: -1, // stack pointer
env: { }, // mapping of label names to stack positions
action: null // action nodes pass themselves to children here
} );
},
named( node, context ) {
let nameIndex = addConst(
"peg$otherExpectation(\"" + js.stringEscape(node.name) + "\")"
const nameIndex = addConst(
`peg$otherExpectation("${ js.stringEscape( node.name ) }")`
);
// The code generated below is slightly suboptimal because |FAIL| pushes
@ -317,18 +335,22 @@ function generateBytecode(ast) {
[ op.SILENT_FAILS_OFF ],
buildCondition( [ op.IF_ERROR ], [ op.FAIL, nameIndex ], [] )
);
},
choice( node, context ) {
function buildAlternativesCode( alternatives, context ) {
return buildSequence(
generate( alternatives[ 0 ], {
sp: context.sp,
env: cloneEnv( context.env ),
action: null
} ),
alternatives.length > 1
? buildCondition(
alternatives.length < 2
? []
: buildCondition(
[ op.IF_ERROR ],
buildSequence(
[ op.POP ],
@ -336,26 +358,28 @@ function generateBytecode(ast) {
),
[]
)
: []
);
}
return buildAlternativesCode( node.alternatives, context );
},
action( node, context ) {
let env = cloneEnv(context.env);
let emitCall = node.expression.type !== "sequence"
|| node.expression.elements.length === 0;
let expressionCode = generate(node.expression, {
const env = cloneEnv( context.env );
const emitCall = node.expression.type !== "sequence" || node.expression.elements.length === 0;
const expressionCode = generate( node.expression, {
sp: context.sp + ( emitCall ? 1 : 0 ),
env: env,
action: node
} );
let functionIndex = addFunctionConst(Object.keys(env), node.code);
const functionIndex = addFunctionConst( Object.keys( env ), node.code );
return emitCall
? buildSequence(
return emitCall === false
? expressionCode
: buildSequence(
[ op.PUSH_CURR_POS ],
expressionCode,
buildCondition(
@ -367,14 +391,17 @@ function generateBytecode(ast) {
[]
),
[ op.NIP ]
)
: expressionCode;
);
},
sequence( node, context ) {
function buildElementsCode( elements, context ) {
if ( elements.length > 0 ) {
let processedCount = node.elements.length - elements.slice(1).length;
const processedCount = node.elements.length - elements.slice( 1 ).length;
return buildSequence(
generate( elements[ 0 ], {
@ -396,9 +423,10 @@ function generateBytecode(ast) {
)
)
);
} else {
if (context.action) {
let functionIndex = addFunctionConst(
} else if ( context.action ) {
const functionIndex = addFunctionConst(
Object.keys( context.env ),
context.action.code
);
@ -412,10 +440,10 @@ function generateBytecode(ast) {
context.sp
)
);
} else {
return buildSequence([op.WRAP, node.elements.length], [op.NIP]);
}
}
return buildSequence( [ op.WRAP, node.elements.length ], [ op.NIP ] );
}
return buildSequence(
@ -426,10 +454,12 @@ function generateBytecode(ast) {
action: context.action
} )
);
},
labeled( node, context ) {
let env = cloneEnv(context.env);
const env = cloneEnv( context.env );
context.env[ node.label ] = context.sp + 1;
@ -438,9 +468,11 @@ function generateBytecode(ast) {
env: env,
action: null
} );
},
text( node, context ) {
return buildSequence(
[ op.PUSH_CURR_POS ],
generate( node.expression, {
@ -454,17 +486,23 @@ function generateBytecode(ast) {
[ op.NIP ]
)
);
},
simple_and( node, context ) {
return buildSimplePredicate( node.expression, false, context );
},
simple_not( node, context ) {
return buildSimplePredicate( node.expression, true, context );
},
optional( node, context ) {
return buildSequence(
generate( node.expression, {
sp: context.sp,
@ -477,10 +515,12 @@ function generateBytecode(ast) {
[]
)
);
},
zero_or_more( node, context ) {
let expressionCode = generate(node.expression, {
const expressionCode = generate( node.expression, {
sp: context.sp + 1,
env: cloneEnv( context.env ),
action: null
@ -492,10 +532,12 @@ function generateBytecode(ast) {
buildAppendLoop( expressionCode ),
[ op.POP ]
);
},
one_or_more( node, context ) {
let expressionCode = generate(node.expression, {
const expressionCode = generate( node.expression, {
sp: context.sp + 1,
env: cloneEnv( context.env ),
action: null
@ -510,39 +552,47 @@ function generateBytecode(ast) {
buildSequence( [ op.POP ], [ op.POP ], [ op.PUSH_FAILED ] )
)
);
},
group( node, context ) {
return generate( node.expression, {
sp: context.sp,
env: cloneEnv( context.env ),
action: null
} );
},
semantic_and( node, context ) {
return buildSemanticPredicate( node.code, false, context );
},
semantic_not( node, context ) {
return buildSemanticPredicate( node.code, true, context );
},
rule_ref( node ) {
return [ op.RULE, asts.indexOfRule( ast, node.name ) ];
},
literal( node ) {
if ( node.value.length > 0 ) {
let stringIndex = addConst("\""
+ js.stringEscape(
const stringIndex = addConst( `"${ js.stringEscape(
node.ignoreCase ? node.value.toLowerCase() : node.value
)
+ "\""
);
let expectedIndex = addConst(
) }"` );
const expectedIndex = addConst(
"peg$literalExpectation("
+ "\"" + js.stringEscape(node.value) + "\", "
+ `"${ js.stringEscape( node.value ) }", `
+ node.ignoreCase
+ ")"
);
@ -559,33 +609,42 @@ function generateBytecode(ast) {
: [ op.ACCEPT_STRING, stringIndex ],
[ op.FAIL, expectedIndex ]
);
} else {
let stringIndex = addConst("\"\"");
return [op.PUSH, stringIndex];
}
const stringIndex = addConst( "\"\"" );
return [ op.PUSH, stringIndex ];
},
class( node ) {
let regexp = "/^["
const regexp = "/^["
+ ( node.inverted ? "^" : "" )
+ node.parts.map(part =>
Array.isArray(part)
+ node.parts
.map( part =>
( Array.isArray( part )
? js.regexpClassEscape( part[ 0 ] )
+ "-"
+ js.regexpClassEscape( part[ 1 ] )
: js.regexpClassEscape(part)
).join("")
+ "]/" + (node.ignoreCase ? "i" : "");
let parts = "["
+ node.parts.map(part =>
Array.isArray(part)
? "[\"" + js.stringEscape(part[0]) + "\", \"" + js.stringEscape(part[1]) + "\"]"
: "\"" + js.stringEscape(part) + "\""
).join(", ")
: js.regexpClassEscape( part ) )
)
.join( "" )
+ "]/"
+ ( node.ignoreCase ? "i" : "" );
const parts = "["
+ node.parts
.map( part =>
( Array.isArray( part )
? `["${ js.stringEscape( part[ 0 ] ) }", "${ js.stringEscape( part[ 1 ] ) }"]`
: "\"" + js.stringEscape( part ) + "\"" )
)
.join( ", " )
+ "]";
let regexpIndex = addConst(regexp);
let expectedIndex = addConst(
const regexpIndex = addConst( regexp );
const expectedIndex = addConst(
"peg$classExpectation("
+ parts + ", "
+ node.inverted + ", "
@ -598,20 +657,24 @@ function generateBytecode(ast) {
[ op.ACCEPT_N, 1 ],
[ op.FAIL, expectedIndex ]
);
},
any() {
let expectedIndex = addConst("peg$anyExpectation()");
const expectedIndex = addConst( "peg$anyExpectation()" );
return buildCondition(
[ op.MATCH_ANY ],
[ op.ACCEPT_N, 1 ],
[ op.FAIL, expectedIndex ]
);
}
} );
generate( ast );
}
module.exports = generateBytecode;

@ -1,44 +1,65 @@
/* eslint no-mixed-operators: 0, prefer-const: 0 */
"use strict";
let asts = require("../asts");
let js = require("../js");
let op = require("../opcodes");
const asts = require( "../asts" );
const js = require( "../js" );
const op = require( "../opcodes" );
// Generates parser JavaScript code.
function generateJS( ast, options ) {
/* These only indent non-empty lines to avoid trailing whitespace. */
const lineMatchRE = /^([^`\r\n]+?(?:`[^`]*?`[^\r\n]*?)?)$/gm;
function indent2(code) { return code.replace(lineMatchRE, " $1"); }
function indent10(code) { return code.replace(lineMatchRE, " $1"); }
function indent2( code ) {
return code.replace( lineMatchRE, " $1" );
}
function indent10( code ) {
return code.replace( lineMatchRE, " $1" );
}
function generateTables() {
if ( options.optimize === "size" ) {
return [
"var peg$consts = [",
indent2( ast.consts.join( ",\n" ) ),
"];",
"",
"var peg$bytecode = [",
indent2(ast.rules.map(rule =>
"peg$decode(\""
+ js.stringEscape(rule.bytecode.map(
b => String.fromCharCode(b + 32)
).join(""))
+ "\")"
).join(",\n")),
indent2( ast.rules
.map( rule =>
`peg$decode("${
js.stringEscape( rule.bytecode
.map( b => String.fromCharCode( b + 32 ) )
.join( "" )
)
}")`
)
.join( ",\n" )
),
"];"
].join( "\n" );
} else {
return ast.consts.map((c, i) => "var peg$c" + i + " = " + c + ";").join("\n");
}
return ast.consts.map( ( c, i ) => "var peg$c" + i + " = " + c + ";" ).join( "\n" );
}
function generateRuleHeader( ruleNameCode, ruleIndexCode ) {
let parts = [];
const parts = [];
parts.push( "" );
if ( options.trace ) {
parts.push( [
"peg$tracer.trace({",
" type: \"rule.enter\",",
@ -47,9 +68,11 @@ function generateJS(ast, options) {
"});",
""
].join( "\n" ) );
}
if ( options.cache ) {
parts.push( [
"var key = peg$currPos * " + ast.rules.length + " + " + ruleIndexCode + ";",
"var cached = peg$resultsCache[key];",
@ -60,6 +83,7 @@ function generateJS(ast, options) {
].join( "\n" ) );
if ( options.trace ) {
parts.push( [
"if (cached.result !== peg$FAILED) {",
" peg$tracer.trace({",
@ -77,6 +101,7 @@ function generateJS(ast, options) {
"}",
""
].join( "\n" ) );
}
parts.push( [
@ -84,22 +109,28 @@ function generateJS(ast, options) {
"}",
""
].join( "\n" ) );
}
return parts.join( "\n" );
}
function generateRuleFooter( ruleNameCode, resultCode ) {
let parts = [];
const parts = [];
if ( options.cache ) {
parts.push( [
"",
"peg$resultsCache[key] = { nextPos: peg$currPos, result: " + resultCode + " };"
].join( "\n" ) );
}
if ( options.trace ) {
parts.push( [
"",
"if (" + resultCode + " !== peg$FAILED) {",
@ -117,6 +148,7 @@ function generateJS(ast, options) {
" });",
"}"
].join( "\n" ) );
}
parts.push( [
@ -125,15 +157,18 @@ function generateJS(ast, options) {
].join( "\n" ) );
return parts.join( "\n" );
}
function generateInterpreter() {
let parts = [];
const parts = [];
function generateCondition( cond, argsLength ) {
let baseLength = argsLength + 3;
let thenLengthCode = "bc[ip + " + (baseLength - 2) + "]";
let elseLengthCode = "bc[ip + " + (baseLength - 1) + "]";
const baseLength = argsLength + 3;
const thenLengthCode = "bc[ip + " + ( baseLength - 2 ) + "]";
const elseLengthCode = "bc[ip + " + ( baseLength - 1 ) + "]";
return [
"ends.push(end);",
@ -149,11 +184,13 @@ function generateJS(ast, options) {
"",
"break;"
].join( "\n" );
}
function generateLoop( cond ) {
let baseLength = 2;
let bodyLengthCode = "bc[ip + " + (baseLength - 1) + "]";
const baseLength = 2;
const bodyLengthCode = "bc[ip + " + ( baseLength - 1 ) + "]";
return [
"if (" + cond + ") {",
@ -168,11 +205,13 @@ function generateJS(ast, options) {
"",
"break;"
].join( "\n" );
}
function generateCall() {
let baseLength = 4;
let paramsLengthCode = "bc[ip + " + (baseLength - 1) + "]";
const baseLength = 4;
const paramsLengthCode = "bc[ip + " + ( baseLength - 1 ) + "]";
return [
"params = bc.slice(ip + " + baseLength + ", ip + " + baseLength + " + " + paramsLengthCode + ")",
@ -187,6 +226,7 @@ function generateJS(ast, options) {
"ip += " + baseLength + " + " + paramsLengthCode + ";",
"break;"
].join( "\n" );
}
parts.push( [
@ -198,6 +238,7 @@ function generateJS(ast, options) {
].join( "\n" ) );
if ( options.trace ) {
parts.push( [
" var bc = peg$bytecode[index];",
" var ip = 0;",
@ -208,7 +249,9 @@ function generateJS(ast, options) {
" var startPos = peg$currPos;",
" var params;"
].join( "\n" ) );
} else {
parts.push( [
" var bc = peg$bytecode[index];",
" var ip = 0;",
@ -218,6 +261,7 @@ function generateJS(ast, options) {
" var stack = [];",
" var params;"
].join( "\n" ) );
}
parts.push( indent2( generateRuleHeader( "peg$ruleNames[index]", "index" ) ) );
@ -401,64 +445,80 @@ function generateJS(ast, options) {
parts.push( "}" );
return parts.join( "\n" );
}
function generateRuleFunction( rule ) {
let parts = [];
let stackVars = [];
let code;
function c(i) { return "peg$c" + i; } // |consts[i]| of the abstract machine
function s(i) { return "s" + i; } // |stack[i]| of the abstract machine
const parts = [];
const stackVars = [];
function c( i ) {
return "peg$c" + i;
let stack = {
} // |consts[i]| of the abstract machine
function s( i ) {
return "s" + i;
} // |stack[i]| of the abstract machine
const stack = {
sp: -1,
maxSp: -1,
push( exprCode ) {
let code = s(++this.sp) + " = " + exprCode + ";";
if (this.sp > this.maxSp) { this.maxSp = this.sp; }
const code = s( ++this.sp ) + " = " + exprCode + ";";
if ( this.sp > this.maxSp ) this.maxSp = this.sp;
return code;
},
pop( n ) {
if (n === undefined) {
return s(this.sp--);
} else {
let values = Array(n);
if ( typeof n === "undefined" ) return s( this.sp-- );
const values = Array( n );
for ( let i = 0; i < n; i++ ) {
values[ i ] = s( this.sp - n + 1 + i );
}
this.sp -= n;
return values;
}
},
top() {
return s( this.sp );
},
index( i ) {
return s( this.sp - i );
}
};
function compile( bc ) {
let ip = 0;
let end = bc.length;
let parts = [];
const end = bc.length;
const parts = [];
let value;
function compileCondition( cond, argCount ) {
let baseLength = argCount + 3;
let thenLength = bc[ip + baseLength - 2];
let elseLength = bc[ip + baseLength - 1];
let baseSp = stack.sp;
const baseLength = argCount + 3;
const thenLength = bc[ ip + baseLength - 2 ];
const elseLength = bc[ ip + baseLength - 1 ];
const baseSp = stack.sp;
let thenCode, elseCode, thenSp, elseSp;
ip += baseLength;
@ -467,31 +527,39 @@ function generateJS(ast, options) {
ip += thenLength;
if ( elseLength > 0 ) {
stack.sp = baseSp;
elseCode = compile( bc.slice( ip, ip + elseLength ) );
elseSp = stack.sp;
ip += elseLength;
if ( thenSp !== elseSp ) {
throw new Error(
"Branches of a condition must move the stack pointer in the same way."
);
}
}
parts.push( "if (" + cond + ") {" );
parts.push( indent2( thenCode ) );
if ( elseLength > 0 ) {
parts.push( "} else {" );
parts.push( indent2( elseCode ) );
}
parts.push( "}" );
}
function compileLoop( cond ) {
let baseLength = 2;
let bodyLength = bc[ip + baseLength - 1];
let baseSp = stack.sp;
const baseLength = 2;
const bodyLength = bc[ ip + baseLength - 1 ];
const baseSp = stack.sp;
let bodyCode, bodySp;
ip += baseLength;
@ -500,30 +568,40 @@ function generateJS(ast, options) {
ip += bodyLength;
if ( bodySp !== baseSp ) {
throw new Error( "Body of a loop can't move the stack pointer." );
}
parts.push( "while (" + cond + ") {" );
parts.push( indent2( bodyCode ) );
parts.push( "}" );
}
function compileCall() {
let baseLength = 4;
let paramsLength = bc[ip + baseLength - 1];
let value = c(bc[ip + 1]) + "("
+ bc.slice(ip + baseLength, ip + baseLength + paramsLength).map(
p => stack.index(p)
).join(", ")
const baseLength = 4;
const paramsLength = bc[ ip + baseLength - 1 ];
const value = c( bc[ ip + 1 ] )
+ "("
+ bc
.slice( ip + baseLength, ip + baseLength + paramsLength )
.map( p => stack.index( p ) )
.join( ", " )
+ ")";
stack.pop( bc[ ip + 2 ] );
parts.push( stack.push( value ) );
ip += baseLength + paramsLength;
}
while ( ip < end ) {
switch ( bc[ ip ] ) {
case op.PUSH: // PUSH c
parts.push( stack.push( c( bc[ ip + 1 ] ) ) );
ip += 2;
@ -624,8 +702,8 @@ function generateJS(ast, options) {
+ ") === "
+ c( bc[ ip + 1 ] )
: "input.charCodeAt(peg$currPos) === "
+ eval(ast.consts[bc[ip + 1]]).charCodeAt(0),
1
+ eval( ast.consts[ bc[ ip + 1 ] ] ).charCodeAt( 0 )
, 1
);
break;
@ -634,16 +712,13 @@ function generateJS(ast, options) {
"input.substr(peg$currPos, "
+ eval( ast.consts[ bc[ ip + 1 ] ] ).length
+ ").toLowerCase() === "
+ c(bc[ip + 1]),
1
+ c( bc[ ip + 1 ] )
, 1
);
break;
case op.MATCH_REGEXP: // MATCH_REGEXP r, a, f, ...
compileCondition(
c(bc[ip + 1]) + ".test(input.charAt(peg$currPos))",
1
);
compileCondition( c( bc[ ip + 1 ] ) + ".test(input.charAt(peg$currPos))", 1 );
break;
case op.ACCEPT_N: // ACCEPT_N n
@ -707,22 +782,29 @@ function generateJS(ast, options) {
default:
throw new Error( "Invalid opcode: " + bc[ ip ] + "." );
}
}
return parts.join( "\n" );
}
code = compile(rule.bytecode);
const code = compile( rule.bytecode );
parts.push( "function peg$parse" + rule.name + "() {" );
if ( options.trace ) {
parts.push( " var startPos = peg$currPos;" );
}
for ( let i = 0; i <= stack.maxSp; i++ ) {
stackVars[ i ] = s( i );
}
parts.push( " var " + stackVars.join( ", " ) + ";" );
@ -740,10 +822,12 @@ function generateJS(ast, options) {
parts.push( "}" );
return parts.join( "\n" );
}
function generateToplevel() {
let parts = [];
const parts = [];
parts.push( [
"function peg$subclass(child, parent) {",
@ -869,6 +953,7 @@ function generateJS(ast, options) {
].join( "\n" ) );
if ( options.trace ) {
parts.push( [
"function peg$DefaultTracer() {",
" this.indentLevel = 0;",
@ -924,6 +1009,7 @@ function generateJS(ast, options) {
"};",
""
].join( "\n" ) );
}
parts.push( [
@ -935,29 +1021,33 @@ function generateJS(ast, options) {
].join( "\n" ) );
if ( options.optimize === "size" ) {
let startRuleIndices = "{ "
+ options.allowedStartRules.map(
r => r + ": " + asts.indexOfRule(ast, r)
).join(", ")
const startRuleIndices = "{ "
+ options.allowedStartRules
.map( r => r + ": " + asts.indexOfRule( ast, r ) )
.join( ", " )
+ " }";
let startRuleIndex = asts.indexOfRule(ast, options.allowedStartRules[0]);
const startRuleIndex = asts.indexOfRule( ast, options.allowedStartRules[ 0 ] );
parts.push( [
" var peg$startRuleIndices = " + startRuleIndices + ";",
" var peg$startRuleIndex = " + startRuleIndex + ";"
].join( "\n" ) );
} else {
let startRuleFunctions = "{ "
+ options.allowedStartRules.map(
r => r + ": peg$parse" + r
).join(", ")
const startRuleFunctions = "{ "
+ options.allowedStartRules
.map( r => r + ": peg$parse" + r )
.join( ", " )
+ " }";
let startRuleFunction = "peg$parse" + options.allowedStartRules[0];
const startRuleFunction = "peg$parse" + options.allowedStartRules[ 0 ];
parts.push( [
" var peg$startRuleFunctions = " + startRuleFunctions + ";",
" var peg$startRuleFunction = " + startRuleFunction + ";"
].join( "\n" ) );
}
parts.push( "" );
@ -976,30 +1066,36 @@ function generateJS(ast, options) {
].join( "\n" ) );
if ( options.cache ) {
parts.push( [
" var peg$resultsCache = {};",
""
].join( "\n" ) );
}
if ( options.trace ) {
if ( options.optimize === "size" ) {
let ruleNames = "["
+ ast.rules.map(
r => "\"" + js.stringEscape(r.name) + "\""
).join(", ")
const ruleNames = "["
+ ast.rules
.map( r => `"${ js.stringEscape( r.name ) }"` )
.join( ", " )
+ "]";
parts.push( [
" var peg$ruleNames = " + ruleNames + ";",
""
].join( "\n" ) );
}
parts.push( [
" var peg$tracer = \"tracer\" in options ? options.tracer : new peg$DefaultTracer();",
""
].join( "\n" ) );
}
parts.push( [
@ -1008,6 +1104,7 @@ function generateJS(ast, options) {
].join( "\n" ) );
if ( options.optimize === "size" ) {
parts.push( [
" if (\"startRule\" in options) {",
" if (!(options.startRule in peg$startRuleIndices)) {",
@ -1017,7 +1114,9 @@ function generateJS(ast, options) {
" peg$startRuleIndex = peg$startRuleIndices[options.startRule];",
" }"
].join( "\n" ) );
} else {
parts.push( [
" if (\"startRule\" in options) {",
" if (!(options.startRule in peg$startRuleFunctions)) {",
@ -1027,6 +1126,7 @@ function generateJS(ast, options) {
" peg$startRuleFunction = peg$startRuleFunctions[options.startRule];",
" }"
].join( "\n" ) );
}
parts.push( [
@ -1167,24 +1267,36 @@ function generateJS(ast, options) {
].join( "\n" ) );
if ( options.optimize === "size" ) {
parts.push( indent2( generateInterpreter() ) );
parts.push( "" );
} else {
ast.rules.forEach( rule => {
parts.push( indent2( generateRuleFunction( rule ) ) );
parts.push( "" );
} );
}
if ( ast.initializer ) {
parts.push( indent2( ast.initializer.code ) );
parts.push( "" );
}
if ( options.optimize === "size" ) {
parts.push( " peg$result = peg$parseRule(peg$startRuleIndex);" );
} else {
parts.push( " peg$result = peg$startRuleFunction();" );
}
parts.push( [
@ -1208,18 +1320,23 @@ function generateJS(ast, options) {
].join( "\n" ) );
return parts.join( "\n" );
}
function generateWrapper( toplevelCode ) {
function generateGeneratedByComment() {
return [
"// Generated by PEG.js 0.10.0.",
"//",
"// https://pegjs.org/"
].join( "\n" );
}
function generateParserObject() {
return options.trace
? [
"{",
@ -1234,9 +1351,11 @@ function generateJS(ast, options) {
" parse: peg$parse",
"}"
].join( "\n" );
}
function generateParserExports() {
return options.trace
? [
"{",
@ -1251,10 +1370,12 @@ function generateJS(ast, options) {
" peg$parse as parse",
"}"
].join( "\n" );
}
let generators = {
const generators = {
bare() {
return [
generateGeneratedByComment(),
"(function() {",
@ -1265,11 +1386,13 @@ function generateJS(ast, options) {
indent2( "return " + generateParserObject() + ";" ),
"})()"
].join( "\n" );
},
commonjs() {
let parts = [];
let dependencyVars = Object.keys(options.dependencies);
const parts = [];
const dependencyVars = Object.keys( options.dependencies );
parts.push( [
generateGeneratedByComment(),
@ -1279,14 +1402,18 @@ function generateJS(ast, options) {
].join( "\n" ) );
if ( dependencyVars.length > 0 ) {
dependencyVars.forEach( variable => {
parts.push( "var " + variable
+ " = require(\""
+ js.stringEscape( options.dependencies[ variable ] )
+ "\");"
);
} );
parts.push( "" );
}
parts.push( [
@ -1297,11 +1424,13 @@ function generateJS(ast, options) {
].join( "\n" ) );
return parts.join( "\n" );
},
es() {
let parts = [];
let dependencyVars = Object.keys(options.dependencies);
const parts = [];
const dependencyVars = Object.keys( options.dependencies );
parts.push(
generateGeneratedByComment(),
@ -1309,14 +1438,18 @@ function generateJS(ast, options) {
);
if ( dependencyVars.length > 0 ) {
dependencyVars.forEach( variable => {
parts.push( "import " + variable
+ " from \""
+ js.stringEscape( options.dependencies[ variable ] )
+ "\";"
);
} );
parts.push( "" );
}
parts.push(
@ -1329,17 +1462,19 @@ function generateJS(ast, options) {
);
return parts.join( "\n" );
},
amd() {
let dependencyVars = Object.keys(options.dependencies);
let dependencyIds = dependencyVars.map(v => options.dependencies[v]);
let dependencies = "["
+ dependencyIds.map(
id => "\"" + js.stringEscape(id) + "\""
).join(", ")
const dependencyVars = Object.keys( options.dependencies );
const dependencyIds = dependencyVars.map( v => options.dependencies[ v ] );
const dependencies = "["
+ dependencyIds
.map( id => `"${ js.stringEscape( id ) }"` )
.join( ", " )
+ "]";
let params = dependencyVars.join(", ");
const params = dependencyVars.join( ", " );
return [
generateGeneratedByComment(),
@ -1352,9 +1487,11 @@ function generateJS(ast, options) {
"});",
""
].join( "\n" );
},
globals() {
return [
generateGeneratedByComment(),
"(function(root) {",
@ -1366,21 +1503,23 @@ function generateJS(ast, options) {
"})(this);",
""
].join( "\n" );
},
umd() {
let parts = [];
let dependencyVars = Object.keys(options.dependencies);
let dependencyIds = dependencyVars.map(v => options.dependencies[v]);
let dependencies = "["
+ dependencyIds.map(
id => "\"" + js.stringEscape(id) + "\""
).join(", ")
const parts = [];
const dependencyVars = Object.keys( options.dependencies );
const dependencyIds = dependencyVars.map( v => options.dependencies[ v ] );
const dependencies = "["
+ dependencyIds
.map( id => `"${ js.stringEscape( id ) }"` )
.join( ", " )
+ "]";
let requires = dependencyIds.map(
id => "require(\"" + js.stringEscape(id) + "\")"
).join(", ");
let params = dependencyVars.join(", ");
const requires = dependencyIds
.map( id => `require("${ js.stringEscape( id ) }")` )
.join( ", " );
const params = dependencyVars.join( ", " );
parts.push( [
generateGeneratedByComment(),
@ -1392,10 +1531,12 @@ function generateJS(ast, options) {
].join( "\n" ) );
if ( options.exportVar !== null ) {
parts.push( [
" } else {",
" root." + options.exportVar + " = factory();"
].join( "\n" ) );
}
parts.push( [
@ -1411,13 +1552,16 @@ function generateJS(ast, options) {
].join( "\n" ) );
return parts.join( "\n" );
}
};
return generators[ options.format ]();
}
ast.code = generateWrapper( generateToplevel() );
}
module.exports = generateJS;

@ -1,39 +1,59 @@
"use strict";
let visitor = require("../visitor");
const visitor = require( "../visitor" );
// Removes proxy rules -- that is, rules that only delegate to other rule.
function removeProxyRules( ast, options ) {
function isProxyRule( node ) {
return node.type === "rule" && node.expression.type === "rule_ref";
}
function replaceRuleRefs( ast, from, to ) {
let replace = visitor.build({
const replace = visitor.build( {
rule_ref( node ) {
if ( node.name === from ) {
node.name = to;
}
}
} );
replace( ast );
}
let indices = [];
const indices = [];
ast.rules.forEach( ( rule, i ) => {
if ( isProxyRule( rule ) ) {
replaceRuleRefs( ast, rule.name, rule.expression.name );
if ( options.allowedStartRules.indexOf( rule.name ) === -1 ) {
indices.push( i );
}
}
} );
indices.reverse();
indices.forEach(i => { ast.rules.splice(i, 1); });
indices.forEach( i => {
ast.rules.splice( i, 1 );
} );
}
module.exports = removeProxyRules;

@ -1,50 +1,70 @@
"use strict";
let GrammarError = require("../../grammar-error");
let visitor = require("../visitor");
const GrammarError = require( "../../grammar-error" );
const visitor = require( "../visitor" );
// Checks that each label is defined only once within each scope.
function reportDuplicateLabels( ast ) {
let check;
function cloneEnv( env ) {
let clone = {};
const clone = {};
Object.keys( env ).forEach( name => {
clone[ name ] = env[ name ];
} );
return clone;
}
function checkExpressionWithClonedEnv( node, env ) {
check( node.expression, cloneEnv( env ) );
}
let check = visitor.build({
check = visitor.build( {
rule( node ) {
check( node.expression, {} );
},
choice( node, env ) {
node.alternatives.forEach( alternative => {
check( alternative, cloneEnv( env ) );
} );
},
action: checkExpressionWithClonedEnv,
labeled( node, env ) {
if (Object.prototype.hasOwnProperty.call(env, node.label)) {
const label = node.label;
if ( Object.prototype.hasOwnProperty.call( env, label ) ) {
const start = env[ label ].start;
throw new GrammarError(
"Label \"" + node.label + "\" is already defined "
+ "at line " + env[node.label].start.line + ", "
+ "column " + env[node.label].start.column + ".",
`Label "${ label }" is already defined at line ${ start.line }, column ${ start.column }.`,
node.location
);
}
check( node.expression, env );
env[ label ] = node.location;
env[node.label] = node.location;
},
text: checkExpressionWithClonedEnv,
@ -57,6 +77,7 @@ function reportDuplicateLabels(ast) {
} );
check( ast );
}
module.exports = reportDuplicateLabels;

@ -1,28 +1,36 @@
"use strict";
let GrammarError = require("../../grammar-error");
let visitor = require("../visitor");
const GrammarError = require( "../../grammar-error" );
const visitor = require( "../visitor" );
// Checks that each rule is defined only once.
function reportDuplicateRules( ast ) {
let rules = {};
let check = visitor.build({
const rules = {};
const check = visitor.build( {
rule( node ) {
if (Object.prototype.hasOwnProperty.call(rules, node.name)) {
const name = node.name;
if ( Object.prototype.hasOwnProperty.call( rules, name ) ) {
const start = rules[ name ].start;
throw new GrammarError(
"Rule \"" + node.name + "\" is already defined "
+ "at line " + rules[node.name].start.line + ", "
+ "column " + rules[node.name].start.column + ".",
`Rule "${ name }" is already defined at line ${ start.line }, column ${ start.column }.`,
node.location
);
}
rules[ node.name ] = node.location;
}
} );
check( ast );
}
module.exports = reportDuplicateRules;

@ -1,8 +1,8 @@
"use strict";
let GrammarError = require("../../grammar-error");
let asts = require("../asts");
let visitor = require("../visitor");
const GrammarError = require( "../../grammar-error" );
const asts = require( "../asts" );
const visitor = require( "../visitor" );
// Reports left recursion in the grammar, which prevents infinite recursion in
// the generated parser.
@ -15,40 +15,51 @@ let visitor = require("../visitor");
// In general, if a rule reference can be reached without consuming any input,
// it can lead to left recursion.
function reportInfiniteRecursion( ast ) {
let visitedRules = [];
let check = visitor.build({
const visitedRules = [];
const check = visitor.build( {
rule( node ) {
visitedRules.push( node.name );
check( node.expression );
visitedRules.pop( node.name );
},
sequence( node ) {
node.elements.every( element => {
check( element );
return ! asts.alwaysConsumesOnSuccess( ast, element );
} );
},
rule_ref( node ) {
if ( visitedRules.indexOf( node.name ) !== -1 ) {
visitedRules.push( node.name );
const rulePath = visitedRules.join( " -> " );
throw new GrammarError(
"Possible infinite loop when parsing (left recursion: "
+ visitedRules.join(" -> ")
+ ").",
`Possible infinite loop when parsing (left recursion: ${ rulePath }).`,
node.location
);
}
check( asts.findRule( ast, node.name ) );
}
} );
check( ast );
}
module.exports = reportInfiniteRecursion;

@ -1,33 +1,43 @@
"use strict";
let GrammarError = require("../../grammar-error");
let asts = require("../asts");
let visitor = require("../visitor");
const GrammarError = require( "../../grammar-error" );
const asts = require( "../asts" );
const visitor = require( "../visitor" );
// Reports expressions that don't consume any input inside |*| or |+| in the
// grammar, which prevents infinite loops in the generated parser.
function reportInfiniteRepetition( ast ) {
let check = visitor.build({
const check = visitor.build( {
zero_or_more( node ) {
if ( ! asts.alwaysConsumesOnSuccess( ast, node.expression ) ) {
throw new GrammarError(
"Possible infinite loop when parsing (repetition used with an expression that may not consume any input).",
node.location
);
}
},
one_or_more( node ) {
if ( ! asts.alwaysConsumesOnSuccess( ast, node.expression ) ) {
throw new GrammarError(
"Possible infinite loop when parsing (repetition used with an expression that may not consume any input).",
node.location
);
}
}
} );
check( ast );
}
module.exports = reportInfiniteRepetition;

@ -1,32 +1,43 @@
"use strict";
let GrammarError = require("../../grammar-error");
let asts = require("../asts");
let visitor = require("../visitor");
const GrammarError = require( "../../grammar-error" );
const asts = require( "../asts" );
const visitor = require( "../visitor" );
// Checks that all referenced rules exist.
function reportUndefinedRules( ast, options ) {
let check = visitor.build({
const check = visitor.build( {
rule_ref( node ) {
if ( ! asts.findRule( ast, node.name ) ) {
throw new GrammarError(
"Rule \"" + node.name + "\" is not defined.",
`Rule "${ node.name }" is not defined.`,
node.location
);
}
}
} );
check( ast );
if ( options.allowedStartRules ) {
options.allowedStartRules.forEach( rule => {
if ( ! asts.findRule( ast, rule ) ) {
throw new GrammarError(
"Start rule \"" + rule + "\" is not defined.");
throw new GrammarError( `Start rule "${ rule }" is not defined.` );
}
} );
}
}
module.exports = reportUndefinedRules;

@ -1,10 +1,13 @@
"use strict";
// Simple AST node visitor builder.
let visitor = {
const visitor = {
build( functions ) {
function visit( node ) {
return functions[ node.type ].apply( null, arguments );
}
function visitNop() {
@ -12,32 +15,46 @@ let visitor = {
}
function visitExpression( node ) {
let extraArgs = Array.prototype.slice.call(arguments, 1);
visit.apply(null, [node.expression].concat(extraArgs));
const extraArgs = Array.prototype.slice.call( arguments, 1 );
visit( ...[ node.expression ].concat( extraArgs ) );
}
function visitChildren( property ) {
return function(node) {
let extraArgs = Array.prototype.slice.call(arguments, 1);
return function visitProperty( node ) {
const extraArgs = Array.prototype.slice.call( arguments, 1 );
node[ property ].forEach( child => {
visit.apply(null, [child].concat(extraArgs));
visit( ...[ child ].concat( extraArgs ) );
} );
};
}
const DEFAULT_FUNCTIONS = {
grammar( node ) {
let extraArgs = Array.prototype.slice.call(arguments, 1);
const extraArgs = Array.prototype.slice.call( arguments, 1 );
if ( node.initializer ) {
visit.apply(null, [node.initializer].concat(extraArgs));
visit( ...[ node.initializer ].concat( extraArgs ) );
}
node.rules.forEach( rule => {
visit.apply(null, [rule].concat(extraArgs));
visit( ...[ rule ].concat( extraArgs ) );
} );
},
initializer: visitNop,
@ -63,12 +80,17 @@ let visitor = {
};
Object.keys( DEFAULT_FUNCTIONS ).forEach( type => {
if ( ! Object.prototype.hasOwnProperty.call( functions, type ) ) {
functions[ type ] = DEFAULT_FUNCTIONS[ type ];
}
} );
return visit;
}
};

@ -2,15 +2,21 @@
// Thrown when the grammar contains an error.
class GrammarError {
constructor( message, location ) {
this.name = "GrammarError";
this.message = message;
this.location = location;
if ( typeof Error.captureStackTrace === "function" ) {
Error.captureStackTrace( this, GrammarError );
}
}
}
module.exports = GrammarError;

@ -1,10 +1,10 @@
"use strict";
let GrammarError = require("./grammar-error");
let compiler = require("./compiler");
let parser = require("./parser");
const GrammarError = require( "./grammar-error" );
const compiler = require( "./compiler" );
const parser = require( "./parser" );
let peg = {
const peg = {
// PEG.js version (uses semantic versioning).
VERSION: "0.10.0",
@ -22,32 +22,42 @@ let peg = {
// errors are detected during the generation and some may protrude to the
// generated parser and cause its malfunction.
generate( grammar, options ) {
options = options !== undefined ? options : {};
options = typeof options !== "undefined" ? options : {};
function convertPasses( passes ) {
let converted = {};
const converted = {};
Object.keys( passes ).forEach( stage => {
converted[ stage ] = Object.keys( passes[ stage ] )
.map( name => passes[ stage ][ name ] );
} );
return converted;
}
let plugins = "plugins" in options ? options.plugins : [];
let config = {
const plugins = "plugins" in options ? options.plugins : [];
const config = {
parser: peg.parser,
passes: convertPasses( peg.compiler.passes )
};
plugins.forEach(p => { p.use(config, options); });
plugins.forEach( p => {
p.use( config, options );
} );
return peg.compiler.compile(
config.parser.parse( grammar ),
config.passes,
options
);
}
};

@ -59,6 +59,7 @@
"devDependencies": {
"babel-preset-es2015": "6.24.1",
"babel-core": "6.26.0",
"dedent": "0.7.0",
"babelify": "8.0.0",
"browserify": "14.5.0",
"chai": "4.1.2",

@ -5,8 +5,10 @@ module.exports = {
"extends": "futagozaryuu/test",
"rules": {
"node/shebang": 0
"node/shebang": 0,
"func-names": 0,
"no-mixed-operators": 0,
}
},
};

@ -1,6 +1,6 @@
"use strict";
let benchmarks = [
const benchmarks = [
{
id: "json",
title: "JSON",

@ -2,15 +2,17 @@
/* eslint-env browser, jquery */
let Runner = require("./runner.js");
let benchmarks = require("./benchmarks.js");
const Runner = require( "./runner.js" );
const benchmarks = require( "./benchmarks.js" );
$( "#run" ).click( () => {
// Results Table Manipulation
let resultsTable = $("#results-table");
const resultsTable = $( "#results-table" );
function appendResult( klass, title, url, inputSize, parseTime ) {
const KB = 1024;
const MS_IN_S = 1000;
@ -41,6 +43,7 @@ $("#run").click(() => {
+ "</td>"
+ "</tr>"
);
}
// Main
@ -54,26 +57,30 @@ $("#run").click(() => {
//
// 2. To minimize random errors.
let runCount = parseInt($("#run-count").val(), 10);
let options = {
const runCount = parseInt( $( "#run-count" ).val(), 10 );
const options = {
cache: $( "#cache" ).is( ":checked" ),
optimize: $( "#optimize" ).val()
};
if ( isNaN( runCount ) || runCount <= 0 ) {
alert( "Number of runs must be a positive integer." );
return;
}
Runner.run( benchmarks, runCount, options, {
readFile( file ) {
return $.ajax( {
type: "GET",
url: "benchmark/" + file,
dataType: "text",
async: false
} ).responseText;
},
testStart() {
@ -81,6 +88,7 @@ $("#run").click(() => {
},
testFinish( benchmark, test, inputSize, parseTime ) {
appendResult(
"individual",
test.title,
@ -88,9 +96,11 @@ $("#run").click(() => {
inputSize,
parseTime
);
},
benchmarkStart( benchmark ) {
resultsTable.append(
"<tr class='heading'><th colspan='4'>"
+ "<a href='../../examples/" + benchmark.id + ".pegjs'>"
@ -98,9 +108,11 @@ $("#run").click(() => {
+ "</a>"
+ "</th></tr>"
);
},
benchmarkFinish( benchmark, inputSize, parseTime ) {
appendResult(
"benchmark-total",
benchmark.title + " total",
@ -108,16 +120,20 @@ $("#run").click(() => {
inputSize,
parseTime
);
},
start() {
$( "#run-count, #cache, #run" ).attr( "disabled", "disabled" );
resultsTable.show();
$( "#results-table tr" ).slice( 1 ).remove();
},
finish( inputSize, parseTime ) {
appendResult(
"total",
"Total",
@ -127,12 +143,11 @@ $("#run").click(() => {
);
$.scrollTo( "max", { axis: "y", duration: 500 } );
$( "#run-count, #cache, #run" ).removeAttr( "disabled" );
}
} );
});
$(document).ready(() => {
$("#run").focus();
} );
$( document ).ready( () => $( "#run" ).focus() );

@ -2,76 +2,95 @@
"use strict";
let Runner = require("./runner.js");
let benchmarks = require("./benchmarks.js");
let fs = require("fs");
const Runner = require( "./runner.js" );
const benchmarks = require( "./benchmarks.js" );
const fs = require( "fs" );
const path = require( "path" );
// Results Table Manipulation
function dup( text, count ) {
let result = "";
for (let i = 1; i <= count; i++) {
result += text;
}
for ( let i = 1; i <= count; i++ ) result += text;
return result;
}
function padLeft( text, length ) {
return dup( " ", length - text.length ) + text;
}
function padRight( text, length ) {
return text + dup( " ", length - text.length );
}
function center( text, length ) {
let padLength = (length - text.length) / 2;
const padLength = ( length - text.length ) / 2;
return dup( " ", Math.floor( padLength ) )
+ text
+ dup( " ", Math.ceil( padLength ) );
}
function writeTableHeader() {
console.log( "┌─────────────────────────────────────┬───────────┬────────────┬──────────────┐" );
console.log( "│ Test │ Inp. size │ Avg. time │ Avg. speed │" );
}
function writeHeading( heading ) {
console.log( "├─────────────────────────────────────┴───────────┴────────────┴──────────────┤" );
console.log( "│ " + center( heading, 75 ) + " │" );
console.log( "├─────────────────────────────────────┬───────────┬────────────┬──────────────┤" );
}
function writeResult( title, inputSize, parseTime ) {
const KB = 1024;
const MS_IN_S = 1000;
console.log("│ "
+ padRight(title, 35)
+ " │ "
+ padLeft((inputSize / KB).toFixed(2), 6)
+ " kB │ "
+ padLeft(parseTime.toFixed(2), 7)
+ " ms │ "
+ padLeft(((inputSize / KB) / (parseTime / MS_IN_S)).toFixed(2), 7)
+ " kB/s │"
console.log(
"│ " +
padRight( title, 35 ) +
" │ " +
padLeft( ( inputSize / KB ).toFixed( 2 ), 6 ) +
" kB │ " +
padLeft( parseTime.toFixed( 2 ), 7 ) +
" ms │ " +
padLeft( ( ( inputSize / KB ) / ( parseTime / MS_IN_S ) ).toFixed( 2 ), 7 ) +
" kB/s │"
);
}
function writeSeparator() {
console.log( "├─────────────────────────────────────┼───────────┼────────────┼──────────────┤" );
}
function writeTableFooter() {
console.log( "└─────────────────────────────────────┴───────────┴────────────┴──────────────┘" );
}
// Helpers
function printHelp() {
console.log( "Usage: run [options]" );
console.log( "" );
console.log( "Runs PEG.js benchmark suite." );
@ -81,52 +100,69 @@ function printHelp() {
console.log( " --cache make tested parsers cache results" );
console.log( " -o, --optimize <goal> select optimization for speed or size (default:" );
console.log( " speed)" );
}
function exitSuccess() {
process.exit( 0 );
}
function exitFailure() {
process.exit( 1 );
}
function abort( message ) {
console.error( message );
exitFailure();
}
// Arguments
let args = process.argv.slice(2); // Trim "node" and the script path.
const args = process.argv.slice( 2 ); // Trim "node" and the script path.
function isOption( arg ) {
return ( /^-/ ).test( arg );
}
function nextArg() {
args.shift();
}
// Main
let runCount = 10;
let options = {
const options = {
cache: false,
optimize: "speed"
};
while ( args.length > 0 && isOption( args[ 0 ] ) ) {
switch ( args[ 0 ] ) {
case "-n":
case "--run-count":
nextArg();
if ( args.length === 0 ) {
abort( "Missing parameter of the -n/--run-count option." );
}
runCount = parseInt( args[ 0 ], 10 );
if ( isNaN( runCount ) || runCount <= 0 ) {
abort( "Number of runs must be a positive integer." );
}
break;
@ -138,10 +174,14 @@ while (args.length > 0 && isOption(args[0])) {
case "--optimize":
nextArg();
if ( args.length === 0 ) {
abort( "Missing parameter of the -o/--optimize option." );
}
if ( args[ 0 ] !== "speed" && args[ 0 ] !== "size" ) {
abort( "Optimization goal must be either \"speed\" or \"size\"." );
}
options.optimize = args[ 0 ];
break;
@ -154,17 +194,23 @@ while (args.length > 0 && isOption(args[0])) {
default:
abort( "Unknown option: " + args[ 0 ] + "." );
}
nextArg();
}
if ( args.length > 0 ) {
abort( "No arguments are allowed." );
}
Runner.run( benchmarks, runCount, options, {
readFile( file ) {
return fs.readFileSync(__dirname + "/" + file, "utf8");
return fs.readFileSync( path.join( __dirname, file ), "utf8" );
},
testStart() {
@ -172,25 +218,35 @@ Runner.run(benchmarks, runCount, options, {
},
testFinish( benchmark, test, inputSize, parseTime ) {
writeResult( test.title, inputSize, parseTime );
},
benchmarkStart( benchmark ) {
writeHeading( benchmark.title );
},
benchmarkFinish( benchmark, inputSize, parseTime ) {
writeSeparator();
writeResult( benchmark.title + " total", inputSize, parseTime );
},
start() {
writeTableHeader();
},
finish( inputSize, parseTime ) {
writeSeparator();
writeResult( "Total", inputSize, parseTime );
writeTableFooter();
}
} );

@ -1,28 +1,37 @@
"use strict";
/* global setTimeout */
const peg = require( "../../lib/peg" );
let peg = require("../../lib/peg");
let Runner = {
const Runner = {
run( benchmarks, runCount, options, callbacks ) {
// Queue
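// Runs the queued functions one per |setTimeout| tick, yielding to the event loop (and the browser UI) between benchmark steps.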
let Q = {
const Q = {
functions: [],
add( f ) {
this.functions.push( f );
},
run() {
if ( this.functions.length > 0 ) {
this.functions.shift()();
// We can't use |arguments.callee| here because |this| would get
// messed up in that case.
setTimeout(() => { Q.run(); }, 0);
setTimeout( () => {
Q.run();
}, 0 );
}
}
};
@ -39,17 +48,21 @@ let Runner = {
// The enqueued functions share state, which is all stored in the properties
// of the |state| object.
let state = {};
const state = {};
function initialize() {
callbacks.start();
state.totalInputSize = 0;
state.totalParseTime = 0;
}
function benchmarkInitializer( benchmark ) {
return function () {
callbacks.benchmarkStart( benchmark );
state.parser = peg.generate(
@ -58,32 +71,42 @@ let Runner = {
);
state.benchmarkInputSize = 0;
state.benchmarkParseTime = 0;
};
}
function testRunner( benchmark, test ) {
return function () {
callbacks.testStart( benchmark, test );
let input = callbacks.readFile(benchmark.id + "/" + test.file);
const input = callbacks.readFile( benchmark.id + "/" + test.file );
let parseTime = 0;
for ( let i = 0; i < runCount; i++ ) {
let t = (new Date()).getTime();
const t = ( new Date() ).getTime();
state.parser.parse( input );
parseTime += ( new Date() ).getTime() - t;
}
let averageParseTime = parseTime / runCount;
const averageParseTime = parseTime / runCount;
callbacks.testFinish( benchmark, test, input.length, averageParseTime );
state.benchmarkInputSize += input.length;
state.benchmarkParseTime += averageParseTime;
};
}
function benchmarkFinalizer( benchmark ) {
return function () {
callbacks.benchmarkFinish(
benchmark,
state.benchmarkInputSize,
@ -92,26 +115,35 @@ let Runner = {
state.totalInputSize += state.benchmarkInputSize;
state.totalParseTime += state.benchmarkParseTime;
};
}
function finalize() {
callbacks.finish( state.totalInputSize, state.totalParseTime );
}
// Main
Q.add( initialize );
benchmarks.forEach( benchmark => {
Q.add( benchmarkInitializer( benchmark ) );
benchmark.tests.forEach( test => {
Q.add( testRunner( benchmark, test ) );
} );
Q.add( benchmarkFinalizer( benchmark ) );
} );
Q.add( finalize );
Q.run();
}
};

@ -1,27 +1,25 @@
#!/usr/bin/env node
/* eslint camelcase:0, max-len:0, one-var:0 */
//
// Measures impact of a Git commit (or multiple commits) on generated parsers'
// speed and size. Makes sense to use only on a PEG.js Git repository checkout.
//
/* eslint prefer-const: 0 */
"use strict";
let child_process = require("child_process");
let fs = require("fs");
let os = require("os");
let path = require("path");
let glob = require("glob");
const child_process = require( "child_process" );
const fs = require( "fs" );
const os = require( "os" );
const path = require( "path" );
const dedent = require( "dedent" );
const glob = require( "glob" );
// Current Working Directory
let cwd = path.join(__dirname, "..");
if (process.cwd() !== cwd) {
process.chdir(cwd);
}
const cwd = path.join( __dirname, ".." );
if ( process.cwd() !== cwd ) process.chdir( cwd );
// Execution Files
@ -29,89 +27,118 @@ let PEGJS_BIN = "bin/peg.js";
let BENCHMARK_BIN = "test/benchmark/run";
if ( ! fs.existsSync( PEGJS_BIN ) ) {
PEGJS_BIN = "bin/pegjs";
}
if ( ! fs.existsSync( BENCHMARK_BIN ) ) {
BENCHMARK_BIN = "benchmark/run";
}
// Utils
let print = console.log;
function echo( message ) {
process.stdout.write( message );
}
function exec( command ) {
return child_process.execSync( command, { encoding: "utf8" } );
}
function prepare( commit ) {
exec( `git checkout --quiet "${ commit }"` );
}
function runBenchmark() {
return parseFloat(
exec( "node " + BENCHMARK_BIN )
// Split by the table separator, reverse, and take the total bytes-per-second cell
.split( "│" )
.reverse()[ 1 ]
// Trim whitespace and remove ` kB/s` from the end
.trim()
.slice( 0, -5 )
);
}
function measureSpeed() {
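// Averages five benchmark runs; the result is the parse speed in kB/s with two decimals.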
return ( ( runBenchmark() + runBenchmark() + runBenchmark() + runBenchmark() + runBenchmark() ) / 5 ).toFixed( 2 );
}
function measureSize() {
let size = 0;
glob.sync( "examples/*.pegjs" )
.forEach( example => {
exec( `node ${ PEGJS_BIN } ${ example }` );
example = example.slice( 0, -5 ) + "js";
size += fs.statSync( example ).size;
fs.unlinkSync( example );
} );
return size;
}
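// Percentage difference between $1 and $2 (positive when $2 is larger).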
function difference( $1, $2 ) {
return ( ( $2 / $1 - 1 ) * 100 ).toFixed( 4 );
}
// Prepare
let argv = process.argv.slice(2);
const argv = process.argv.slice( 2 );
let commit_before, commit_after;
if ( argv.length === 1 ) {
commit_before = argv[ 0 ] + "~1";
commit_after = argv[ 0 ];
} else if ( argv.length === 2 ) {
commit_before = argv[ 0 ];
commit_after = argv[ 1 ];
} else {
print("Usage:");
print("");
print(" test/impact <commit>");
print(" test/impact <commit_before> <commit_after>");
print("");
print("Measures impact of a Git commit (or multiple commits) on generated parsers'");
print("speed and size. Makes sense to use only on PEG.js Git repository checkout.");
print("");
console.log( dedent`
Usage:
test/impact <commit>
test/impact <commit_before> <commit_after>
Measures impact of a Git commit (or multiple commits) on generated parsers'
speed and size. Makes sense to use only on a PEG.js Git repository checkout.
` );
process.exit( 1 );
}
// Measure
let branch = exec("git rev-parse --abbrev-ref HEAD");
const branch = exec( "git rev-parse --abbrev-ref HEAD" );
let speed1, size1, speed2, size2;
echo( `Measuring commit ${ commit_before }...` );
@ -130,7 +157,8 @@ echo(" OK" + os.EOL);
prepare( branch );
print(`
console.log( dedent`
test/impact ${ commit_before } ${ commit_after }
Speed impact
@ -147,4 +175,5 @@ test/impact ${commit_before} ${commit_after}
- Measured by /test/impact with Node.js ${ process.version }
- Your system: ${ os.type() } ${ os.release() } ${ os.arch() }.
` );

@ -2,13 +2,13 @@
"use strict";
let babelify = require("babelify");
let browserify = require("browserify");
let express = require("express");
let glob = require("glob");
let logger = require("morgan");
const babelify = require( "babelify" );
const browserify = require( "browserify" );
const express = require( "express" );
const glob = require( "glob" );
const logger = require( "morgan" );
let app = express();
const app = express();
app.use( logger( "dev" ) );
app.use( express.static( __dirname ) );
@ -16,6 +16,7 @@ app.use("/benchmark", express.static(`${__dirname}/../benchmark`));
app.use( "/examples", express.static( `${ __dirname }/../../examples` ) );
app.get( "/:dir/bundle.js", ( req, res ) => {
browserify( glob.sync(
`${ __dirname }/../${ req.params.dir }/**/*.js`
) )
@ -25,8 +26,11 @@ app.get("/:dir/bundle.js", (req, res) => {
} )
.bundle()
.pipe( res );
} );
app.listen( 8000, () => {
console.log( "Test server running at: http://localhost:8000/" );
} );

@ -1,64 +1,95 @@
"use strict";
/* global console */
const chai = require( "chai" );
const peg = require( "../../../lib/peg" );
const sinon = require( "sinon" );
let chai = require("chai");
let peg = require("../../../lib/peg");
let sinon = require("sinon");
let expect = chai.expect;
const expect = chai.expect;
describe( "generated parser API", function () {
describe( "parse", function () {
it( "parses input", function () {
let parser = peg.generate("start = 'a'");
const parser = peg.generate( "start = 'a'" );
expect( parser.parse( "a" ) ).to.equal( "a" );
} );
it( "throws an exception on syntax error", function () {
let parser = peg.generate("start = 'a'");
expect(() => { parser.parse("b"); }).to.throw();
const parser = peg.generate( "start = 'a'" );
expect( () => {
parser.parse( "b" );
} ).to.throw();
} );
describe( "start rule", function () {
let parser = peg.generate([
"a = 'x' { return 'a'; }",
"b = 'x' { return 'b'; }",
"c = 'x' { return 'c'; }"
].join("\n"), { allowedStartRules: ["b", "c"] });
const parser = peg.generate( `
a = 'x' { return 'a'; }
b = 'x' { return 'b'; }
c = 'x' { return 'c'; }
`, { allowedStartRules: [ "b", "c" ] } );
describe( "when |startRule| is not set", function () {
it( "starts parsing from the first allowed rule", function () {
expect( parser.parse( "x" ) ).to.equal( "b" );
} );
} );
describe( "when |startRule| is set to an allowed rule", function () {
it( "starts parsing from specified rule", function () {
expect( parser.parse( "x", { startRule: "b" } ) ).to.equal( "b" );
expect( parser.parse( "x", { startRule: "c" } ) ).to.equal( "c" );
} );
} );
describe( "when |startRule| is set to a disallowed start rule", function () {
it( "throws an exception", function () {
expect(() => { parser.parse("x", { startRule: "a" }); }).to.throw();
expect( () => {
parser.parse( "x", { startRule: "a" } );
} ).to.throw();
} );
} );
} );
describe( "tracing", function () {
let parser = peg.generate([
"start = a / b",
"a = 'a'",
"b = 'b'"
].join("\n"), { trace: true });
const parser = peg.generate( `
start = a / b
a = 'a'
b = 'b'
`, { trace: true } );
describe( "default tracer", function () {
it( "traces using console.log (if console is defined)", function () {
let messages = [
const messages = [
"1:1-1:1 rule.enter start",
"1:1-1:1 rule.enter a",
"1:1-1:1 rule.fail a",
@ -67,32 +98,41 @@ describe("generated parser API", function() {
"1:1-1:2 rule.match start"
];
if (typeof console === "object") {
sinon.stub(console, "log");
}
if ( typeof console === "object" ) sinon.stub( console, "log" );
try {
parser.parse( "b" );
if ( typeof console === "object" ) {
expect( console.log.callCount ).to.equal( messages.length );
messages.forEach( ( message, index ) => {
let call = console.log.getCall(index);
const call = console.log.getCall( index );
expect( call.calledWithExactly( message ) ).to.equal( true );
} );
}
} finally {
if (typeof console === "object") {
console.log.restore();
}
if ( typeof console === "object" ) console.log.restore();
}
} );
} );
describe( "custom tracers", function () {
describe( "trace", function () {
it( "receives tracing events", function () {
let events = [
const events = [
{
type: "rule.enter",
rule: "start",
@ -145,24 +185,32 @@ describe("generated parser API", function() {
}
];
let tracer = { trace: sinon.spy() };
const tracer = { trace: sinon.spy() };
parser.parse( "b", { tracer: tracer } );
expect( tracer.trace.callCount ).to.equal( events.length );
events.forEach( ( event, index ) => {
let call = tracer.trace.getCall(index);
const call = tracer.trace.getCall( index );
expect( call.calledWithExactly( event ) ).to.equal( true );
} );
} );
} );
} );
} );
it( "accepts custom options", function () {
let parser = peg.generate("start = 'a'");
const parser = peg.generate( "start = 'a'" );
parser.parse( "a", { foo: 42 } );
} );
} );
} );

@ -1,196 +1,305 @@
"use strict";
let chai = require("chai");
let peg = require("../../../lib/peg");
let sinon = require("sinon");
const chai = require( "chai" );
const peg = require( "../../../lib/peg" );
const sinon = require( "sinon" );
let expect = chai.expect;
const expect = chai.expect;
describe( "PEG.js API", function () {
describe( "generate", function () {
it( "generates a parser", function () {
let parser = peg.generate("start = 'a'");
const parser = peg.generate( "start = 'a'" );
expect( parser ).to.be.an( "object" );
expect( parser.parse( "a" ) ).to.equal( "a" );
} );
it( "throws an exception on syntax error", function () {
expect(() => { peg.generate("start = @"); }).to.throw();
expect( () => {
peg.generate( "start = @" );
} ).to.throw();
} );
it( "throws an exception on semantic error", function () {
expect(() => { peg.generate("start = undefined"); }).to.throw();
expect( () => {
peg.generate( "start = undefined" );
} ).to.throw();
} );
describe( "allowed start rules", function () {
let grammar = [
"a = 'x'",
"b = 'x'",
"c = 'x'"
].join("\n");
const grammar = `
a = 'x'
b = 'x'
c = 'x'
`;
it( "throws an error on missing rule", function () {
expect(() => peg.generate(grammar, {
allowedStartRules: ["missing"]
})).to.throw();
expect( () => {
peg.generate( grammar, { allowedStartRules: [ "missing" ] } );
} ).to.throw();
} );
// The |allowedStartRules| option is implemented separately for each
// optimization mode, so we need to test it in both.
describe( "when optimizing for parsing speed", function () {
describe( "when |allowedStartRules| is not set", function () {
it( "generated parser can start only from the first rule", function () {
let parser = peg.generate(grammar, { optimize: "speed" });
const parser = peg.generate( grammar, { optimize: "speed" } );
expect( parser.parse( "x", { startRule: "a" } ) ).to.equal( "x" );
expect(() => { parser.parse("x", { startRule: "b" }); }).to.throw();
expect(() => { parser.parse("x", { startRule: "c" }); }).to.throw();
expect( () => {
parser.parse( "x", { startRule: "b" } );
} ).to.throw();
expect( () => {
parser.parse( "x", { startRule: "c" } );
} ).to.throw();
} );
} );
describe( "when |allowedStartRules| is set", function () {
it( "generated parser can start only from specified rules", function () {
let parser = peg.generate(grammar, {
const parser = peg.generate( grammar, {
optimize: "speed",
allowedStartRules: [ "b", "c" ]
} );
expect(() => { parser.parse("x", { startRule: "a" }); }).to.throw();
expect( () => {
parser.parse( "x", { startRule: "a" } );
} ).to.throw();
expect( parser.parse( "x", { startRule: "b" } ) ).to.equal( "x" );
expect( parser.parse( "x", { startRule: "c" } ) ).to.equal( "x" );
} );
} );
} );
describe( "when optimizing for code size", function () {
describe( "when |allowedStartRules| is not set", function () {
it( "generated parser can start only from the first rule", function () {
let parser = peg.generate(grammar, { optimize: "size" });
const parser = peg.generate( grammar, { optimize: "size" } );
expect( parser.parse( "x", { startRule: "a" } ) ).to.equal( "x" );
expect(() => { parser.parse("x", { startRule: "b" }); }).to.throw();
expect(() => { parser.parse("x", { startRule: "c" }); }).to.throw();
expect( () => {
parser.parse( "x", { startRule: "b" } );
} ).to.throw();
expect( () => {
parser.parse( "x", { startRule: "c" } );
} ).to.throw();
} );
} );
describe( "when |allowedStartRules| is set", function () {
it( "generated parser can start only from specified rules", function () {
let parser = peg.generate(grammar, {
const parser = peg.generate( grammar, {
optimize: "size",
allowedStartRules: [ "b", "c" ]
} );
expect(() => { parser.parse("x", { startRule: "a" }); }).to.throw();
expect( () => {
parser.parse( "x", { startRule: "a" } );
} ).to.throw();
expect( parser.parse( "x", { startRule: "b" } ) ).to.equal( "x" );
expect( parser.parse( "x", { startRule: "c" } ) ).to.equal( "x" );
} );
} );
} );
} );
describe( "intermediate results caching", function () {
let grammar = [
"{ var n = 0; }",
"start = (a 'b') / (a 'c') { return n; }",
"a = 'a' { n++; }"
].join("\n");
const grammar = `
{ var n = 0; }
start = (a 'b') / (a 'c') { return n; }
a = 'a' { n++; }
`;
describe( "when |cache| is not set", function () {
it( "generated parser doesn't cache intermediate parse results", function () {
let parser = peg.generate(grammar);
const parser = peg.generate( grammar );
expect( parser.parse( "ac" ) ).to.equal( 2 );
} );
} );
describe( "when |cache| is set to |false|", function () {
it( "generated parser doesn't cache intermediate parse results", function () {
let parser = peg.generate(grammar, { cache: false });
const parser = peg.generate( grammar, { cache: false } );
expect( parser.parse( "ac" ) ).to.equal( 2 );
} );
} );
describe( "when |cache| is set to |true|", function () {
it( "generated parser caches intermediate parse results", function () {
let parser = peg.generate(grammar, { cache: true });
const parser = peg.generate( grammar, { cache: true } );
expect( parser.parse( "ac" ) ).to.equal( 1 );
} );
} );
} );
describe( "tracing", function () {
let grammar = "start = 'a'";
const grammar = "start = 'a'";
describe( "when |trace| is not set", function () {
it( "generated parser doesn't trace", function () {
let parser = peg.generate(grammar);
let tracer = { trace: sinon.spy() };
const parser = peg.generate( grammar );
const tracer = { trace: sinon.spy() };
parser.parse( "a", { tracer: tracer } );
expect( tracer.trace.called ).to.equal( false );
} );
} );
describe( "when |trace| is set to |false|", function () {
it( "generated parser doesn't trace", function () {
let parser = peg.generate(grammar, { trace: false });
let tracer = { trace: sinon.spy() };
const parser = peg.generate( grammar, { trace: false } );
const tracer = { trace: sinon.spy() };
parser.parse( "a", { tracer: tracer } );
expect( tracer.trace.called ).to.equal( false );
} );
} );
describe( "when |trace| is set to |true|", function () {
it( "generated parser traces", function () {
let parser = peg.generate(grammar, { trace: true });
let tracer = { trace: sinon.spy() };
const parser = peg.generate( grammar, { trace: true } );
const tracer = { trace: sinon.spy() };
parser.parse( "a", { tracer: tracer } );
expect( tracer.trace.called ).to.equal( true );
} );
} );
} );
// The |optimize| option isn't tested because there is no meaningful way to
// write the tests without turning this into a performance test.
describe( "output", function () {
let grammar = "start = 'a'";
const grammar = "start = 'a'";
describe( "when |output| is not set", function () {
it( "returns generated parser object", function () {
let parser = peg.generate(grammar);
const parser = peg.generate( grammar );
expect( parser ).to.be.an( "object" );
expect( parser.parse( "a" ) ).to.equal( "a" );
} );
} );
describe( "when |output| is set to |\"parser\"|", function () {
it( "returns generated parser object", function () {
let parser = peg.generate(grammar, { output: "parser" });
const parser = peg.generate( grammar, { output: "parser" } );
expect( parser ).to.be.an( "object" );
expect( parser.parse( "a" ) ).to.equal( "a" );
} );
} );
describe( "when |output| is set to |\"source\"|", function () {
it( "returns generated parser source code", function () {
let source = peg.generate(grammar, { output: "source" });
const source = peg.generate( grammar, { output: "source" } );
expect( source ).to.be.a( "string" );
expect( eval( source ).parse( "a" ) ).to.equal( "a" );
} );
} );
} );
// The |format|, |exportVars|, and |dependencies| options are not tested
@ -200,7 +309,11 @@ describe("PEG.js API", function() {
// The |plugins| option is tested in plugin API tests.
it( "accepts custom options", function () {
peg.generate( "start = 'a'", { foo: 42 } );
} );
} );
} );

@ -1,30 +1,48 @@
"use strict";
let chai = require("chai");
let peg = require("../../../lib/peg");
const chai = require( "chai" );
const peg = require( "../../../lib/peg" );
let expect = chai.expect;
const expect = chai.expect;
describe( "plugin API", function () {
describe( "use", function () {
let grammar = "start = 'a'";
const grammar = "start = 'a'";
it( "is called for each plugin", function () {
let pluginsUsed = [false, false, false];
let plugins = [
{ use() { pluginsUsed[0] = true; } },
{ use() { pluginsUsed[1] = true; } },
{ use() { pluginsUsed[2] = true; } }
const pluginsUsed = [ false, false, false ];
const plugins = [
{ use() {
pluginsUsed[ 0 ] = true;
} },
{ use() {
pluginsUsed[ 1 ] = true;
} },
{ use() {
pluginsUsed[ 2 ] = true;
} }
];
peg.generate( grammar, { plugins: plugins } );
expect( pluginsUsed ).to.deep.equal( [ true, true, true ] );
} );
it( "receives configuration", function () {
let plugin = {
const plugin = {
use( config ) {
expect( config ).to.be.an( "object" );
expect( config.parser ).to.be.an( "object" );
@ -34,95 +52,134 @@ describe("plugin API", function() {
expect( config.passes.check ).to.be.an( "array" );
config.passes.check.forEach( pass => {
expect( pass ).to.be.a( "function" );
} );
expect( config.passes.transform ).to.be.an( "array" );
config.passes.transform.forEach( pass => {
expect( pass ).to.be.a( "function" );
} );
expect( config.passes.generate ).to.be.an( "array" );
config.passes.generate.forEach( pass => {
expect( pass ).to.be.a( "function" );
} );
}
};
peg.generate( grammar, { plugins: [ plugin ] } );
} );
it( "receives options", function () {
let plugin = {
const generateOptions = {
plugins: [ {
use( config, options ) {
expect( options ).to.equal( generateOptions );
}
} ],
foo: 42
};
let generateOptions = { plugins: [plugin], foo: 42 };
peg.generate( grammar, generateOptions );
} );
it( "can replace parser", function () {
let plugin = {
const plugin = {
use( config ) {
let parser = peg.generate([
"start = .* {",
" return {",
" type: 'grammar',",
" rules: [",
" {",
" type: 'rule',",
" name: 'start',",
" expression: { type: 'literal', value: text(), ignoreCase: false }",
" }",
" ]",
" };",
"}"
].join("\n"));
config.parser = parser;
config.parser = peg.generate( `
start = .* {
return {
type: 'grammar',
rules: [{
type: 'rule',
name: 'start',
expression: {
type: 'literal',
value: text(),
ignoreCase: false
}
}]
};
}
` );
}
};
let parser = peg.generate("a", { plugins: [plugin] });
const parser = peg.generate( "a", { plugins: [ plugin ] } );
expect( parser.parse( "a" ) ).to.equal( "a" );
} );
it( "can change compiler passes", function () {
let plugin = {
const plugin = {
use( config ) {
function pass( ast ) {
ast.code = "({ parse: function() { return 42; } })";
}
config.passes.generate = [ pass ];
}
};
let parser = peg.generate(grammar, { plugins: [plugin] });
const parser = peg.generate( grammar, { plugins: [ plugin ] } );
expect( parser.parse( "a" ) ).to.equal( 42 );
} );
it( "can change options", function () {
let grammar = [
"a = 'x'",
"b = 'x'",
"c = 'x'"
].join("\n");
let plugin = {
const grammar = `
a = 'x'
b = 'x'
c = 'x'
`;
const plugin = {
use( config, options ) {
options.allowedStartRules = [ "b", "c" ];
}
};
let parser = peg.generate(grammar, {
const parser = peg.generate( grammar, {
allowedStartRules: [ "a" ],
plugins: [ plugin ]
} );
expect(() => { parser.parse("x", { startRule: "a" }); }).to.throw();
expect( () => {
parser.parse( "x", { startRule: "a" } );
} ).to.throw();
expect( parser.parse( "x", { startRule: "b" } ) ).to.equal( "x" );
expect( parser.parse( "x", { startRule: "c" } ) ).to.equal( "x" );
} );
} );
} );

File diff suppressed because it is too large

@ -1,24 +1,33 @@
"use strict";
let chai = require("chai");
let helpers = require("./helpers");
let pass = require("../../../../../lib/compiler/passes/generate-bytecode");
const chai = require( "chai" );
const helpers = require( "./helpers" );
const pass = require( "../../../../../lib/compiler/passes/generate-bytecode" );
chai.use( helpers );
let expect = chai.expect;
const expect = chai.expect;
describe( "compiler pass |generateBytecode|", function () {
function bytecodeDetails( bytecode ) {
return {
rules: [ { bytecode: bytecode } ]
};
}
function constsDetails(consts) { return { consts: consts }; }
function constsDetails( consts ) {
return { consts: consts };
}
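// Both helpers build the minimal AST fragment that |changeAST| matches against.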
describe( "for grammar", function () {
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( [
"a = 'a'",
"b = 'b'",
@ -30,9 +39,11 @@ describe("compiler pass |generateBytecode|", function() {
{ bytecode: [ 18, 4, 2, 2, 22, 4, 23, 5 ] }
]
} );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( [
"a = 'a'",
"b = 'b'",
@ -45,21 +56,29 @@ describe("compiler pass |generateBytecode|", function() {
"\"c\"",
"peg$literalExpectation(\"c\", false)"
] ) );
} );
} );
describe( "for rule", function () {
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( "start = 'a'", bytecodeDetails( [
18, 0, 2, 2, 22, 0, 23, 1 // <expression>
] ) );
} );
} );
describe( "for named", function () {
let grammar = "start 'start' = 'a'";
const grammar = "start 'start' = 'a'";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
28, // SILENT_FAILS_ON
18, 1, 2, 2, 22, 1, 23, 2, // <expression>
@ -67,19 +86,25 @@ describe("compiler pass |generateBytecode|", function() {
14, 2, 0, // IF_ERROR
23, 0 // * FAIL
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( grammar, constsDetails( [
"peg$otherExpectation(\"start\")",
"\"a\"",
"peg$literalExpectation(\"a\", false)"
] ) );
} );
} );
describe( "for choice", function () {
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( "start = 'a' / 'b' / 'c'", bytecodeDetails( [
18, 0, 2, 2, 22, 0, 23, 1, // <alternatives[0]>
14, 21, 0, // IF_ERROR
@ -89,14 +114,19 @@ describe("compiler pass |generateBytecode|", function() {
6, // * POP
18, 4, 2, 2, 22, 4, 23, 5 // <alternatives[2]>
] ) );
} );
} );
describe( "for action", function () {
describe( "without labels", function () {
let grammar = "start = 'a' { code }";
const grammar = "start = 'a' { code }";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
5, // PUSH_CURR_POS
18, 0, 2, 2, 22, 0, 23, 1, // <expression>
@ -105,21 +135,27 @@ describe("compiler pass |generateBytecode|", function() {
26, 2, 1, 0, // CALL
9 // NIP
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( grammar, constsDetails( [
"\"a\"",
"peg$literalExpectation(\"a\", false)",
"function() { code }"
] ) );
} );
} );
describe( "with one label", function () {
let grammar = "start = a:'a' { code }";
const grammar = "start = a:'a' { code }";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
5, // PUSH_CURR_POS
18, 0, 2, 2, 22, 0, 23, 1, // <expression>
@ -128,21 +164,27 @@ describe("compiler pass |generateBytecode|", function() {
26, 2, 1, 1, 0, // CALL
9 // NIP
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( grammar, constsDetails( [
"\"a\"",
"peg$literalExpectation(\"a\", false)",
"function(a) { code }"
] ) );
} );
} );
describe( "with multiple labels", function () {
let grammar = "start = a:'a' b:'b' c:'c' { code }";
const grammar = "start = a:'a' b:'b' c:'c' { code }";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
5, // PUSH_CURR_POS
18, 0, 2, 2, 22, 0, 23, 1, // <elements[0]>
@ -163,9 +205,11 @@ describe("compiler pass |generateBytecode|", function() {
7, // POP_CURR_POS
3 // PUSH_FAILED
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( grammar, constsDetails( [
"\"a\"",
"peg$literalExpectation(\"a\", false)",
@ -175,14 +219,19 @@ describe("compiler pass |generateBytecode|", function() {
"peg$literalExpectation(\"c\", false)",
"function(a, b, c) { code }"
] ) );
} );
} );
} );
describe( "for sequence", function () {
let grammar = "start = 'a' 'b' 'c'";
const grammar = "start = 'a' 'b' 'c'";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
5, // PUSH_CURR_POS
18, 0, 2, 2, 22, 0, 23, 1, // <elements[0]>
@ -203,9 +252,11 @@ describe("compiler pass |generateBytecode|", function() {
7, // POP_CURR_POS
3 // PUSH_FAILED
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( grammar, constsDetails( [
"\"a\"",
"peg$literalExpectation(\"a\", false)",
@ -214,19 +265,27 @@ describe("compiler pass |generateBytecode|", function() {
"\"c\"",
"peg$literalExpectation(\"c\", false)"
] ) );
} );
} );
describe( "for labeled", function () {
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( "start = a:'a'", bytecodeDetails( [
18, 0, 2, 2, 22, 0, 23, 1 // <expression>
] ) );
} );
} );
describe( "for text", function () {
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( "start = $'a'", bytecodeDetails( [
5, // PUSH_CURR_POS
18, 0, 2, 2, 22, 0, 23, 1, // <expression>
@ -235,13 +294,17 @@ describe("compiler pass |generateBytecode|", function() {
12, // TEXT
9 // * NIP
] ) );
} );
} );
describe( "for simple_and", function () {
let grammar = "start = &'a'";
const grammar = "start = &'a'";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
5, // PUSH_CURR_POS
28, // SILENT_FAILS_ON
@ -255,20 +318,26 @@ describe("compiler pass |generateBytecode|", function() {
6, // POP
3 // PUSH_FAILED
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( grammar, constsDetails( [
"\"a\"",
"peg$literalExpectation(\"a\", false)"
] ) );
} );
} );
describe( "for simple_not", function () {
let grammar = "start = !'a'";
const grammar = "start = !'a'";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
5, // PUSH_CURR_POS
28, // SILENT_FAILS_ON
@ -282,40 +351,52 @@ describe("compiler pass |generateBytecode|", function() {
7, // POP_CURR_POS
3 // PUSH_FAILED
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( grammar, constsDetails( [
"\"a\"",
"peg$literalExpectation(\"a\", false)"
] ) );
} );
} );
describe( "for optional", function () {
let grammar = "start = 'a'?";
const grammar = "start = 'a'?";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
18, 0, 2, 2, 22, 0, 23, 1, // <expression>
14, 2, 0, // IF_ERROR
6, // * POP
2 // PUSH_NULL
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( grammar, constsDetails( [
"\"a\"",
"peg$literalExpectation(\"a\", false)"
] ) );
} );
} );
describe( "for zero_or_more", function () {
let grammar = "start = 'a'*";
const grammar = "start = 'a'*";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
4, // PUSH_EMPTY_ARRAY
18, 0, 2, 2, 22, 0, 23, 1, // <expression>
@ -324,20 +405,26 @@ describe("compiler pass |generateBytecode|", function() {
18, 0, 2, 2, 22, 0, 23, 1, // <expression>
6 // POP
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( grammar, constsDetails( [
"\"a\"",
"peg$literalExpectation(\"a\", false)"
] ) );
} );
} );
describe( "for one_or_more", function () {
let grammar = "start = 'a'+";
const grammar = "start = 'a'+";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
4, // PUSH_EMPTY_ARRAY
18, 0, 2, 2, 22, 0, 23, 1, // <expression>
@ -350,29 +437,40 @@ describe("compiler pass |generateBytecode|", function() {
6, // POP
3 // PUSH_FAILED
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( grammar, constsDetails( [
"\"a\"",
"peg$literalExpectation(\"a\", false)"
] ) );
} );
} );
describe( "for group", function () {
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( "start = ('a')", bytecodeDetails( [
18, 0, 2, 2, 22, 0, 23, 1 // <expression>
] ) );
} );
} );
describe( "for semantic_and", function () {
describe( "without labels", function () {
let grammar = "start = &{ code }";
const grammar = "start = &{ code }";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
25, // UPDATE_SAVED_POS
26, 0, 0, 0, // CALL
@ -382,20 +480,26 @@ describe("compiler pass |generateBytecode|", function() {
6, // * POP
3 // PUSH_FAILED
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST(
grammar,
constsDetails( [ "function() { code }" ] )
);
} );
} );
describe( "with labels", function () {
let grammar = "start = a:'a' b:'b' c:'c' &{ code }";
const grammar = "start = a:'a' b:'b' c:'c' &{ code }";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
5, // PUSH_CURR_POS
18, 0, 2, 2, 22, 0, 23, 1, // <elements[0]>
@ -427,9 +531,11 @@ describe("compiler pass |generateBytecode|", function() {
7, // POP_CURR_POS
3 // PUSH_FAILED
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( grammar, constsDetails( [
"\"a\"",
"peg$literalExpectation(\"a\", false)",
@ -439,15 +545,21 @@ describe("compiler pass |generateBytecode|", function() {
"peg$literalExpectation(\"c\", false)",
"function(a, b, c) { code }"
] ) );
} );
} );
} );
describe( "for semantic_not", function () {
describe( "without labels", function () {
let grammar = "start = !{ code }";
const grammar = "start = !{ code }";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
25, // UPDATE_SAVED_POS
26, 0, 0, 0, // CALL
@ -457,20 +569,26 @@ describe("compiler pass |generateBytecode|", function() {
6, // * POP
1 // PUSH_UNDEFINED
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST(
grammar,
constsDetails( [ "function() { code }" ] )
);
} );
} );
describe( "with labels", function () {
let grammar = "start = a:'a' b:'b' c:'c' !{ code }";
const grammar = "start = a:'a' b:'b' c:'c' !{ code }";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
5, // PUSH_CURR_POS
18, 0, 2, 2, 22, 0, 23, 1, // <elements[0]>
@ -502,9 +620,11 @@ describe("compiler pass |generateBytecode|", function() {
7, // POP_CURR_POS
3 // PUSH_FAILED
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( grammar, constsDetails( [
"\"a\"",
"peg$literalExpectation(\"a\", false)",
@ -514,12 +634,17 @@ describe("compiler pass |generateBytecode|", function() {
"peg$literalExpectation(\"c\", false)",
"function(a, b, c) { code }"
] ) );
} );
} );
} );
describe( "for rule_ref", function () {
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( [
"start = other",
"other = 'other'"
@ -531,125 +656,174 @@ describe("compiler pass |generateBytecode|", function() {
{ }
]
} );
} );
} );
describe( "for literal", function () {
describe( "empty", function () {
let grammar = "start = ''";
const grammar = "start = ''";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
0, 0 // PUSH
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( grammar, constsDetails( [ "\"\"" ] ) );
} );
} );
describe( "non-empty case-sensitive", function () {
let grammar = "start = 'a'";
const grammar = "start = 'a'";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
18, 0, 2, 2, // MATCH_STRING
22, 0, // * ACCEPT_STRING
23, 1 // * FAIL
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( grammar, constsDetails( [
"\"a\"",
"peg$literalExpectation(\"a\", false)"
] ) );
} );
} );
describe( "non-empty case-insensitive", function () {
let grammar = "start = 'A'i";
const grammar = "start = 'A'i";
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
19, 0, 2, 2, // MATCH_STRING_IC
21, 1, // * ACCEPT_N
23, 1 // * FAIL
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST( grammar, constsDetails( [
"\"a\"",
"peg$literalExpectation(\"A\", true)"
] ) );
} );
} );
} );
describe( "for class", function () {
it( "generates correct bytecode", function () {
expect( pass ).to.changeAST( "start = [a]", bytecodeDetails( [
20, 0, 2, 2, // MATCH_REGEXP
21, 1, // * ACCEPT_N
23, 1 // * FAIL
] ) );
} );
describe( "non-inverted case-sensitive", function () {
it( "defines correct constants", function () {
expect( pass ).to.changeAST( "start = [a]", constsDetails( [
"/^[a]/",
"peg$classExpectation([\"a\"], false, false)"
] ) );
} );
} );
describe( "inverted case-sensitive", function () {
it( "defines correct constants", function () {
expect( pass ).to.changeAST( "start = [^a]", constsDetails( [
"/^[^a]/",
"peg$classExpectation([\"a\"], true, false)"
] ) );
} );
} );
describe( "non-inverted case-insensitive", function () {
it( "defines correct constants", function () {
expect( pass ).to.changeAST( "start = [a]i", constsDetails( [
"/^[a]/i",
"peg$classExpectation([\"a\"], false, true)"
] ) );
} );
} );
describe( "complex", function () {
it( "defines correct constants", function () {
expect( pass ).to.changeAST( "start = [ab-def-hij-l]", constsDetails( [
"/^[ab-def-hij-l]/",
"peg$classExpectation([\"a\", [\"b\", \"d\"], \"e\", [\"f\", \"h\"], \"i\", [\"j\", \"l\"]], false, false)"
] ) );
} );
} );
} );
describe( "for any", function () {
let grammar = "start = .";
const grammar = "start = .";
it( "generates bytecode", function () {
expect( pass ).to.changeAST( grammar, bytecodeDetails( [
17, 2, 2, // MATCH_ANY
21, 1, // * ACCEPT_N
23, 0 // * FAIL
] ) );
} );
it( "defines correct constants", function () {
expect( pass ).to.changeAST(
grammar,
constsDetails( [ "peg$anyExpectation()" ] )
);
} );
} );
} );

@ -1,50 +1,59 @@
"use strict";
let parser = require("../../../../../lib/parser");
const parser = require( "../../../../../lib/parser" );
module.exports = function ( chai, utils ) {
let Assertion = chai.Assertion;
const Assertion = chai.Assertion;
Assertion.addMethod( "changeAST", function ( grammar, props, options ) {
options = options !== undefined ? options : {};
options = typeof options !== "undefined" ? options : {};
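// Recursively checks that |value| matches |props|: arrays element by element, objects by the keys present in |props|, and everything else by strict equality.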
function matchProps( value, props ) {
function isArray(value) {
return Object.prototype.toString.apply(value) === "[object Array]";
}
function isObject( value ) {
return value !== null && typeof value === "object";
}
if (isArray(props)) {
if (!isArray(value)) { return false; }
if ( Array.isArray( props ) ) {
if ( ! Array.isArray( value ) ) return false;
if ( value.length !== props.length ) return false;
if (value.length !== props.length) { return false; }
for ( let i = 0; i < props.length; i++ ) {
if (!matchProps(value[i], props[i])) { return false; }
if ( ! matchProps( value[ i ], props[ i ] ) ) return false;
}
return true;
} else if ( isObject( props ) ) {
if (!isObject(value)) { return false; }
let keys = Object.keys(props);
if ( ! isObject( value ) ) return false;
const keys = Object.keys( props );
for ( let i = 0; i < keys.length; i++ ) {
let key = keys[i];
if (!(key in value)) { return false; }
const key = keys[ i ];
if ( ! ( key in value ) ) return false;
if ( ! matchProps( value[ key ], props[ key ] ) ) return false;
if (!matchProps(value[key], props[key])) { return false; }
}
return true;
} else {
return value === props;
}
return value === props;
}
let ast = parser.parse(grammar);
const ast = parser.parse( grammar );
utils.flag( this, "object" )( ast, options );
@ -55,21 +64,27 @@ module.exports = function(chai, utils) {
props,
ast
);
} );
Assertion.addMethod( "reportError", function ( grammar, props, options ) {
options = options !== undefined ? options : {};
let ast = parser.parse(grammar);
options = typeof options !== "undefined" ? options : {};
const ast = parser.parse( grammar );
let passed, result;
try {
utils.flag( this, "object" )( ast, options );
passed = true;
} catch ( e ) {
result = e;
passed = false;
}
this.assert(
@ -80,11 +95,18 @@ module.exports = function(chai, utils) {
result
);
if (!passed && props !== undefined) {
if ( ! passed && typeof props !== "undefined" ) {
Object.keys( props ).forEach( key => {
new Assertion(result).to.have.property(key)
new Assertion( result )
.to.have.property( key )
.that.is.deep.equal( props[ key ] );
} );
}
} );
};

@ -1,16 +1,19 @@
"use strict";
let chai = require("chai");
let helpers = require("./helpers");
let pass = require("../../../../../lib/compiler/passes/remove-proxy-rules");
const chai = require( "chai" );
const helpers = require( "./helpers" );
const pass = require( "../../../../../lib/compiler/passes/remove-proxy-rules" );
chai.use( helpers );
let expect = chai.expect;
const expect = chai.expect;
describe( "compiler pass |removeProxyRules|", function () {
describe( "when a proxy rule isn't listed in |allowedStartRules|", function () {
it( "updates references and removes it", function () {
expect( pass ).to.changeAST(
[
"start = proxy",
@ -28,11 +31,15 @@ describe("compiler pass |removeProxyRules|", function() {
},
{ allowedStartRules: [ "start" ] }
);
} );
} );
describe( "when a proxy rule is listed in |allowedStartRules|", function () {
it( "updates references but doesn't remove it", function () {
expect( pass ).to.changeAST(
[
"start = proxy",
@ -54,6 +61,9 @@ describe("compiler pass |removeProxyRules|", function() {
},
{ allowedStartRules: [ "start", "proxy" ] }
);
} );
} );
} );

@ -1,16 +1,19 @@
"use strict";
let chai = require("chai");
let helpers = require("./helpers");
let pass = require("../../../../../lib/compiler/passes/report-duplicate-labels");
const chai = require( "chai" );
const helpers = require( "./helpers" );
const pass = require( "../../../../../lib/compiler/passes/report-duplicate-labels" );
chai.use( helpers );
let expect = chai.expect;
const expect = chai.expect;
describe( "compiler pass |reportDuplicateLabels|", function () {
describe( "in a sequence", function () {
it( "reports labels duplicate with labels of preceding elements", function () {
expect( pass ).to.reportError( "start = a:'a' a:'a'", {
message: "Label \"a\" is already defined at line 1, column 9.",
location: {
@ -18,9 +21,11 @@ describe("compiler pass |reportDuplicateLabels|", function() {
end: { offset: 19, line: 1, column: 20 }
}
} );
} );
it( "doesn't report labels duplicate with labels in subexpressions", function () {
expect( pass ).to.not.reportError( "start = ('a' / a:'a' / 'a') a:'a'" );
expect( pass ).to.not.reportError( "start = (a:'a' { }) a:'a'" );
expect( pass ).to.not.reportError( "start = ('a' a:'a' 'a') a:'a'" );
@ -32,17 +37,25 @@ describe("compiler pass |reportDuplicateLabels|", function() {
expect( pass ).to.not.reportError( "start = (a:'a')* a:'a'" );
expect( pass ).to.not.reportError( "start = (a:'a')+ a:'a'" );
expect( pass ).to.not.reportError( "start = (a:'a') a:'a'" );
} );
} );
describe( "in a choice", function () {
it( "doesn't report labels duplicate with labels of preceding alternatives", function () {
expect( pass ).to.not.reportError( "start = a:'a' / a:'a'" );
} );
} );
describe( "in outer sequence", function () {
it( "reports labels duplicate with labels of preceding elements", function () {
expect( pass ).to.reportError( "start = a:'a' (a:'a')", {
message: "Label \"a\" is already defined at line 1, column 9.",
location: {
@ -50,14 +63,21 @@ describe("compiler pass |reportDuplicateLabels|", function() {
end: { offset: 20, line: 1, column: 21 }
}
} );
} );
it( "doesn't report labels duplicate with the label of the current element", function () {
expect( pass ).to.not.reportError( "start = a:(a:'a')" );
} );
it( "doesn't report labels duplicate with labels of following elements", function () {
expect( pass ).to.not.reportError( "start = (a:'a') a:'a'" );
} );
} );
} );

@ -1,15 +1,17 @@
"use strict";
let chai = require("chai");
let helpers = require("./helpers");
let pass = require("../../../../../lib/compiler/passes/report-duplicate-rules");
const chai = require( "chai" );
const helpers = require( "./helpers" );
const pass = require( "../../../../../lib/compiler/passes/report-duplicate-rules" );
chai.use( helpers );
let expect = chai.expect;
const expect = chai.expect;
describe( "compiler pass |reportDuplicateRules|", function () {
it( "reports duplicate rules", function () {
expect( pass ).to.reportError( [
"start = 'a'",
"start = 'b'"
@ -20,5 +22,7 @@ describe("compiler pass |reportDuplicateRules|", function() {
end: { offset: 23, line: 2, column: 12 }
}
} );
} );
} );

@ -1,15 +1,17 @@
"use strict";
let chai = require("chai");
let helpers = require("./helpers");
let pass = require("../../../../../lib/compiler/passes/report-infinite-recursion");
const chai = require( "chai" );
const helpers = require( "./helpers" );
const pass = require( "../../../../../lib/compiler/passes/report-infinite-recursion" );
chai.use( helpers );
let expect = chai.expect;
const expect = chai.expect;
describe( "compiler pass |reportInfiniteRecursion|", function () {
it( "reports direct left recursion", function () {
expect( pass ).to.reportError( "start = start", {
message: "Possible infinite loop when parsing (left recursion: start -> start).",
location: {
@ -17,9 +19,11 @@ describe("compiler pass |reportInfiniteRecursion|", function() {
end: { offset: 13, line: 1, column: 14 }
}
} );
} );
it( "reports indirect left recursion", function () {
expect( pass ).to.reportError( [
"start = stop",
"stop = start"
@ -30,25 +34,34 @@ describe("compiler pass |reportInfiniteRecursion|", function() {
end: { offset: 25, line: 2, column: 13 }
}
} );
} );
describe( "in sequences", function () {
it( "reports left recursion if all preceding elements match empty string", function () {
expect( pass ).to.reportError( "start = '' '' '' start" );
} );
it( "doesn't report left recursion if some preceding element doesn't match empty string", function () {
expect( pass ).to.not.reportError( "start = 'a' '' '' start" );
expect( pass ).to.not.reportError( "start = '' 'a' '' start" );
expect( pass ).to.not.reportError( "start = '' '' 'a' start" );
} );
// Regression test for #359.
it( "reports left recursion when rule reference is wrapped in an expression", function () {
expect( pass ).to.reportError( "start = '' start?" );
} );
it( "computes expressions that always consume input on success correctly", function () {
expect( pass ).to.reportError( [
"start = a start",
"a 'a' = ''"
@ -114,6 +127,9 @@ describe("compiler pass |reportInfiniteRecursion|", function() {
expect( pass ).to.not.reportError( "start = [a-d] start" );
expect( pass ).to.not.reportError( "start = . start" );
} );
} );
} );

@ -1,15 +1,17 @@
"use strict";
let chai = require("chai");
let helpers = require("./helpers");
let pass = require("../../../../../lib/compiler/passes/report-infinite-repetition");
const chai = require( "chai" );
const helpers = require( "./helpers" );
const pass = require( "../../../../../lib/compiler/passes/report-infinite-repetition" );
chai.use( helpers );
let expect = chai.expect;
const expect = chai.expect;
describe( "compiler pass |reportInfiniteRepetition|", function () {
it( "reports infinite loops for zero_or_more", function () {
expect( pass ).to.reportError( "start = ('')*", {
message: "Possible infinite loop when parsing (repetition used with an expression that may not consume any input).",
location: {
@ -17,9 +19,11 @@ describe("compiler pass |reportInfiniteRepetition|", function() {
end: { offset: 13, line: 1, column: 14 }
}
} );
} );
it( "reports infinite loops for one_or_more", function () {
expect( pass ).to.reportError( "start = ('')+", {
message: "Possible infinite loop when parsing (repetition used with an expression that may not consume any input).",
location: {
@ -27,9 +31,11 @@ describe("compiler pass |reportInfiniteRepetition|", function() {
end: { offset: 13, line: 1, column: 14 }
}
} );
} );
it( "computes expressions that always consume input on success correctly", function () {
expect( pass ).to.reportError( [
"start = a*",
"a 'a' = ''"
@ -95,5 +101,7 @@ describe("compiler pass |reportInfiniteRepetition|", function() {
expect( pass ).to.not.reportError( "start = [a-d]*" );
expect( pass ).to.not.reportError( "start = .*" );
} );
} );

@ -1,15 +1,17 @@
"use strict";
let chai = require("chai");
let helpers = require("./helpers");
let pass = require("../../../../../lib/compiler/passes/report-undefined-rules");
const chai = require( "chai" );
const helpers = require( "./helpers" );
const pass = require( "../../../../../lib/compiler/passes/report-undefined-rules" );
chai.use( helpers );
let expect = chai.expect;
const expect = chai.expect;
describe( "compiler pass |reportUndefinedRules|", function () {
it( "reports undefined rules", function () {
expect( pass ).to.reportError( "start = undefined", {
message: "Rule \"undefined\" is not defined.",
location: {
@ -17,13 +19,17 @@ describe("compiler pass |reportUndefinedRules|", function() {
end: { offset: 17, line: 1, column: 18 }
}
} );
} );
it( "checks allowedStartRules", function () {
expect( pass ).to.reportError( "start = 'a'", {
message: "Start rule \"missing\" is not defined."
}, {
allowedStartRules: [ "missing" ]
} );
} );
} );

@ -1,147 +1,179 @@
"use strict";
let chai = require("chai");
let parser = require("../../../lib/parser");
const chai = require( "chai" );
const parser = require( "../../../lib/parser" );
let expect = chai.expect;
const expect = chai.expect;
// Better diagnostics for deep equality failures.
chai.config.truncateThreshold = 0;
describe( "PEG.js grammar parser", function () {
let literalAbcd = { type: "literal", value: "abcd", ignoreCase: false };
let literalEfgh = { type: "literal", value: "efgh", ignoreCase: false };
let literalIjkl = { type: "literal", value: "ijkl", ignoreCase: false };
let literalMnop = { type: "literal", value: "mnop", ignoreCase: false };
let semanticAnd = { type: "semantic_and", code: " code " };
let semanticNot = { type: "semantic_not", code: " code " };
let optional = { type: "optional", expression: literalAbcd };
let zeroOrMore = { type: "zero_or_more", expression: literalAbcd };
let oneOrMore = { type: "one_or_more", expression: literalAbcd };
let textOptional = { type: "text", expression: optional };
let simpleNotAbcd = { type: "simple_not", expression: literalAbcd };
let simpleAndOptional = { type: "simple_and", expression: optional };
let simpleNotOptional = { type: "simple_not", expression: optional };
let labeledAbcd = { type: "labeled", label: "a", expression: literalAbcd };
let labeledEfgh = { type: "labeled", label: "b", expression: literalEfgh };
let labeledIjkl = { type: "labeled", label: "c", expression: literalIjkl };
let labeledMnop = { type: "labeled", label: "d", expression: literalMnop };
let labeledSimpleNot = { type: "labeled", label: "a", expression: simpleNotAbcd };
let sequence = {
const literalAbcd = { type: "literal", value: "abcd", ignoreCase: false };
const literalEfgh = { type: "literal", value: "efgh", ignoreCase: false };
const literalIjkl = { type: "literal", value: "ijkl", ignoreCase: false };
const literalMnop = { type: "literal", value: "mnop", ignoreCase: false };
const semanticAnd = { type: "semantic_and", code: " code " };
const semanticNot = { type: "semantic_not", code: " code " };
const optional = { type: "optional", expression: literalAbcd };
const zeroOrMore = { type: "zero_or_more", expression: literalAbcd };
const oneOrMore = { type: "one_or_more", expression: literalAbcd };
const textOptional = { type: "text", expression: optional };
const simpleNotAbcd = { type: "simple_not", expression: literalAbcd };
const simpleAndOptional = { type: "simple_and", expression: optional };
const simpleNotOptional = { type: "simple_not", expression: optional };
const labeledAbcd = { type: "labeled", label: "a", expression: literalAbcd };
const labeledEfgh = { type: "labeled", label: "b", expression: literalEfgh };
const labeledIjkl = { type: "labeled", label: "c", expression: literalIjkl };
const labeledMnop = { type: "labeled", label: "d", expression: literalMnop };
const labeledSimpleNot = { type: "labeled", label: "a", expression: simpleNotAbcd };
const sequence = {
type: "sequence",
elements: [ literalAbcd, literalEfgh, literalIjkl ]
};
let sequence2 = {
const sequence2 = {
type: "sequence",
elements: [ labeledAbcd, labeledEfgh ]
};
let sequence4 = {
const sequence4 = {
type: "sequence",
elements: [ labeledAbcd, labeledEfgh, labeledIjkl, labeledMnop ]
};
let groupLabeled = { type: "group", expression: labeledAbcd };
let groupSequence = { type: "group", expression: sequence };
let actionAbcd = { type: "action", expression: literalAbcd, code: " code " };
let actionEfgh = { type: "action", expression: literalEfgh, code: " code " };
let actionIjkl = { type: "action", expression: literalIjkl, code: " code " };
let actionMnop = { type: "action", expression: literalMnop, code: " code " };
let actionSequence = { type: "action", expression: sequence, code: " code " };
let choice = {
const groupLabeled = { type: "group", expression: labeledAbcd };
const groupSequence = { type: "group", expression: sequence };
const actionAbcd = { type: "action", expression: literalAbcd, code: " code " };
const actionEfgh = { type: "action", expression: literalEfgh, code: " code " };
const actionIjkl = { type: "action", expression: literalIjkl, code: " code " };
const actionMnop = { type: "action", expression: literalMnop, code: " code " };
const actionSequence = { type: "action", expression: sequence, code: " code " };
const choice = {
type: "choice",
alternatives: [ literalAbcd, literalEfgh, literalIjkl ]
};
let choice2 = {
const choice2 = {
type: "choice",
alternatives: [ actionAbcd, actionEfgh ]
};
let choice4 = {
const choice4 = {
type: "choice",
alternatives: [ actionAbcd, actionEfgh, actionIjkl, actionMnop ]
};
let named = { type: "named", name: "start rule", expression: literalAbcd };
let ruleA = { type: "rule", name: "a", expression: literalAbcd };
let ruleB = { type: "rule", name: "b", expression: literalEfgh };
let ruleC = { type: "rule", name: "c", expression: literalIjkl };
let ruleStart = { type: "rule", name: "start", expression: literalAbcd };
let initializer = { type: "initializer", code: " code " };
const named = { type: "named", name: "start rule", expression: literalAbcd };
const ruleA = { type: "rule", name: "a", expression: literalAbcd };
const ruleB = { type: "rule", name: "b", expression: literalEfgh };
const ruleC = { type: "rule", name: "c", expression: literalIjkl };
const ruleStart = { type: "rule", name: "start", expression: literalAbcd };
const initializer = { type: "initializer", code: " code " };
function oneRuleGrammar( expression ) {
return {
type: "grammar",
initializer: null,
rules: [ { type: "rule", name: "start", expression: expression } ]
};
}
function actionGrammar( code ) {
return oneRuleGrammar(
{ type: "action", expression: literalAbcd, code: code }
);
}
function literalGrammar( value, ignoreCase ) {
return oneRuleGrammar(
{ type: "literal", value: value, ignoreCase: ignoreCase }
);
}
function classGrammar( parts, inverted, ignoreCase ) {
return oneRuleGrammar( {
type: "class",
parts: parts,
inverted: inverted,
ignoreCase: ignoreCase
} );
}
function anyGrammar() {
return oneRuleGrammar( { type: "any" } );
}
function ruleRefGrammar( name ) {
return oneRuleGrammar( { type: "rule_ref", name: name } );
}
let trivialGrammar = literalGrammar("abcd", false);
let twoRuleGrammar = {
const trivialGrammar = literalGrammar( "abcd", false );
const twoRuleGrammar = {
type: "grammar",
initializer: null,
rules: [ ruleA, ruleB ]
};
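// |stripLocation| walks an AST and deletes every |location| property,
// so parser results can be deep-compared against the fixtures above.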
let stripLocation = (function() {
const stripLocation = ( function () {
let strip;
function buildVisitor( functions ) {
return function ( node ) {
return functions[ node.type ].apply( null, arguments );
};
}
function stripLeaf( node ) {
delete node.location;
}
function stripExpression( node ) {
delete node.location;
strip( node.expression );
}
function stripChildren( property ) {
return function ( node ) {
delete node.location;
node[ property ].forEach( strip );
};
}
let strip = buildVisitor({
strip = buildVisitor( {
grammar( node ) {
delete node.location;
if ( node.initializer ) {
strip( node.initializer );
}
node.rules.forEach( strip );
},
initializer: stripLeaf,
@ -167,13 +199,16 @@ describe("PEG.js grammar parser", function() {
} );
return strip;
} )();
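// Chai plugin that defines the |parseAs| and |failToParse| assertions
// used by the tests below.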
function helpers( chai, utils ) {
let Assertion = chai.Assertion;
const Assertion = chai.Assertion;
Assertion.addMethod( "parseAs", function ( expected ) {
let result = parser.parse(utils.flag(this, "object"));
const result = parser.parse( utils.flag( this, "object" ) );
stripLocation( result );
@ -185,21 +220,29 @@ describe("PEG.js grammar parser", function() {
result,
! utils.flag( this, "negate" )
);
} );
Assertion.addMethod( "failToParse", function ( props ) {
let passed, result;
try {
result = parser.parse( utils.flag( this, "object" ) );
passed = true;
} catch ( e ) {
result = e;
passed = false;
}
if ( passed ) {
stripLocation( result );
}
this.assert(
@ -210,26 +253,36 @@ describe("PEG.js grammar parser", function() {
result
);
if (!passed && props !== undefined) {
if ( ! passed && typeof props !== "undefined" ) {
Object.keys( props ).forEach( key => {
new Assertion(result).to.have.property(key)
new Assertion( result )
.to.have.property( key )
.that.is.deep.equal( props[ key ] );
} );
}
} );
}
// Helper activation needs to be put inside a |beforeEach| block because the
// helpers conflict with the ones in
// test/behavior/generated-parser-behavior.spec.js.
beforeEach( function () {
chai.use( helpers );
} );
// Grammars without any rules are not accepted.
it( "parses Rule+", function () {
expect( "start = a" ).to.parseAs( ruleRefGrammar( "a" ) );
let grammar = ruleRefGrammar("a");
const grammar = ruleRefGrammar( "a" );
grammar.initializer = {
"type": "initializer",
"code": ""
@ -238,10 +291,12 @@ describe("PEG.js grammar parser", function() {
expect( "" ).to.failToParse();
expect( "{}" ).to.failToParse();
} );
// Canonical Grammar is "a = 'abcd'; b = 'efgh'; c = 'ijkl';".
it( "parses Grammar", function () {
expect( "\na = 'abcd';\n" ).to.parseAs(
{ type: "grammar", initializer: null, rules: [ ruleA ] }
);
@ -251,34 +306,42 @@ describe("PEG.js grammar parser", function() {
expect( "\n{ code };\na = 'abcd';\n" ).to.parseAs(
{ type: "grammar", initializer: initializer, rules: [ ruleA ] }
);
} );
// Canonical Initializer is "{ code }".
it( "parses Initializer", function () {
expect( "{ code };start = 'abcd'" ).to.parseAs(
{ type: "grammar", initializer: initializer, rules: [ ruleStart ] }
);
} );
// Canonical Rule is "a = 'abcd';".
it( "parses Rule", function () {
expect( "start\n=\n'abcd';" ).to.parseAs(
oneRuleGrammar( literalAbcd )
);
expect( "start\n'start rule'\n=\n'abcd';" ).to.parseAs(
oneRuleGrammar( named )
);
} );
// Canonical Expression is "'abcd'".
it( "parses Expression", function () {
expect( "start = 'abcd' / 'efgh' / 'ijkl'" ).to.parseAs(
oneRuleGrammar( choice )
);
} );
// Canonical ChoiceExpression is "'abcd' / 'efgh' / 'ijkl'".
it( "parses ChoiceExpression", function () {
expect( "start = 'abcd' { code }" ).to.parseAs(
oneRuleGrammar( actionAbcd )
);
@ -290,20 +353,24 @@ describe("PEG.js grammar parser", function() {
).to.parseAs(
oneRuleGrammar( choice4 )
);
} );
// Canonical ActionExpression is "'abcd' { code }".
it( "parses ActionExpression", function () {
expect( "start = 'abcd' 'efgh' 'ijkl'" ).to.parseAs(
oneRuleGrammar( sequence )
);
expect( "start = 'abcd' 'efgh' 'ijkl'\n{ code }" ).to.parseAs(
oneRuleGrammar( actionSequence )
);
} );
// Canonical SequenceExpression is "'abcd' 'efgh' 'ijkl'".
it( "parses SequenceExpression", function () {
expect( "start = a:'abcd'" ).to.parseAs(
oneRuleGrammar( labeledAbcd )
);
@ -313,42 +380,54 @@ describe("PEG.js grammar parser", function() {
expect( "start = a:'abcd'\nb:'efgh'\nc:'ijkl'\nd:'mnop'" ).to.parseAs(
oneRuleGrammar( sequence4 )
);
} );
// Canonical LabeledExpression is "a:'abcd'".
it( "parses LabeledExpression", function () {
expect( "start = a\n:\n!'abcd'" ).to.parseAs( oneRuleGrammar( labeledSimpleNot ) );
expect( "start = !'abcd'" ).to.parseAs( oneRuleGrammar( simpleNotAbcd ) );
} );
// Canonical PrefixedExpression is "!'abcd'".
it( "parses PrefixedExpression", function () {
expect( "start = !\n'abcd'?" ).to.parseAs( oneRuleGrammar( simpleNotOptional ) );
expect( "start = 'abcd'?" ).to.parseAs( oneRuleGrammar( optional ) );
} );
// Canonical PrefixedOperator is "!".
it( "parses PrefixedOperator", function () {
expect( "start = $'abcd'?" ).to.parseAs( oneRuleGrammar( textOptional ) );
expect( "start = &'abcd'?" ).to.parseAs( oneRuleGrammar( simpleAndOptional ) );
expect( "start = !'abcd'?" ).to.parseAs( oneRuleGrammar( simpleNotOptional ) );
} );
// Canonical SuffixedExpression is "'abcd'?".
it( "parses SuffixedExpression", function () {
expect( "start = 'abcd'\n?" ).to.parseAs( oneRuleGrammar( optional ) );
expect( "start = 'abcd'" ).to.parseAs( oneRuleGrammar( literalAbcd ) );
} );
// Canonical SuffixedOperator is "?".
it( "parses SuffixedOperator", function () {
expect( "start = 'abcd'?" ).to.parseAs( oneRuleGrammar( optional ) );
expect( "start = 'abcd'*" ).to.parseAs( oneRuleGrammar( zeroOrMore ) );
expect( "start = 'abcd'+" ).to.parseAs( oneRuleGrammar( oneOrMore ) );
} );
// Canonical PrimaryExpression is "'abcd'".
it( "parses PrimaryExpression", function () {
expect( "start = 'abcd'" ).to.parseAs( trivialGrammar );
expect( "start = [a-d]" ).to.parseAs( classGrammar( [ [ "a", "d" ] ], false, false ) );
expect( "start = ." ).to.parseAs( anyGrammar() );
@ -358,31 +437,39 @@ describe("PEG.js grammar parser", function() {
expect( "start = (\na:'abcd'\n)" ).to.parseAs( oneRuleGrammar( groupLabeled ) );
expect( "start = (\n'abcd' 'efgh' 'ijkl'\n)" ).to.parseAs( oneRuleGrammar( groupSequence ) );
expect( "start = (\n'abcd'\n)" ).to.parseAs( trivialGrammar );
} );
// Canonical RuleReferenceExpression is "a".
it( "parses RuleReferenceExpression", function () {
expect( "start = a" ).to.parseAs( ruleRefGrammar( "a" ) );
expect( "start = a\n=" ).to.failToParse();
expect( "start = a\n'abcd'\n=" ).to.failToParse();
} );
// Canonical SemanticPredicateExpression is "!{ code }".
it( "parses SemanticPredicateExpression", function () {
expect( "start = !\n{ code }" ).to.parseAs( oneRuleGrammar( semanticNot ) );
} );
// Canonical SemanticPredicateOperator is "!".
it( "parses SemanticPredicateOperator", function () {
expect( "start = &{ code }" ).to.parseAs( oneRuleGrammar( semanticAnd ) );
expect( "start = !{ code }" ).to.parseAs( oneRuleGrammar( semanticNot ) );
} );
// The SourceCharacter rule is not tested.
// Canonical WhiteSpace is " ".
it( "parses WhiteSpace", function () {
expect( "start =\t'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =\v'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =\f'abcd'" ).to.parseAs( trivialGrammar );
@ -390,99 +477,123 @@ describe("PEG.js grammar parser", function() {
expect( "start =\u00A0'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =\uFEFF'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =\u1680'abcd'" ).to.parseAs( trivialGrammar );
} );
// Canonical LineTerminator is "\n".
it( "parses LineTerminator", function () {
expect( "start = '\n'" ).to.failToParse();
expect( "start = '\r'" ).to.failToParse();
expect( "start = '\u2028'" ).to.failToParse();
expect( "start = '\u2029'" ).to.failToParse();
} );
// Canonical LineTerminatorSequence is "\r\n".
it( "parses LineTerminatorSequence", function () {
expect( "start =\n'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =\r\n'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =\r'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =\u2028'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =\u2029'abcd'" ).to.parseAs( trivialGrammar );
} );
// Canonical Comment is "/* comment */".
it( "parses Comment", function () {
expect( "start =// comment\n'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =/* comment */'abcd'" ).to.parseAs( trivialGrammar );
} );
// Canonical MultiLineComment is "/* comment */".
it( "parses MultiLineComment", function () {
expect( "start =/**/'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =/*a*/'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =/*abc*/'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =/**/*/'abcd'" ).to.failToParse();
} );
// Canonical MultiLineCommentNoLineTerminator is "/* comment */".
it( "parses MultiLineCommentNoLineTerminator", function () {
expect( "a = 'abcd'/**/\r\nb = 'efgh'" ).to.parseAs( twoRuleGrammar );
expect( "a = 'abcd'/*a*/\r\nb = 'efgh'" ).to.parseAs( twoRuleGrammar );
expect( "a = 'abcd'/*abc*/\r\nb = 'efgh'" ).to.parseAs( twoRuleGrammar );
expect( "a = 'abcd'/**/*/\r\nb = 'efgh'" ).to.failToParse();
expect( "a = 'abcd'/*\n*/\r\nb = 'efgh'" ).to.failToParse();
} );
// Canonical SingleLineComment is "// comment".
it( "parses SingleLineComment", function () {
expect( "start =//\n'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =//a\n'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =//abc\n'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =//\n@\n'abcd'" ).to.failToParse();
} );
// Canonical Identifier is "a".
it( "parses Identifier", function () {
expect( "start = a:'abcd'" ).to.parseAs( oneRuleGrammar( labeledAbcd ) );
} );
// Canonical IdentifierName is "a".
it( "parses IdentifierName", function () {
expect( "start = a" ).to.parseAs( ruleRefGrammar( "a" ) );
expect( "start = ab" ).to.parseAs( ruleRefGrammar( "ab" ) );
expect( "start = abcd" ).to.parseAs( ruleRefGrammar( "abcd" ) );
} );
// Canonical IdentifierStart is "a".
it( "parses IdentifierStart", function () {
expect( "start = a" ).to.parseAs( ruleRefGrammar( "a" ) );
expect( "start = $" ).to.parseAs( ruleRefGrammar( "$" ) );
expect( "start = _" ).to.parseAs( ruleRefGrammar( "_" ) );
expect( "start = \\u0061" ).to.parseAs( ruleRefGrammar( "a" ) );
} );
// Canonical IdentifierPart is "a".
it( "parses IdentifierPart", function () {
expect( "start = aa" ).to.parseAs( ruleRefGrammar( "aa" ) );
expect( "start = a\u0300" ).to.parseAs( ruleRefGrammar( "a\u0300" ) );
expect( "start = a0" ).to.parseAs( ruleRefGrammar( "a0" ) );
expect( "start = a\u203F" ).to.parseAs( ruleRefGrammar( "a\u203F" ) );
expect( "start = a\u200C" ).to.parseAs( ruleRefGrammar( "a\u200C" ) );
expect( "start = a\u200D" ).to.parseAs( ruleRefGrammar( "a\u200D" ) );
} );
// Unicode rules and reserved word rules are not tested.
// Canonical LiteralMatcher is "'abcd'".
it( "parses LiteralMatcher", function () {
expect( "start = 'abcd'" ).to.parseAs( literalGrammar( "abcd", false ) );
expect( "start = 'abcd'i" ).to.parseAs( literalGrammar( "abcd", true ) );
} );
// Canonical StringLiteral is "'abcd'".
it( "parses StringLiteral", function () {
expect( "start = \"\"" ).to.parseAs( literalGrammar( "", false ) );
expect( "start = \"a\"" ).to.parseAs( literalGrammar( "a", false ) );
expect( "start = \"abc\"" ).to.parseAs( literalGrammar( "abc", false ) );
@ -490,10 +601,12 @@ describe("PEG.js grammar parser", function() {
expect( "start = ''" ).to.parseAs( literalGrammar( "", false ) );
expect( "start = 'a'" ).to.parseAs( literalGrammar( "a", false ) );
expect( "start = 'abc'" ).to.parseAs( literalGrammar( "abc", false ) );
} );
// Canonical DoubleStringCharacter is "a".
it( "parses DoubleStringCharacter", function () {
expect( "start = \"a\"" ).to.parseAs( literalGrammar( "a", false ) );
expect( "start = \"\\n\"" ).to.parseAs( literalGrammar( "\n", false ) );
expect( "start = \"\\\n\"" ).to.parseAs( literalGrammar( "", false ) );
@ -501,10 +614,12 @@ describe("PEG.js grammar parser", function() {
expect( "start = \"\"\"" ).to.failToParse();
expect( "start = \"\\\"" ).to.failToParse();
expect( "start = \"\n\"" ).to.failToParse();
} );
// Canonical SingleStringCharacter is "a".
it( "parses SingleStringCharacter", function () {
expect( "start = 'a'" ).to.parseAs( literalGrammar( "a", false ) );
expect( "start = '\\n'" ).to.parseAs( literalGrammar( "\n", false ) );
expect( "start = '\\\n'" ).to.parseAs( literalGrammar( "", false ) );
@ -512,10 +627,12 @@ describe("PEG.js grammar parser", function() {
expect( "start = '''" ).to.failToParse();
expect( "start = '\\'" ).to.failToParse();
expect( "start = '\n'" ).to.failToParse();
} );
// Canonical CharacterClassMatcher is "[a-d]".
it( "parses CharacterClassMatcher", function () {
expect( "start = []" ).to.parseAs(
classGrammar( [], false, false )
);
@ -542,20 +659,24 @@ describe("PEG.js grammar parser", function() {
expect( "start = [\\\n]" ).to.parseAs(
classGrammar( [], false, false )
);
} );
// Canonical ClassCharacterRange is "a-d".
it( "parses ClassCharacterRange", function () {
expect( "start = [a-d]" ).to.parseAs( classGrammar( [ [ "a", "d" ] ], false, false ) );
expect( "start = [a-a]" ).to.parseAs( classGrammar( [ [ "a", "a" ] ], false, false ) );
expect( "start = [b-a]" ).to.failToParse( {
message: "Invalid character range: b-a."
} );
} );
// Canonical ClassCharacter is "a".
it( "parses ClassCharacter", function () {
expect( "start = [a]" ).to.parseAs( classGrammar( [ "a" ], false, false ) );
expect( "start = [\\n]" ).to.parseAs( classGrammar( [ "\n" ], false, false ) );
expect( "start = [\\\n]" ).to.parseAs( classGrammar( [], false, false ) );
@ -563,31 +684,39 @@ describe("PEG.js grammar parser", function() {
expect( "start = []]" ).to.failToParse();
expect( "start = [\\]" ).to.failToParse();
expect( "start = [\n]" ).to.failToParse();
} );
// Canonical LineContinuation is "\\\n".
it( "parses LineContinuation", function () {
expect( "start = '\\\r\n'" ).to.parseAs( literalGrammar( "", false ) );
} );
// Canonical EscapeSequence is "n".
it( "parses EscapeSequence", function () {
expect( "start = '\\n'" ).to.parseAs( literalGrammar( "\n", false ) );
expect( "start = '\\0'" ).to.parseAs( literalGrammar( "\x00", false ) );
expect( "start = '\\xFF'" ).to.parseAs( literalGrammar( "\xFF", false ) );
expect( "start = '\\uFFFF'" ).to.parseAs( literalGrammar( "\uFFFF", false ) );
expect( "start = '\\09'" ).to.failToParse();
} );
// Canonical CharacterEscapeSequence is "n".
it( "parses CharacterEscapeSequence", function () {
expect( "start = '\\n'" ).to.parseAs( literalGrammar( "\n", false ) );
expect( "start = '\\a'" ).to.parseAs( literalGrammar( "a", false ) );
} );
// Canonical SingleEscapeCharacter is "n".
it( "parses SingleEscapeCharacter", function () {
expect( "start = '\\''" ).to.parseAs( literalGrammar( "'", false ) );
expect( "start = '\\\"'" ).to.parseAs( literalGrammar( "\"", false ) );
expect( "start = '\\\\'" ).to.parseAs( literalGrammar( "\\", false ) );
@ -597,14 +726,17 @@ describe("PEG.js grammar parser", function() {
expect( "start = '\\r'" ).to.parseAs( literalGrammar( "\r", false ) );
expect( "start = '\\t'" ).to.parseAs( literalGrammar( "\t", false ) );
expect( "start = '\\v'" ).to.parseAs( literalGrammar( "\v", false ) );
} );
// Canonical NonEscapeCharacter is "a".
it( "parses NonEscapeCharacter", function () {
expect( "start = '\\a'" ).to.parseAs( literalGrammar( "a", false ) );
// The negative predicate is impossible to test with PEG.js grammar
// structure.
} );
// The EscapeCharacter rule is impossible to test with PEG.js grammar
@ -612,28 +744,37 @@ describe("PEG.js grammar parser", function() {
// Canonical HexEscapeSequence is "xFF".
it( "parses HexEscapeSequence", function () {
expect( "start = '\\xFF'" ).to.parseAs( literalGrammar( "\xFF", false ) );
} );
// Canonical UnicodeEscapeSequence is "uFFFF".
it( "parses UnicodeEscapeSequence", function () {
expect( "start = '\\uFFFF'" ).to.parseAs( literalGrammar( "\uFFFF", false ) );
} );
// Digit rules are not tested.
// Canonical AnyMatcher is ".".
it( "parses AnyMatcher", function () {
expect( "start = ." ).to.parseAs( anyGrammar() );
} );
// Canonical CodeBlock is "{ code }".
it( "parses CodeBlock", function () {
expect( "start = 'abcd' { code }" ).to.parseAs( actionGrammar( " code " ) );
} );
// Canonical Code is " code ".
it( "parses Code", function () {
expect( "start = 'abcd' {a}" ).to.parseAs( actionGrammar( "a" ) );
expect( "start = 'abcd' {abc}" ).to.parseAs( actionGrammar( "abc" ) );
expect( "start = 'abcd' {{a}}" ).to.parseAs( actionGrammar( "{a}" ) );
@ -641,41 +782,51 @@ describe("PEG.js grammar parser", function() {
expect( "start = 'abcd' {{}" ).to.failToParse();
expect( "start = 'abcd' {}}" ).to.failToParse();
} );
// Unicode character category rules and token rules are not tested.
// Canonical __ is "\n".
it( "parses __", function () {
expect( "start ='abcd'" ).to.parseAs( trivialGrammar );
expect( "start = 'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =\r\n'abcd'" ).to.parseAs( trivialGrammar );
expect( "start =/* comment */'abcd'" ).to.parseAs( trivialGrammar );
expect( "start = 'abcd'" ).to.parseAs( trivialGrammar );
} );
// Canonical _ is " ".
it( "parses _", function () {
expect( "a = 'abcd'\r\nb = 'efgh'" ).to.parseAs( twoRuleGrammar );
expect( "a = 'abcd' \r\nb = 'efgh'" ).to.parseAs( twoRuleGrammar );
expect( "a = 'abcd'/* comment */\r\nb = 'efgh'" ).to.parseAs( twoRuleGrammar );
expect( "a = 'abcd' \r\nb = 'efgh'" ).to.parseAs( twoRuleGrammar );
} );
// Canonical EOS is ";".
it( "parses EOS", function () {
expect( "a = 'abcd'\n;b = 'efgh'" ).to.parseAs( twoRuleGrammar );
expect( "a = 'abcd' \r\nb = 'efgh'" ).to.parseAs( twoRuleGrammar );
expect( "a = 'abcd' // comment\r\nb = 'efgh'" ).to.parseAs( twoRuleGrammar );
expect( "a = 'abcd'\nb = 'efgh'" ).to.parseAs( twoRuleGrammar );
} );
// Canonical EOF is the end of input.
it( "parses EOF", function () {
expect( "start = 'abcd'\n" ).to.parseAs( trivialGrammar );
} );
it( "reports unmatched brace", function () {
const text = "rule = \n 'x' { y \n z";
const errorLocation = {
start: { offset: 13, line: 2, column: 6 },
@ -685,5 +836,7 @@ describe("PEG.js grammar parser", function() {
.to.throw( "Unbalanced brace." )
.with.property( "location" )
.that.deep.equals( errorLocation );
} );
} );
