Implement initializers

redux
David Majda 14 years ago
parent 718bcf5f87
commit 66de889f4b

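This commit lets a grammar start with an initializer: a "{ ... }" block of JavaScript, optionally followed by a semicolon, placed before the first rule. Its code is emitted into the generated parser ahead of the start rule invocation, so variables and functions it defines are available to rule actions. A minimal usage sketch; the grammar string is copied from the tests added below, and the parse() call assumes the parser object returned by PEG.buildParser:

// The leading { a = 42; } block is the initializer; the action of the
// "start" rule can read a because the initializer ran first.
var parser = PEG.buildParser('{ a = 42; }; start = "a" { return a; }');

parser.parse("a"); // evaluates to 42
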
@@ -285,7 +285,7 @@ PEG.Compiler = {
     rule_ref:
       function(node) {
-        if (typeof(ast[node.name]) === "undefined") {
+        if (typeof(ast.rules[node.name]) === "undefined") {
           throw new PEG.GrammarError(
             "Referenced rule \"" + node.name + "\" does not exist."
           );
@@ -299,14 +299,14 @@ PEG.Compiler = {
       function check(node) { checkFunctions[node.type](node); }
 
-      for (var rule in ast) {
-        check(ast[rule]);
+      for (var rule in ast.rules) {
+        check(ast.rules[rule]);
       }
     },
 
     /* Checks that the start rule is defined. */
     function(ast, startRule) {
-      if (typeof(ast[startRule]) === "undefined") {
+      if (typeof(ast.rules[startRule]) === "undefined") {
         throw new PEG.GrammarError(
           "Missing \"" + startRule + "\" rule."
         );
@@ -356,7 +356,7 @@ PEG.Compiler = {
             "Left recursion detected for rule \"" + node.name + "\"."
           );
         }
-        check(ast[node.name], appliedRules);
+        check(ast.rules[node.name], appliedRules);
       },
 
       literal: nop,
@@ -368,8 +368,8 @@ PEG.Compiler = {
        checkFunctions[node.type](node, appliedRules);
      }
 
-     for (var rule in ast) {
-       check(ast[rule], []);
+     for (var rule in ast.rules) {
+       check(ast.rules[rule], []);
      }
    }
 ],
@@ -432,18 +432,18 @@ PEG.Compiler = {
         replaceFunctions[node.type](node, from, to);
       }
 
-      for (var rule in ast) {
-        replace(ast[rule], from, to);
+      for (var rule in ast.rules) {
+        replace(ast.rules[rule], from, to);
       }
     }
 
-    for (var rule in ast) {
-      if (isProxyRule(ast[rule])) {
-        replaceRuleRefs(ast, ast[rule].name, ast[rule].expression.name);
+    for (var rule in ast.rules) {
+      if (isProxyRule(ast.rules[rule])) {
+        replaceRuleRefs(ast, ast.rules[rule].name, ast.rules[rule].expression.name);
         if (rule === startRule) {
-          startRule = ast[rule].expression.name;
+          startRule = ast.rules[rule].expression.name;
         }
-        delete ast[rule];
+        delete ast.rules[rule];
       }
     }
@@ -452,6 +452,10 @@ PEG.Compiler = {
   ],
 
   _compileFunctions: {
+    initializer: function(node) {
+      return node.code;
+    },
+
     rule: function(node) {
       var resultVar = PEG.Compiler.generateUniqueIdentifier("result");
@@ -871,9 +875,13 @@ PEG.Compiler = {
       startRule = newAstNadStartRule[1];
     }
 
+    var initializerCode = ast.initializer !== null
+      ? this.compileNode(ast.initializer)
+      : "";
+
     var parseFunctionDefinitions = [];
-    for (var rule in ast) {
-      parseFunctionDefinitions.push(this.compileNode(ast[rule]));
+    for (var rule in ast.rules) {
+      parseFunctionDefinitions.push(this.compileNode(ast.rules[rule]));
     }
 
     var source = this.formatCode(
@@ -998,6 +1006,8 @@ PEG.Compiler = {
       "    return { line: line, column: column };",
       "  }",
       "  ",
+      "  ${initializerCode}",
+      "  ",
       "  var result = parse_${startRule}({ reportMatchFailures: true });",
       "  ",
       "  /*",
@@ -1054,6 +1064,7 @@ PEG.Compiler = {
       "  return result;",
       "})()",
       {
+        initializerCode: initializerCode,
         parseFunctionDefinitions: parseFunctionDefinitions.join("\n\n"),
         startRule: startRule
       }

File diff suppressed because it is too large.

@@ -1,8 +1,20 @@
 grammar
-  = __ rules:rule+ {
-      var result = {};
-      PEG.ArrayUtils.each(rules, function(rule) { result[rule.name] = rule; });
-      return result;
+  = __ initializer:initializer? rules:rule+ {
+      var rulesConverted = {};
+      PEG.ArrayUtils.each(rules, function(rule) { rulesConverted[rule.name] = rule; });
+
+      return {
+        initializer: initializer !== "" ? initializer : null,
+        rules: rulesConverted
+      }
+    }
+
+initializer
+  = code:action semicolon? {
+      return {
+        type: "initializer",
+        code: code
+      };
     }
 
 rule

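With this grammar change, the parser no longer returns a flat rule map; it returns an object with an initializer slot and the rules nested under "rules". A sketch of the new AST shape, assembled from the action above and the test fixtures below; field names follow the rule()/literal() test helpers, and the expression node is abbreviated, so treat the exact node layout as illustrative:

var exampleAst = {
  // null when the grammar has no leading { ... } block
  initializer: {
    type: "initializer",
    code: " a = 42; "
  },
  // rules now live under "rules" instead of at the top level of the AST
  rules: {
    start: {
      type: "rule",
      name: "start",
      displayName: null,
      expression: { type: "literal", value: "a" } // abbreviated
    }
  }
};
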
@@ -379,6 +379,18 @@ test("actions", function() {
   doesNotParse(notAMatchParser, "b");
 });
 
+test("initializer", function() {
+  var variableDefinitionParser = PEG.buildParser(
+    '{ a = 42; }; start = "a" { return a; }'
+  );
+  parses(variableDefinitionParser, "a", 42);
+
+  var functionDefinitionParser = PEG.buildParser(
+    '{ function f() { return 42; } }; start = "a" { return f(); }'
+  );
+  parses(functionDefinitionParser, "a", 42);
+});
+
 test("rule references", function() {
   var parser = PEG.buildParser([
     'start = static / dynamic',

@@ -20,6 +20,13 @@ global.grammarParserDoesNotParseWithMessage = function(input, message) {
 module("Grammar Parser");
 
+function initializer(code) {
+  return {
+    type: "initializer",
+    code: code
+  };
+}
+
 function rule(name, displayName, expression) {
   return {
     type: "rule",
@@ -119,7 +126,10 @@ var sequenceLiterals = sequence([literalAbcd, literalEfgh, literalIjkl]);
 var choiceLiterals = choice([literalAbcd, literalEfgh, literalIjkl]);
 
 function oneRuleGrammar(expression) {
-  return { start: rule("start", null, expression) };
+  return {
+    initializer: null,
+    rules: { start: rule("start", null, expression) }
+  };
 }
 
 var simpleGrammar = oneRuleGrammar(literal("abcd"));
@@ -144,19 +154,42 @@ function actionGrammar(action) {
   return oneRuleGrammar(action_(literal("a"), action));
 }
 
+var initializerGrammar = {
+  initializer: initializer(" code "),
+  rules: {
+    a: rule("a", null, literalAbcd),
+  }
+};
+
 /* Canonical grammar is "a: \"abcd\"; b: \"efgh\"; c: \"ijkl\";". */
 
 test("parses grammar", function() {
-  grammarParserParses('a = "abcd"', { a: rule("a", null, literalAbcd) });
+  grammarParserParses(
+    'a = "abcd"',
+    {
+      initializer: null,
+      rules: { a: rule("a", null, literalAbcd) }
+    }
+  );
+  grammarParserParses('{ code }; a = "abcd"', initializerGrammar);
   grammarParserParses(
     'a = "abcd"; b = "efgh"; c = "ijkl"',
     {
-      a: rule("a", null, literalAbcd),
-      b: rule("b", null, literalEfgh),
-      c: rule("c", null, literalIjkl)
+      initializer: null,
+      rules: {
+        a: rule("a", null, literalAbcd),
+        b: rule("b", null, literalEfgh),
+        c: rule("c", null, literalIjkl)
+      }
     }
   );
 });
 
+/* Canonical initializer is "{ code }". */
+
+test("parses initializer", function() {
+  grammarParserParses('{ code }a = "abcd"', initializerGrammar);
+  grammarParserParses('{ code };a = "abcd"', initializerGrammar);
+});
+
 /* Canonical rule is "a: \"abcd\"". */
 
 test("parses rule", function() {
   grammarParserParses(
@@ -166,7 +199,8 @@ test("parses rule", function() {
   grammarParserParses(
     'start "start rule" = "abcd" / "efgh" / "ijkl"',
     {
-      start: rule("start", "start rule", choiceLiterals)
+      initializer: null,
+      rules: { start: rule("start", "start rule", choiceLiterals) }
     }
   );
 
   grammarParserParses(
