@rsms
Last active November 15, 2020 04:51
Pattern-matching parse-time macros for Babel
macro { mul(...$args) sdf } -> { [not_matched, $args] }
macro { mul(...$args) } -> { [matched, $args] }
macro { foo $a x ...$b y } -> { mul($a, $b) + 3 }
lol()
console.log( foo 1 x 2, 3 y, 6 )
//macro { mul(...$args) sdf } -> { [not_matched, $args] }
//macro { mul(...$args) } -> { [matched, $args] }
//macro { foo $a x ...$b y } -> { mul($a, $b) + 3 }
lol();
console.log([matched, 1, 2, 3] + 3, 6);
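// Note (for context): the three //macro comment lines above are how the macro
// definitions survive in the generated output, while `foo 1 x 2, 3 y` first
// expands to `mul(1, 2, 3) + 3` and then, via the second mul macro, to
// `[matched, 1, 2, 3] + 3`.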
// ... from end of babel-macro.js:
assertParses('foo();'
, 'foo()');
assertParsesSame('bar();'
, 'macro foo -> bar; foo()'
, 'macro foo -> bar\nfoo()'
, 'macro foo -> bar foo()'
)
assertParseThrows(/Unexpected token.*\(1:10\)/i,
'macro foo = bar foo()')
assertParsesSame('bar;'
, 'macro foo() -> bar; foo()'
, 'macro {foo()} -> bar; foo()'
, 'macro {foo()} -> {bar}; foo()'
, 'macro foo() -> {bar}; foo()'
, 'macro foo($) -> bar; foo(1)'
)
assertParsesSame('bar[1];'
, 'macro foo($) -> bar[$]; foo(1)'
, 'macro {foo($)} -> bar[$]; foo(1)'
, 'macro {foo $} -> bar[$]; foo 1'
)
// BUG fails when flow plugin is enabled, with "Unexpected token" at "*" during expansion:
var src =
'macro {mul $1 $2} -> ($1 * $2);\n'+
'mul 3 4';
assertParses('3 * 4;', src, { plugins: { jsx: 1, macro: 1 } } )
assertParses('3 * 4;', src, { plugins: { macro: 1 } } )
assertParseThrows(/Unexpected token.*\(1:25\)/i
, src, { plugins: { jsx: 1, macro: 1, flow: 1 } } )
assertParseThrows(/Unexpected token.*\(1:25\)/i
, src, { plugins: { macro: 1, flow: 1 } } )
assertParseThrows(/Unexpected token.*\(1:26\)/i
, 'macro {mul($1 $2)} -> ($1 * $2)'
, 'mul(3 4)'
, { plugins: { macro: 1, flow: 1 } } )
assertParses('r();'
, 'macro A -> {}; r(A)')
// deep expansion
assertParses('r(d(3,d));'
, 'macro D -> d'
, 'macro C -> D'
, 'macro B -> C(3, D)'
, 'macro A -> B'
, 'r(A)')
assertParses('r(d(2,1,d));'
, 'macro D -> d'
, 'macro C -> D'
, 'macro {B $2 $1} -> C($1, $2, D)'
, 'macro A -> {B 1 2}'
, 'r(A)')
assertParseThrows(/Cyclic macro expansion/i
, 'macro B -> C(A)'
, 'macro A -> B'
, 'a(A)')
assertParseThrows(/Cyclic macro expansion/i
, 'macro D -> B'
, 'macro C -> D'
, 'macro B -> C(3, D)'
, 'macro A -> B'
, 'r(A)')
// rest/spread vars
assertParses('d(1,2,3);'
, 'macro D(...$) -> { d($) }' // non-greedy, bounded rest
, 'D(1,2,3)') //< rest terminating at expected token ")"
assertParses('d();'
, 'macro D(...$) -> { d($) }' // non-greedy, bounded rest
, 'D()') //< empty rest terminating at expected token ")"
assertParses('d(1,2,3);'
, 'macro { D ...$ } -> { d($) }' // greedy rest
, 'D 1,2,3') //< rest terminating at EOF
assertParses('d();'
, 'macro { D ...$ } -> { d($) }' // greedy rest
, 'D') //< empty rest terminating at EOF
function time(label) {
console.time(label);
var t = Date.now();
time.end = function() {
process.stdout.write('\x1b[1;35m');
console.timeEnd(label);
process.stdout.write('\x1b[0;39m');
}
};
function timeavg(label, iterations, f) {
if (typeof iterations === 'function') {
f = iterations;
iterations = 1000;
}
var n = iterations, t = Date.now();
do { f(); } while (--n);
t = Date.now() - t;
console.log(color('1;35', label + ': ' + (t / iterations).toFixed(0) + 'ms/op')+
color('0;90', ' ('+t+'ms, ' + iterations + ' iterations)'));
}
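// Example usage of the timing helpers (labels, counts and doWork are placeholders):
//   time('parse'); doWork(); time.end();              // one-shot, prints elapsed time
//   timeavg('parse', 100, function() { doWork(); });  // prints ms/op over 100 runs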
// red, green, yellow, blue, magenta, cyan
var colors = {r:'1;31',g:'1;32',y:'1;33',b:'1;34',m:'1;35',c:'1;36'}
, colorSeqNext = 0
, colorSeq = Object.keys(colors)
, color;
if (process.stdout.isTTY) {
color = function(c, s) {
if (s === undefined) {
s = c;
c = colors[colorSeq[colorSeqNext++ % colorSeq.length]];
} else if (c in colors) {
c = colors[c];
}
return '\x1b['+c+'m' + s + '\x1b[0;39m';
};
} else {
color = function(c, s) { return s || c; };
}
var srcLineAt = function(source, p) {
var top, lineno = 1, start = p, end = p;
while (source[--start] !== '\n' && start !== -1) {}
top = start;
while (top !== -1) {
if (source[top--] === '\n') {
++lineno;
}
}
while (end < source.length) {
if (source[end++] === '\n') {
--end;
break;
}
}
if (end > source.length) {
end = source.length;
}
return {
offs: p,
endoffs: end,
source: source.substring(start+1, end),
line: lineno,
column: p - start,
}
}
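// For illustration:
//   srcLineAt('ab\ncd', 4) -> { offs: 4, endoffs: 5, source: 'cd', line: 2, column: 2 }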
function fmtsrcline(source, p, label, noCaret) {
var nsp = function(n) {
var s, sp = '        '; // padding buffer (fast path for small n)
if (n <= sp.length) {
return sp.substr(0, n);
} else {
s = '';
while (n--) { s += ' ' }
return s;
}
}
var padL = function(n, s) {
var s = String(s);
return nsp(n-s.length) + s;
}
var caret = function(column, label) {
var s = '^';
if (label) { s += ' ' + label; }
for (var n = column; --n;) { s = ' ' + s }
return color('m', s);
}
var margin = 7;
var m = srcLineAt(source, p), ch, endmark;
if (label) {
ch = m.source[m.column-1];
endmark = (m.endoffs >= source.length ? '<EOF>' : '\\n');
m.source = m.source.substr(0,m.column-1) +
color('44;1;37', ch || endmark) +
(m.source.substr(m.column) || (ch ? endmark : ''));
}
var s = color('90', padL(margin, m.line + ' | ')) + m.source;
if (!noCaret) {
s += '\n' + nsp(margin) + caret(m.column, (label ? (label + ': ' + color('g',m.offs)) : null));
}
return s;
}
function assert_fail() {
var e = new Error();
e.name = color('r','AssertionError');
var stack = e.stack.split(/\n/);
var m = stack[3].match(/\(([^:]+):(\d+):(\d+)\)$/);
if (m) {
stack[0] += ' at ' + require('path').basename(m[1]) + ':' + m[2] + ':' + m[3];
try {
var line = require('fs').readFileSync(m[1], 'utf8').split(/\n/)[parseInt(m[2])-1];
stack[1] = color('90', m[2] + ' | ') + line;
e.stack = stack.slice(0,2).join('\n');
// require('util').inspect(m) + e.stack;
} catch (_) { console.error('_', _) }
}
console.error(String(e.stack || e));
process.exit(4);
}
function assert(x) {
if (!x) {
assert_fail();
}
}
Map.prototype.inspect = function(depth, ctx) {
// console.log(depth, ctx);
var s = (ctx.colors ? color('m','Map{') : 'Map{');
var pre = ' '.substr(0,depth);
pre = '\n' + pre;
++ctx.depth;
for (var e of this) { s += pre + repr(e[0]) + ' -> ' + inspect(e[1],ctx); }
--ctx.depth;
if (s !== 'Map{') {
s += ' ';
}
return s+(ctx.colors ? color('m','}') : '}');
}
function fmtcallstack(offs, len, glue, lineprefix) {
if (!offs) { offs = 2 } else { offs += 2; } // skip this callsite and first "message" line
if (!len || len < 0) { len = 9000; }
if (!lineprefix) { lineprefix = ' '; }
return (new Error).stack.split(/\n/).slice(offs, offs+len).map(function(line, i) {
var isself, filename, lineno = -1, builtin;
line = lineprefix + line.trim().replace(/\(([^\)]+):(\d+):(\d+)\)$/, function (s, $1, $2, $3) {
isself = ($1 === __filename);
filename = $1;
builtin = !isself && (!filename || filename.indexOf('/') === -1);
lineno = parseInt($2);
if ($1.indexOf(__dirname) === 0) { $1 = $1.substr(__dirname.length+1); }
if (isself) { $1 = color('37',$1); } else { $1 = color('90',$1); }
return color('90', '(') + $1 + color('90', ':' + $2 + ':' + $3 + ')');
}).replace(/(at\s+)(.+)\.([^\.]+)(\s)/, function(m, $1, $2, $3, $4) {
return 'at ' + color('1;37', $2) + '.' + color(isself ? 'g':builtin?'y':'c', $3) + $4;
}).replace(/(at\s+)([^\.]+)(\s)/, function(m, $1, $2, $3) {
return 'at ' + color(isself ? 'g':builtin?'y':'c', $2) + $3;
});
if (filename && filename.indexOf('/') !== -1) {
try {
line += '\n ' + lineprefix +
color('90', require('fs').readFileSync(filename,'utf8').split(/\n/)[lineno-1].trim());
} catch (e) {}
}
return line;
}).join(glue || '\n');
}
time('imports');
var babel = require('babel-core');
var fs = require('fs');
var inspect = require('util').inspect;
// var sweet = require('sweet.js');
time.end();
function repr(obj, depth, colors) {
return inspect(obj, {depth:depth===undefined?4:depth, colors:colors===undefined?true:colors});
}
function fmtit(it) {
var s;
for (var v of it) {
s = (s ? s + ', ' : '') + repr(v);
}
return s;
}
time('readfile');
var srcCode = fs.readFileSync('./lab-babel-sweet-sample.js', 'utf8');
time.end();
var res;
var babylon = babel.babylon || babel.acorn;
var tokTypes = babylon.tokTypes;
// var types = [tokTypes.name, tokTypes.at, tokTypes.num];
// var values = [['a','b','c','d'], null, [1,2,3,4] ];
// var iterations = 10000;
// var objectMap;
//
// timeavg('Object build', iterations, function() {
// var m = {};
// for (var i = 0; i !== types.length; ++i) {
// var t = types[i];
// var v = values[i];
// var t_om = m[t.label];
// if (t===tokTypes.name||t===tokTypes.regexp||t===tokTypes.string||t===tokTypes.num) {
// if (!t_om) {
// m[t.label] = t_om = {};
// }
// for (var x in v) {
// t_om[v[x]] = true;
// }
// } else {
// m[t.label] = true;
// }
// }
// objectMap = m;
// });
//
// console.log('objectMap:', repr(objectMap))
//
// var mapMap;
// timeavg('Map build', iterations, function() {
// var m = new Map;
// for (var i = 0; i !== types.length; ++i) {
// var t = types[i];
// var v = values[i];
// var tm = m.get(t);
// if (t===tokTypes.name||t===tokTypes.regexp||t===tokTypes.string||t===tokTypes.num) {
// if (!tm) {
// m.set(t, tm = new Map);
// }
// for (var x in v) {
// tm.set(v[x], true);
// }
// } else {
// m.set(t, true);
// }
// }
// mapMap = m;
// });
//
// console.log('mapMap:', repr(mapMap))
// return;
function MacroContext() {
this._defScope = [null, null];
this._evalSet = null; // Set<Macro> containing all macros currently being expand-eval'd
}
MacroContext.prototype.pushMacroDefScope = function() {
this._defScope = [this._defScope, null];
}
MacroContext.prototype.popMacroDefScope = function() {
this._defScope = this._defScope[0];
}
MacroContext.prototype.defineMacro = function(m) {
// TODO: use a b-tree or trie instead of this hard-coded one-step-deep setup
var typeMap = this._defScope[1];
if (!typeMap) {
typeMap = this._defScope[1] = new Map;
}
assert(m.pattern.tokens.length > 0);
var tok0 = m.pattern.tokens[0];
// console.log('defineMacro tok0:', fmttok(tok0))
var t = tok0.type; //:BabylonTokenType
var node = typeMap.get(t);
var prev;
if (t===tokTypes.name || t===tokTypes.regexp || t===tokTypes.string || t===tokTypes.num) {
// identified by type + value
if (!node) {
typeMap.set(t, node = new Map);
node.set(tok0.value, m);
} else {
prev = node.get(tok0.value);
node.set(tok0.value, m);
}
} else {
if (node) {
prev = node;
}
typeMap.set(t, m);
}
return prev;
}
MacroContext.prototype.getMacro = function(type, value) {
var t = type
, m
, scope = this._defScope
;
if (t===tokTypes.name || t===tokTypes.regexp || t===tokTypes.string || t===tokTypes.num) {
while (1) {
if ((m = scope[1]) && (m = m.get(t)) && (m = m.get(value))) {
return m;
}
if (!(scope = scope[0])) {
return null;
}
}
} else {
while (1) {
if ((m = scope[1]) && (m = m.get(t))) {
return m;
}
if (!(scope = scope[0])) {
return null;
}
}
}
}
MacroContext.prototype.isTopLevel = function() {
return !this._defScope[0];
}
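// Sketch of how the scope chain is used (m stands for a parsed MacroDefinition):
//   var ctx = new MacroContext();
//   ctx.defineMacro(m);                  // keyed on m.pattern.tokens[0] (type, plus value
//                                        // for name/string/num/regexp tokens)
//   ctx.pushMacroDefScope();             // entering a block starts a child scope
//   ctx.getMacro(tokTypes.name, 'foo');  // walks _defScope outward until a hit or null
//   ctx.popMacroDefScope();              // leaving the block drops its macros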
// console.log(babel.types); return;
// console.log(Object.keys(tokTypes).sort().map(function(k) {
// return k + ':\t' + inspect(tokTypes[k],{depth:9});
// }).join('\n')); return;
// var babylonTypes = require('babel-core/node_modules/babylon/lib/tokenizer/types');
// console.log(babylonTypes.keywords);
// babylonTypes.keywords['macro'] = tokTypes.macro;
var ccDollar = '$'.charCodeAt(0);
var ccGreaterThan = '>'.charCodeAt(0);
var TokenVarTag = Symbol('MacroTokenVarTag');
var TokenVar1 = Symbol('MacroTokenVar1'); // $x
var TokenVarN = Symbol('MacroTokenVarN'); // ...$x
var MatchingToken = Symbol('MatchingToken');
var MemoizedResult = Symbol('MemoizedResult');
var Empty = Symbol('Empty');
var Pattern = Symbol('Pattern');
var Body = Symbol('Body');
function dumpPState(s) {
var prevtokz = 10;
console.log(' previous tokens: '+(s.tokens.length > prevtokz ? '... ':'') +
s.tokens.slice(Math.max(0,s.tokens.length-prevtokz)).map(fmttok).join(' '))
console.log(' next token: '+fmttok(s))
console.log(fmtsrcline(s.input, s.start, 's.start'))
console.log(fmtsrcline(s.input, s.end, 's.end'))
console.log(fmtsrcline(s.input, s.pos, 's.pos'))
}
function fmttok(t) {
var tl;
if (t.type.label) {
tl = color('y', t.type.label[0].match(/[A-Za-z$]/) ? t.type.label : inspect(t.type.label));
} else {
tl = color('b', inspect(t.type));
}
return '('+tl + (
t.value === undefined ? '' :
' ' + (
t[TokenVarTag] ? (t[TokenVarTag] === TokenVarN ? color('90','...') : '') +
color('b',t.value) :
repr(t.value)
)
) +')';
}
function fmttoks(tokens) {
return tokens.map(fmttok).join(', ');
}
// fmtmacropat(p:MacroPattern, behaviour?:Pattern|Body, style?:Function):string
function fmtmacropat(p, behaviour, style) {
var source = '', toks = p.tokens;
if (style === undefined) { style = color; } else if (!style){style=function(a,b){return b||a}}
if (toks.length > 0) {
source = toks.map(function(t) {
if (t.value !== undefined) {
return (
t[TokenVarTag] ? (
t[TokenVarTag] === TokenVarN ? style('90','...') : ''
) + style('b', t.value) :
style('g',t.value)
);
}
return t.type.label;
}).join(' ');
}
return source ? style('90','{ ') + style(source) + style('90',' }') : style('90','{}');
}
// fmtmacro(m:MacroDef, style?:Function):string
function fmtmacro(m, style) {
if (style === undefined) { style = color; } else if (!style){style=function(a,b){return b||a}}
return fmtmacropat(m.pattern, Pattern, style) +
style('y',' -> ') + fmtmacropat(m.body, Body, style);
}
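// For a definition like `macro {mul $1 $2} -> ($1 * $2)` this formats roughly as:
//   { mul $1 $2 } -> { ( $1 * $2 ) }
// (used for the "Macro ... redefined" diagnostic and in debug logging below)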
function tokeq(t1, t2) {
return (
t1.type === t2.type &&
t1.value === t2.value
);
}
function MacroParserPlugin(pp) {
function copytok(t) {
return {
type: t.type,
value: t.value,
start: t.start,
end: t.end,
loc: { start: t.startLoc, end: t.endLoc },
}
}
function rewindPState(s) {
//
}
if (!pp.pushDiagnostic) {
pp.pushDiagnostic = function(type, pos, message, endPos) {
var lineBreakG = /\r\n?|\n|\u2028|\u2029/g;
function getLineInfo(input, offset) {
for (var line = 1, cur = 0;;) {
lineBreakG.lastIndex = cur;
var match = lineBreakG.exec(input);
if (match && match.index < offset) {
++line;
cur = match.index + match[0].length;
} else {
return { line: line, column: offset - cur };
}
}
};
var loc = getLineInfo(this.input, pos);
var diag = {
type: type,
message: message + " (" + loc.line + ":" + loc.column + ")",
pos: pos,
loc: loc,
endPos: endPos,
};
if (!this.state.diagnostics) {
this.state.diagnostics = [diag];
} else {
this.state.diagnostics.push(diag);
}
};
}
pp.extend("parseTopLevel", function(inner) {
return function(file, program) {
this.macroCtx = new MacroContext();
this.state.diagnostics = null; // Diagnostic[]
enableMacroExpansionHook();
var r = inner.apply(this, arguments);
disableMacroExpansionHook();
//console.log('after parseTopLevel: macros:', repr(this.macroCtx._defScope, 1))
file.macros = this.macroCtx._defScope[1]; //Map|null
file.diagnostics = this.state.diagnostics;
return r;
}
});
pp.extend("parseBlock", function(inner) {
return function (allowStrict) {
// console.log('before parseBlock')
this.macroCtx.pushMacroDefScope();
var r = inner.apply(this, arguments);
this.macroCtx.popMacroDefScope();
// console.log('after parseBlock',
// 'macroCtx._defScope:',Object.keys(this.macroCtx._defScope[1]))
return r;
}
});
pp.extend("parseStatement", function(inner) {
return function() {
if (this.state.type === tokTypes.name && this.state.value === "macro") {
disableMacroExpansionHook();
var m = this.startNode();
this.nextToken(); // read next token and ignore "macro"
var noopNode = this.parseMacroDefinition(m);
enableMacroExpansionHook();
return noopNode;
}
if (macroExpansionHookCounter > 0) {
// We look to expand any macro here because the parser does not call nextToken
// when parsing the top-level (program). See parseTopLevel in babylon/parser/statements.js
console.log('@ PS call maybeExpandMacro')
this.maybeExpandMacro();
}
return inner.apply(this, arguments);
};
});
// function getCommentsAbove(s) {
// var t
// , index = s.tokens.length
// , comments = []
// , line = s.curLine
// ;
// while (index !== 0) {
// t = s.tokens[--index];
// if (t.type !== 'CommentLine') {
// break;
// }
// if (t.loc.end.line !== --line) {
// // comment is more than one line away. E.g.
// // //foo <--
// //
// // //bar
// // //baz
// break;
// }
// comments.push(t);
// }
// comments.reverse();
// return comments;
// }
pp.parseMacroTokens1 = function(behaviour, patternVars) {
var depth
, open
, close
, s = this.state
, vars = null // Map<string,Token>
, startPos = s.start
, openFirst = false
, tokenStartIndex = s.tokens.length
, tokenStartIndex1 = tokenStartIndex+1
, needsNewToken = true
, readToken = this.next.bind(this)
, nextToken = readToken
, skipToken = function(){ nextToken = readToken; }
, parseMacroVar
;
// var readToken2 = readToken;
// readToken = function() { g_dbg_pstate = s.clone(); readToken2(); }
parseMacroVar = function() {
// enters with guarantee: s.value[0] === '$'
// TODO: in body pattern, allow advanced spread of variables, like:
// macro { foo ...$a } -> { <... .bar($a)> }
// macro { list ...$a } -> { [<...$a,>] }
// foo 1 2 3
// list 1 2 3
// result:
// .bar(1).bar(2).bar(3)
// [1,2,3]
// TODO: Allow advanced rest var definitions:
// macro { foo(<...$a,>) } -> { <... .bar($a)> }
// foo(1,2,3)
// result:
// .bar(1).bar(2).bar(3)
//
// TODO: Allow token type-tags in variable definitions:
// macro { str $a<:number> } -> { ($a).toString(16) }
// macro { str $a<:string> } -> { $a }
// macro { str $a } -> { String($a) }
// str 23
// str "23"
// str [2,3]
// result:
// (23).toString(16)
// "23"
// String([2,3])
//
console.log('parseMacroVar s.value='+repr(s.value));
if (s.value.charCodeAt(1) === ccDollar) {
// Escaped dollar-sign match
// E.g. $$x -> {value:"$x", type:name}
console.log('parseMacroVar rewrite '+repr(s.value)+' to '+repr(s.value.substr(1)));
s.value = s.value.substr(1);
return;
}
// Macro variable
// E.g. $x -> {value:"$x", type:name, [TokenVarTag]:TokenVar1|TokenVarN}
var t, index = s.tokens.length;
if (behaviour === Pattern) {
if (vars) {
if (vars.has(s.value)) {
this.raise(s.start, 'Duplicate identifier "'+s.value+'" in macro pattern');
}
} else {
vars = new Map;
}
nextToken(); nextToken = skipToken;
t = s.tokens[index];
if (index !== tokenStartIndex &&
s.tokens[index-1].type === tokTypes.ellipsis) //< unreliable: ".../*comment*/$a"
{
// E.g. ...$x -> {value:"$x", type:name, [TokenVarTag]:TokenVarN}
t[TokenVarTag] = TokenVarN;
// Eat previous "..."
s.tokens.splice(index-1, 1);
} else {
// E.g. $x -> {value:"$x", type:name, [TokenVarTag]:TokenVar1}
t[TokenVarTag] = TokenVar1;
}
vars.set(t.value, t);
//console.log('vars set t', t.value, vars)
} else { // (behaviour === Body)
//console.log('parseMacroVar lookup s.value='+repr(s.value));
var ptok = patternVars.get(s.value)
if (ptok) {
nextToken(); nextToken = skipToken;
t = s.tokens[index];
ptok[MatchingToken] = t;
t[TokenVarTag] = TokenVar1;
// t._MatchingToken = ptok; t._TokenVarTag = 'TokenVar1'; // debug
}
}
}
// find open & close token types, or parse single token
console.log('1st:', fmttok(s));
switch (s.type) {
case tokTypes.bracketL: close = tokTypes.bracketR; break;
case tokTypes.braceL:
case tokTypes.dollarBraceL: close = tokTypes.braceR; break
case tokTypes.parenL: close = tokTypes.parenR; break;
case tokTypes.eof: {
if (behaviour === Pattern) {
this.raise(startPos, 'Missing "->" in macro definition')
} else {
this.raise(startPos, 'Missing macro body')
}
break;
}
case tokTypes.name: if (s.value.charCodeAt(0) === ccDollar) { parseMacroVar(); } break;
}
if (close) {
open = s.type;
openFirst = true; // I.e. first token is a grouping "open+close"
} else {
nextToken();
// Previous token was not open -- maybe the next is?
console.log('2nd:', fmttok(s));
switch (s.type) {
case tokTypes.bracketL: open = s.type; close = tokTypes.bracketR; break;
case tokTypes.braceL:
case tokTypes.dollarBraceL: open = s.type; close = tokTypes.braceR; break
case tokTypes.parenL: open = s.type; close = tokTypes.parenR; break;
case tokTypes.eof: if (behaviour === Pattern) { this.unexpected(); } break;
}
if (!close) {
console.log('exit with single token');
}
}
if (close) {
nextToken();
depth = 1;
loop2: while (1) {
console.log('nth:', fmttok(s));
switch (s.type) {
case open: ++depth; break;
case close: {
if (--depth === 0) {
if (openFirst && close === tokTypes.braceR) {
// The first token was "{" -- i.e. the pattern style is "{...}"
// eat and ignore the current token:
this.nextToken();
// remove the first token "{":
s.tokens.splice(tokenStartIndex,1)
} else {
// parse and include the current token
var tokindex = s.tokens.length;
nextToken();
console.log('tokindex: ['+tokindex+','+s.tokens.length+']')
}
break loop2;
}
break;
}
case tokTypes.name: if (s.value.charCodeAt(0) === ccDollar) { parseMacroVar(); } break;
case tokTypes.eof: {
if (behaviour === Pattern) {
this.raise(startPos, 'Missing terminating "'+close.label+'" in macro pattern')
} // else behaviour === Body: valid to have a macro be the last thing of input
break loop2;
}
}
nextToken();
}
}
console.log('exit at:');
dumpPState(s);
return {
tokens: s.tokens.splice(tokenStartIndex), // move tokens from s
vars: vars,
};
}
pp.parseMacroDefinition = function(m /*:Node*/) {
// MacroDefinition := "macro" <space>+ macroPattern "->" macroBody
//
// macroPattern := "{" Token macroExprToken* "}"
// | <macroExprToken where first must be Token>
//
// macroBody := "{" macroExprToken* "}"
// | macroExprToken
//
// macroExprToken := "(" macroExprToken ")"
// | "{" macroExprToken "}"
// | "${" macroExprToken "}"
// | "[" macroExprToken "]"
// | macroToken
// macroToken := (macroVar | Token)
// macroVar := "..."? "$" <any Identifier except "$">?
//
// E.g.
// macro x -> { y } # 'x()' -> 'y()'
// macro {x} -> { y } # 'x()' -> 'y()'
// macro x -> y() # 'x()' -> 'y()()'
// macro x -> {} # 'x()' -> '()'
// macro {x} -> {} # 'x' -> '()'
// macro \u007B -> { y \u007B } # '}' -> 'y }'
// macro {x 1} -> {y} # 'x 1' -> 'y'
// macro {x $} -> {y} # 'x 123' -> 'y'
var p = this;
var n = p.startNode(); // noop node that will be returned in-place of the macro
var s = p.state;
// Ignore comments while parsing macro
var orig_pushComment = p.pushComment;
p.pushComment = function(){};
try {
// macroPattern
m.pattern = p.parseMacroTokens1(Pattern);
console.log('m.pattern' +
'\n .tokens: ' + fmttoks(m.pattern.tokens) +
'\n .vars: '+(m.pattern.vars ? '{' + fmtit(m.pattern.vars.keys())+'}' : '-'));
// "=>"
// if (s.type === tokTypes.arrow) {
// p.nextToken(); // advance to next token w/o remembering "=>"
// } else {
// p.raise(s.start, 'Unexpected token (expected "=>")');
// }
// "->"
if (s.type === tokTypes.plusMin && s.input.charCodeAt(s.pos) === ccGreaterThan) {
s.start = s.pos; ++s.pos; s.type = tokTypes.relational; // p.nextToken();
p.nextToken();
} else {
p.raise(s.start, 'Unexpected token (expected "->")');
}
// macroBody
m.body = p.parseMacroTokens1(Body, m.pattern.vars);
console.log('m.body' +
'\n .tokens: ' + fmttoks(m.body.tokens))
} finally {
p.pushComment = orig_pushComment;
}
// optional ;
if (s.type === tokTypes.semi) {
p.nextToken();
}
// Finish our noop node before the macro node so that trailing comments remain with the AST
p.finishNode(n, "Noop");
p.finishNode(m, "MacroDefinition");
n.leadingComments = m.leadingComments;
n.trailingComments = m.trailingComments;
// Add comment to replacement node
var raw = p.input.substring(m.start, m.end);
var comment = {
type: raw.indexOf('\n') !== -1 ? "CommentBlock" : "CommentLine",
value: raw,
start: m.start,
end: m.end,
loc: {start: m.loc.start, end: m.loc.end},
range: [m.start, m.end],
};
if (n.leadingComments) {
n.leadingComments.push(comment);
} else if (n.trailingComments) {
n.trailingComments.unshift(comment);
} else {
n.leadingComments = [comment];
}
// Register macro
var prevMacro = p.macroCtx.defineMacro(m);
if (prevMacro) {
// Would fail here if we want:
// p.raise(p.state.start, 'Duplicate macro definition "'+m.id.name+'"');
p.pushDiagnostic('warn', m.start, 'Macro '+fmtmacro(m,false)+' redefined', m.end);
}
console.log('macrodef:', fmtmacro(m));
//process.exit(1);
// Assign empty body to noop block statement
n.body = [];
// We just performed nextToken to advance to the token that follows the macro def, but the
// macro-expansion hook is disabled while matching macros.
if (tokeq(s, m.pattern.tokens[0])) {
console.log('possible immediate expansion after definition')
}
return n;
};
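// Net effect of parseMacroDefinition: the `macro ... -> ...` statement is removed from
// the AST (replaced by an empty Noop node), its raw source is attached to that node as
// a comment, and the definition is registered with macroCtx.defineMacro.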
var MISMATCH = Symbol('MISMATCH');
//
// opts:
// m: MacroDefinition
// token: Token { type: BabylonTokenType, value: String|null }
// persistToken(t:Token) -> Token
// nextToken() -> Token
// error(message:String)
// createSnapshot?() -> Snapshot
// restoreSnapshot?(Snapshot)
//
function matchMacroPattern(opts) {
// pass: 1 2 3 4
// mtoks: foo [ $a ]
// stoks: foo [ 1 ]
var m = opts.m;
var stok = opts.token;
function consumeSToks(mtokTerm, isRest, rewindLast) {
var depth = 0;
var termType = mtokTerm ? mtokTerm.type : tokTypes.semi;
var t, tokens = null; // Token[];
var snapshot;
console.log('consumeSToks: termType:', repr(termType.label))
loop: while (true) {
console.log(' consumeSToks stok:', fmttok(stok))
switch (stok.type) {
case termType: {
if (depth === 0) {
break loop;
}
// fallthrough
}
case tokTypes.bracketL:
case tokTypes.braceL:
case tokTypes.dollarBraceL:
case tokTypes.parenL: {
// TODO: support flow generics, eg. "Set<Map<int,string>>"
++depth;
break;
}
case tokTypes.bracketR:
case tokTypes.braceR:
case tokTypes.parenR: {
if (depth === 0 && isRest && termType === tokTypes.semi) {
// E.g.
// macro foo { ...$b } -> {}
// (foo 1 2 3)
// ^
break loop;
}
--depth;
break;
}
case tokTypes.eof: {
if (!isRest) {
opts.error('Unterminated macro match');
}
// Rest ended at EOF -- validation happens by receiver of return value
// if (!tokens) { tokens = []; } tokens[HasEOF] = true;
break loop;
}
}
t = opts.persistToken(stok);
if (!tokens) { tokens = [t]; } else { tokens.push(t); }
if (depth === 0 && !isRest) {
// Not a "..." and at the root -- we have taken exactly one stok
break;
}
if (rewindLast) {
// We have been asked to rewind the last token we parse. To do that, we need to
// take a snapshot of the state before advancing to the next token.
snapshot = opts.createSnapshot();
}
stok = opts.nextToken();
}
if (rewindLast && snapshot) {
// We have been asked to rewind (or undo) the last token
assert(snapshot);
opts.restoreSnapshot(snapshot);
}
return tokens;
}
var i = 0, L = m.pattern.tokens.length
, mtok
, mtokTerm
, stoks
, subs = null // Map<Token,Token[]> var substitutions
, skipNextToken = false
, varTag
, bodyToken
;
for (; i !== L; ++i) {
mtok = m.pattern.tokens[i];
console.log('mtok:', fmttok(mtok) + ', stok:', fmttok(stok))
if (varTag = mtok[TokenVarTag]) {
// Read source tokens and associate them with the variable
mtokTerm = m.pattern.tokens[i+1];
stoks = consumeSToks(
mtokTerm,
varTag === TokenVarN,
/*rewindLast=*/ opts.createSnapshot && mtok.isRest && i === L-1
);
if (varTag === TokenVarN) {
// Because "...$" consumes a terminating token, so it's already queued up
// at this point.
skipNextToken = true;
} else if (!stoks) {
return MISMATCH;
}
console.log('x mtok:', fmttok(mtok))
if (bodyToken = mtok[MatchingToken]) {
// Does appear in body -- associate so it can later be expanded
if (!subs) { subs = new Map }
subs.set(bodyToken, stoks || Empty);
// `Empty` means that bodyToken is replaced by nothing, E.g.
// macro A(...$) -> a($); A() => a()
}
console.log(fmttok(mtok)+' -> [' + (!stoks ? '' : stoks.map(fmttok).join(', ')) + ']');
} else {
// match tokens
if (!tokeq(mtok, stok)) {
console.log('MISMATCH', fmttok(mtok), '!==', fmttok(stok))
return MISMATCH;
}
}
if (skipNextToken) {
skipNextToken = false;
} else if (i < L-1) {
stok = opts.nextToken();
}
}
return subs;
}
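// Walk-through for the pattern/source shown in the comment at the top of this function:
//   mtoks: foo [ $a ]        stoks: foo [ 1 ]
//   i=0: `foo` matches `foo` verbatim (tokeq), i=1: `[` matches `[`
//   i=2: $a is a TokenVar1 -> consumeSToks runs up to the next pattern token `]`,
//        collects [1] and records it in `subs` under the matching body token
//   i=3: `]` matches `]` -> returns subs (Map<bodyToken, Token[]>)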
pp.expandMacro = function(m, toks, cont) {
var p = this;
var s = p.state;
var toksIndex = 0;
var endAfterFinishNode = false;
var orig_nextToken = p.nextToken;
var pstateAfterMacroMatch = s.clone();
console.log('+++++++++++++++++++++++++++ begin expand +++++++++++++++++++++++++++');
// console.log(fmtcallstack());
p.nextToken = function expandMacro$nextToken() {
console.log('EXP next');
// console.log(fmtcallstack());
var t = toks[toksIndex++];
if (!t) {
console.log('EXP end');
p.state = pstateAfterMacroMatch;
// console.log('EXP s at end:'); dumpPState(p.state);
console.log('+++++++++++++++++++++++++++ end expand +++++++++++++++++++++++++++');
var r = (p.nextToken = orig_nextToken).call(p);
cont(true);
return r;
}
s.end = t.end;
s.start = t.start;
s.startLoc = { line: t.loc.start.line, column: t.loc.start.column };
s.pos = t.end;
s.type = t.type;
s.value = t.value;
console.log('EXP t: '+fmttok(t))
console.log('EXP s: '+fmttok(s))
// console.log('EXP s when leaving next:'); dumpPState(s);
}
p.nextToken();
}
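// expandMacro works by temporarily replacing p.nextToken with a closure that feeds the
// expansion tokens to the parser one at a time; once they are exhausted it restores the
// parser state saved in pstateAfterMacroMatch, reinstalls the original nextToken and
// calls cont(true), which re-enables the expansion hook.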
pp.reportMacroMatchAttempt = function(m, startPos, endPos) {
this.pushDiagnostic(
'info',
startPos === undefined ? this.state.start : startPos,
'Attempted to expand macro '+fmtmacropat(m.pattern, Pattern, false),
endPos
);
console.log(this.state.diagnostics[this.state.diagnostics.length-1].message);
}
pp.maybeMatchMacroPattern = function(m) {
var p = this;
console.log('- - - - - - - - - - - begin match - - - - - - - - - - -')
dumpPState(p.state);
var subs = null;
if (m.pattern) {
// State:
// macro bar {$a $b} -> { ($a, $b) }
// foo bar 3 4 5
// ^-- state.pos
// ^----- state.start
//
var pStateBeforeMatch = p.state.clone();
subs = matchMacroPattern({
m: m,
token: p.state,
persistToken: copytok,
error: function(msg) { p.raise(p.state.start, msg) },
nextToken: function() { p.nextToken(); return p.state; },
createSnapshot: p.state.clone.bind(p.state),
restoreSnapshot: function(snapshot) { p.state = snapshot; },
});
if (subs === MISMATCH) {
// didn't match -- restore state
var endPos = p.state.end;
p.state = pStateBeforeMatch;
p.reportMacroMatchAttempt(m, p.state.start, endPos)
console.log('- - - - - - - - - - - end match nomatch - - - - - - - - - - -')
return MISMATCH;
}
// Note: match might have replaced p.state at this point
dumpPState(p.state);
console.log('- - - - - - - - - - - end match subs - - - - - - - - - - -')
// State:
// macro bar {$a $b} -> { ($a, $b) }
// foo bar 3 4 5
// ^-- state.pos
// ^--- state.start
//
} else {
dumpPState(p.state);
console.log('- - - - - - - - - - - end match simple - - - - - - - - - - -')
// Simple substitution macro w/o match
// State:
// macro bar { 1 }
// foo bar 3 4 5
// ^-- state.pos
// ^----- state.start
//
}
return subs;
}
function maybeExpandInnerMacro(macroCtx, p, m, parentM, stoks) {
if (macroCtx._evalSet) {
if (macroCtx._evalSet.has(parentM)) {
var s = [];
for (var v of macroCtx._evalSet) { s.push(v); }
s.push(parentM);
s = s.map(function(m){ return fmtmacro(m, false) });
p.raise(
p.state.start,
'Cyclic macro expansion ' + s.join(' --> ')
);
} else {
macroCtx._evalSet.add(parentM);
}
} else {
macroCtx._evalSet = new Set([parentM])
}
var matchSource
, subs
, numToksConsumed = 1
;
if (m.pattern.tokens.length > 1) {
// Note: If the macro pattern has just one token, we have already matched it
// (before this function was called), so we only perform a full match when the
// pattern is longer than one token.
// In the future if/when we support patterns with variables as the first token,
// the condition should include a check for if the one and only token is a variable,
// as it will be in need of substitution. E.g.
// if (m.pattern.tokens.length > 1 || TokenVarTag in m.pattern.tokens[0])
matchSource = {
m: m,
token: stoks[0],
_tokenIndex: 0,
persistToken: function(t) { return t },
error: function(msg) { p.raise(stoks[++this._tokenIndex].start, msg) },
nextToken: function() { return stoks[++this._tokenIndex] },
};
subs = matchMacroPattern(matchSource);
if (subs === MISMATCH) {
p.reportMacroMatchAttempt(m, stoks[0].start)
return false;
}
numToksConsumed += matchSource._tokenIndex;
}
var toks = evalMacro(macroCtx, p, m, subs);
console.log('EIM evalMacro ->', toks.map(fmttok).join(', '))
macroCtx._evalSet.delete(parentM);
return [0, numToksConsumed].concat(toks); // suitable for applying on Array.splice
}
function evalMacro(macroCtx, p, m, subs, scopeSet) {
// Note: This function is not recursive itself, but relies on maybeExpandInnerMacro
// for cyclic expansion checks. I.e. we can't call evalMacro directly from evalMacro.
// memoized?
var memoized = m[MemoizedResult];
if (memoized) {
return memoized;
}
// expand substitutions
var toks;
if (subs) {
toks = [];
m.body.tokens.forEach(function(etok) {
var subToks = subs.get(etok);
if (subToks) {
// Empty means that etok is replaced by nothing, otherwise we have substitutes:
if (subToks !== Empty) {
assert(Array.isArray(subToks));
toks = toks.concat(subToks);
}
} else {
// Verbatim
toks.push(etok);
}
})
} else {
// No variables
toks = m.body.tokens;
}
// match & expand inner macros
var i = 0, L = toks.length, m2, t, r;
for (; i !== L; ++i) {
t = toks[i];
if (m2 = macroCtx.getMacro(t.type, t.value)) {
if (r = maybeExpandInnerMacro(macroCtx, p, m2, m, toks)) {
r[0] = i; // splice start index = first token replaced
toks.splice.apply(toks, r);
i += r[1]-1; // number-of-tokens consumed by maybeExpandInnerMacro
}
}
}
if (!subs) {
// memoize pure macro
m[MemoizedResult] = toks;
}
return toks;
}
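// Example of the memoization above: for `macro D -> d` the body has no variables, so its
// (already inner-expanded) token list is computed once and reused on every later expansion
// of D.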
pp.maybeExpandMacro = function() {
console.log('maybeExpandMacro')
var m = this.macroCtx.getMacro(this.state.type, this.state.value);
if (!m) {
return;
}
disableMacroExpansionHook();
var finalize = enableMacroExpansionHook;
// attempt match
var subs = this.maybeMatchMacroPattern(m);
if (subs === MISMATCH) {
return finalize();
}
// evaluate macro, returning the tokens which should replace the match
//console.log('subs:', subs);
var toks = evalMacro(this.macroCtx, this, m, subs);
console.log('expanded toks:', toks.map(fmttok).join(', '))
if (toks.length === 0) {
// No replacement tokens
// E.g. "macro { ... } -> {}"
this.nextToken();
finalize();
} else {
// produce `toks` for the parser
this.expandMacro(m, toks, finalize);
}
};
var orig_nextToken = pp.nextToken;
var macroExpansionHookCounter = 0;
var expandMacro$nextToken0 = function() {
orig_nextToken.apply(this, arguments);
this.maybeExpandMacro(); // HOTPATH
};
function enableMacroExpansionHook() {
if (++macroExpansionHookCounter === 1) {
console.log('macroExpansionHook:ENABLE ' + (new Error).stack.split(/\n/)[2].trim());
pp.nextToken = expandMacro$nextToken0;
} else console.log('macroExpansionHook:enable ' + (new Error).stack.split(/\n/)[2].trim());
}
function disableMacroExpansionHook() {
if (--macroExpansionHookCounter === 0) {
console.log('macroExpansionHook:DISABLE ' + (new Error).stack.split(/\n/)[2].trim());
pp.nextToken = orig_nextToken;
} else console.log('macroExpansionHook:disable ' + (new Error).stack.split(/\n/)[2].trim());
}
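// The hook is reference-counted: nested enable/disable pairs (e.g. while a macro
// definition or a pattern match is parsed inside an outer expansion) only swap
// pp.nextToken at the outermost transitions, i.e. when the counter crosses 0 <-> 1.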
// var orig_finishToken = pp.finishToken;
// var inMaybeExpand = false;
// var expandMacro$finishToken = function() {
// console.log('finishToken', fmttok(this.state));
// orig_finishToken.apply(this, arguments);
// if (this.macroCtx && !inMaybeExpand) {
// console.log('macroCtx.getMacro', fmttok(this.state));
// var m = this.macroCtx.getMacro(this.state.type, this.state.value);
// if (m) {
// this.finishToken = orig_nextToken;
// console.log('call maybeExpandMacro')
// this.maybeExpandMacro(m, function(r) {
// console.log('expand cont', r)
// this.finishToken = expandMacro$finishToken;
// });
// }
// }
// };
// pp.finishToken = expandMacro$finishToken;
// pp.extend("parseMaybeAssign", function(inner) {
// return function(noIn, refShorthandDefaultPos, afterLeftParse) {
// var m = this.macroCtx.getMacro(this.state.type, this.state.value);
// if (m) {
// this.maybeExpandMacro(m);
// }
// return inner.apply(this, arguments);
// };
// });
}
var babylonParser = require('babel-core/node_modules/babylon/lib/parser');
babylonParser.plugins.macro = MacroParserPlugin;
function parse(code, options) {
var opts = {
allowImportExportEverywhere: false,
allowReturnOutsideFunction: false,
allowHashBang: true,
ecmaVersion: Infinity,
strictMode: false,
sourceType: 'module',
locations: true,
ranges: true,
plugins: {
jsx: true,
flow: true,
macro: true,
},
features: {},
};
var k;
for (k in babel.pipeline.transformers) {
opts.features[k] = true;
}
if (options) {
for (k in options) {
opts[k] = options[k];
}
}
var file = babylon.parse(code, opts);
return {
ast: file.program,
macros: file.macros,
diagnostics: file.diagnostics,
};
}
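// Example, mirroring the assertions at the end of this file:
//   var r = parse('macro foo -> bar; foo()', { plugins: { macro: 1 } });
//   // r.ast is equivalent to parsing 'bar()'; r.macros holds the top-level macro
//   // definitions, and r.diagnostics collects any 'warn'/'info' entries pushed
//   // during parsing.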
time('babel.parse');
try {
res = parse(srcCode);
time.end();
if (res.diagnostics) {
res.diagnostics.map(function(d) {
console.warn(d.type+':', d.message);
});
}
// res.ast.body[1].expression.object.innerComments = [];
// console.log('babel.parse -> {res.ast.body[1].expression} =>',
// repr(res.ast.body[1].expression,4));
// console.log('babel.parse =>', repr(res.ast,4));
} catch (err) {
time.end();
if (err instanceof SyntaxError && err.pos !== undefined && err.loc) {
var line = srcCode.split('\n')[err.loc.line-1];
var cur = new Array(err.loc.column + 1).join(' ') + '^'; // pad caret to the error column
console.error(err.message + '\n' + line + '\n' + cur +
err.stack.substr(err.stack.indexOf('\n')));
} else {
console.error(err.stack || String(err));
}
process.exit(1);
}
function codegen(ast, compact) {
return babel.transform.fromAst(ast, null, {
code: true, // output JavaScript code
ast: false, // output AST
compact: compact, // this.target.mode === TARGET_MODE_RELEASE, // "minify"
comments: !compact, //this.target.mode === TARGET_MODE_DEV,
externalHelpers: true,
modules: 'ignore',
experimental: true,
stage: 0, // 0=strawman 1=proposal 2=draft 3=candidate 4=finished. TODO config
nonStandard: true, // Enable support for JSX and Flow
loose: [], // disallows import/exports beyond head and return at root
// xxx
filename: 'foo.js',
inputSourceMap: null,//inSourceMap,
sourceMaps: true, // true|false|"inline" generate SourceMap
sourceRoot: '/src/foo',//srcfile.dir,
// See http://babeljs.io/docs/advanced/transformers/
blacklist: //this.target.disabledTransforms(
[
'es6.modules',
'validation.react',
'validation.undeclaredVariableCheck',
'utility.inlineEnvironmentVariables', // we do this ourselves
'reactCompat',
'strict', // just puts `"use strict"` in every source
'jscript',
// target_nodejs
'spec.protoToAssign', // x.__proto__ = y supported by v8, no need to shallow-copy
'es6.spec.blockScoping', // supported by v8
'es6.spec.symbols', // supported by v8
'regenerator', // b/c yield is supported by v8
],
optional: //this.target.transforms(
[
'es6.spec.blockScoping',
'es6.spec.symbols',
'es6.spec.templateLiterals',
'react',
'flow',
// target_nodejs
'asyncToGenerator', // async/await on yield. Requires Node.js >0.11.2
],
});
}
time('babel.transform.fromAst');
res = codegen(res.ast, /*compact=*/false);
time.end();
// console.log('metadata:', repr(code.metadata,3));
// console.log('map:', repr(code.map,3));
console.log('code:\n' + res.code);
// return
var A = require('assert');
// assertParses0(expectedCode:string, sourceCode:string[]|string, parseOptions?:{})
function assertParses0(expectedCode, sourceCode, parseOptions) {
if (Array.isArray(sourceCode)) {
sourceCode = sourceCode.join('\n');
}
var ast = parse(sourceCode, parseOptions).ast;
var genCode = codegen(ast, /*compact=*/true).code;
A.equal(genCode.trim(), expectedCode.trim());
}
// assertParsesSame(expectedCode:string, ...sourceCodes:string[]|string)
function assertParsesSame(expectedCode) {
Array.prototype.slice.call(arguments,1).forEach(function(sourceCodes) {
assertParses0(expectedCode, sourceCodes);
});
}
// assertParses(expectedCode:string, ...sourceCodeLine:string, parseOptions?:{})
function assertParses(expectedCode) {
var parseOptions = null, sourceCode;
if (arguments.length > 2 && typeof arguments[arguments.length-1] === 'object') {
parseOptions = arguments[arguments.length-1];
sourceCode = Array.prototype.slice.call(arguments,1,arguments.length-1);
} else {
sourceCode = Array.prototype.slice.call(arguments,1);
}
assertParses0(expectedCode, sourceCode, parseOptions);
}
// assertParseThrows(error:RegExp|string, ...code:string, parseOptions?:{})
function assertParseThrows(error, code) {
var parseOptions = null, sourceCode;
if (arguments.length > 2 && typeof arguments[arguments.length-1] === 'object') {
parseOptions = arguments[arguments.length-1];
sourceCode = Array.prototype.slice.call(arguments,1,arguments.length-1);
} else {
sourceCode = Array.prototype.slice.call(arguments,1);
}
sourceCode = sourceCode.join('\n');
A.throws(function() { parse(sourceCode, parseOptions); }, error);
}
assertParses('foo();'
, 'foo()');
assertParsesSame('bar();'
, 'macro foo -> bar; foo()'
, 'macro foo -> bar\nfoo()'
, 'macro foo -> bar foo()'
)
assertParseThrows(/Unexpected token.*\(1:10\)/i,
'macro foo = bar foo()')
assertParsesSame('bar;'
, 'macro foo() -> bar; foo()'
, 'macro {foo()} -> bar; foo()'
, 'macro {foo()} -> {bar}; foo()'
, 'macro foo() -> {bar}; foo()'
, 'macro foo($) -> bar; foo(1)'
)
assertParsesSame('bar[1];'
, 'macro foo($) -> bar[$]; foo(1)'
, 'macro {foo($)} -> bar[$]; foo(1)'
, 'macro {foo $} -> bar[$]; foo 1'
)
// BUG fails when flow plugin is enabled, with "Unexpected token" at "*" during expansion:
var src =
'macro {mul $1 $2} -> ($1 * $2);\n'+
'mul 3 4';
assertParses('3 * 4;', src, { plugins: { jsx: 1, macro: 1 } } )
assertParses('3 * 4;', src, { plugins: { macro: 1 } } )
assertParseThrows(/Unexpected token.*\(1:25\)/i
, src, { plugins: { jsx: 1, macro: 1, flow: 1 } } )
assertParseThrows(/Unexpected token.*\(1:25\)/i
, src, { plugins: { macro: 1, flow: 1 } } )
assertParseThrows(/Unexpected token.*\(1:26\)/i
, 'macro {mul($1 $2)} -> ($1 * $2)'
, 'mul(3 4)'
, { plugins: { macro: 1, flow: 1 } } )
assertParses('r();'
, 'macro A -> {}; r(A)')
// deep expansion
assertParses('r(d(3,d));'
, 'macro D -> d'
, 'macro C -> D'
, 'macro B -> C(3, D)'
, 'macro A -> B'
, 'r(A)')
assertParses('r(d(2,1,d));'
, 'macro D -> d'
, 'macro C -> D'
, 'macro {B $2 $1} -> C($1, $2, D)'
, 'macro A -> {B 1 2}'
, 'r(A)')
assertParseThrows(/Cyclic macro expansion/i
, 'macro B -> C(A)'
, 'macro A -> B'
, 'a(A)')
assertParseThrows(/Cyclic macro expansion/i
, 'macro D -> B'
, 'macro C -> D'
, 'macro B -> C(3, D)'
, 'macro A -> B'
, 'r(A)')
// rest/spread vars
assertParses('d(1,2,3);'
, 'macro D(...$) -> { d($) }' // non-greedy, bounded rest
, 'D(1,2,3)') //< rest terminating at expected token ")"
assertParses('d();'
, 'macro D(...$) -> { d($) }' // non-greedy, bounded rest
, 'D()') //< empty rest terminating at expected token ")"
assertParses('d(1,2,3);'
, 'macro { D ...$ } -> { d($) }' // greedy rest
, 'D 1,2,3') //< rest terminating at EOF
assertParses('d();'
, 'macro { D ...$ } -> { d($) }' // greedy rest
, 'D') //< empty rest terminating at EOF
console.log(color('g','PASS'));