Aleksey Chichenkov 2019-01-29 14:25:40 +03:00
parent 3cffcbd7e5
commit 2332120872
7 changed files with 536 additions and 2 deletions

View File

@@ -12,6 +12,11 @@ var config = {
* Flags for lemon; see README.md
*/
flags: "-l",
/**
* Before using this, you must configure header.tmpl and footer.tmpl
*/
require_templates: "require_js/"
};
module.exports = config;

View File

@@ -12,6 +12,11 @@ var config = {
* Flags for lemon; see README.md
*/
flags: "-l",
/**
* Before using this, you must configure header.tmpl and footer.tmpl
*/
require_templates: "require_js/"
};
module.exports = config;

View File

@@ -50,10 +50,12 @@ var update_parser_y = function () {
 var post_process_parser = function () {
     var out_js = fs.readFileSync(parser_path + temp_fn_js, "utf8");
-    if(args["t"] !== undefined) {
+    if (args["t"] !== undefined) {
         switch (args["t"]) {
             case "web":
-                out_js = "(function(){\n" + out_js + "return LemonJS; \n})()";
+                var header = fs.readFileSync(config.require_templates + "header.tmpl", "utf8");
+                var footer = fs.readFileSync(config.require_templates + "footer.tmpl", "utf8");
+                out_js = header + "\n" + out_js + "\n" + footer;
                 break;
             case "node":
                 out_js += "\n\n module.exports = LemonJS;";
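For the "web" target, the generated parser is now wrapped with the two RequireJS templates added below instead of a bare IIFE. Assuming the stock header.tmpl and footer.tmpl from this commit, the composed output has roughly this shape:

(function () {
    var deps = [
        "external/re2js/lexer",
        "external/parser/tokens"
    ];
    define(deps, function () {
        var tokens = require("external/parser/tokens");
        var Lexer = require("external/re2js/lexer");
        // ...the generated parser code, defining LemonJS, is spliced in here...
        return LemonJS;
    })
})();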

output/parser.y Normal file (49 lines added)
View File

@@ -0,0 +1,49 @@
%name Parser
%token_prefix TOKEN_
%left OR.
%left AND.
%right NOT.
%include {
// include something
}
%code {
var _result = {
error: false
};
var LemonJS = function(_input) {
_result = Object.create(null);
var parser = new Parser();
var lexer = new Lexer(_input);
var token;
var lexemes = [];
while (token = lexer.next()) {
// a previous parser.parse() call flagged a syntax error, so bail out early
if (_result.error) {
return { success: false };
}
if (token.error === 0) {
console.log("PARSE", token.lexeme);
parser.parse(parser["TOKEN_" + token.lexeme], token);
lexemes.push(token);
}
}
parser.parse();
return {
success: true,
tree: _result.root_node,
lexemes: lexemes
};
};
}
%syntax_error {
_result.error = true;
console.log("Syntax error");
}
&&REPLACER{output/rules.y}&&
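The %code block above defines the public entry point: LemonJS runs the external lexer over the input, feeds each token to the generated parser, and returns the collected result. A minimal usage sketch (the query syntax here is an assumption; the actual lexemes are defined by the re2js lexer, which is not part of this commit):

// hypothetical input string; the exact surface syntax comes from the lexer
var result = LemonJS('name = "value" AND count > 10');
if (result.success) {
    console.log(result.tree);    // root AST node (_result.root_node)
    console.log(result.lexemes); // every token produced by the lexer
} else {
    console.log("syntax error"); // a %syntax_error fired during parsing
}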

output/rules.y Normal file (462 lines added)
View File

@@ -0,0 +1,462 @@
main ::= expr(A) . {
_result.root_node = A
}
main ::= literal(A) . {
_result.root_node = A
}
integer_literal(A) ::= INTEGER_LITERAL(B) . {
A = new tokens.integer_literal({
children: [
new tokens.LEXEME({
type: B.lexeme,
value: B.value,
start: B.start,
end: B.end
})
]
});
}
literal(A) ::= integer_literal(B) . {
A = B;
}
float_literal(A) ::= FLOAT_LITERAL(B) . {
A = new tokens.float_literal({
children: [
new tokens.LEXEME({
type: B.lexeme,
value: B.value,
start: B.start,
end: B.end
})
]
})
}
literal(A) ::= float_literal(B) . {
A = B;
}
bool_literal(A) ::= BOOL_LITERAL(B) . {
A = new tokens.bool_literal({
children: [
new tokens.LEXEME({
type: B.lexeme,
value: B.value,
start: B.start,
end: B.end
})
]
})
}
literal(A) ::= bool_literal(B) . {
A = B;
}
string_literal(A) ::= STRING_LITERAL(B) . {
A = new tokens.string_literal({
children: [
new tokens.LEXEME({
type: B.lexeme,
value: B.value,
start: B.start,
end: B.end
})
]
});
}
literal(A) ::= string_literal(B) . {
A = B;
}
id(A) ::= string_literal(B) . {
A = new tokens.id({
children: [B]
});
}
id(A) ::= ID(B) . {
A = new tokens.id({
children: [
new tokens.LEXEME({
type: B.lexeme,
value: B.value,
start: B.start,
end: B.end
})
]
});
}
and(A) ::= expr(B) AND(C) expr(D) . {
A = new tokens.and({
lexpr: B,
op: new tokens.LEXEME({
type: C.lexeme,
value: C.value,
start: C.start,
end: C.end
}),
rexpr: D
})
}
or(A) ::= expr(B) OR(C) expr(D) . {
A = new tokens.or({
lexpr: B,
op: new tokens.LEXEME({
type: C.lexeme,
value: C.value,
start: C.start,
end: C.end
}),
rexpr: D
})
}
not(A) ::= NOT(C) expr(D) . {
A = new tokens.not({
op: new tokens.LEXEME({
type: C.lexeme,
value: C.value,
start: C.start,
end: C.end
}),
rexpr: D
})
}
eq(A) ::= id(B) EQ(C) literal(D) . {
A = new tokens.eq({
id: B,
op: new tokens.LEXEME({
type: C.lexeme,
value: C.value,
start: C.start,
end: C.end
}),
literal: D
});
}
neq(A) ::= id(B) NEQ(C) literal(D) . {
A = new tokens.neq({
id: B,
op: new tokens.LEXEME({
type: C.lexeme,
value: C.value,
start: C.start,
end: C.end
}),
literal: D
});
}
gt(A) ::= id(B) GT(C) literal(D) . {
A = new tokens.gt({
id: B,
op: new tokens.LEXEME({
type: C.lexeme,
value: C.value,
start: C.start,
end: C.end
}),
literal: D
});
}
gte(A) ::= id(B) GTE(C) literal(D) . {
A = new tokens.gte({
id: B,
op: new tokens.LEXEME({
type: C.lexeme,
value: C.value,
start: C.start,
end: C.end
}),
literal: D
});
}
lt(A) ::= id(B) LT(C) literal(D) . {
A = new tokens.lt({
id: B,
op: new tokens.LEXEME({
type: C.lexeme,
value: C.value,
start: C.start,
end: C.end
}),
literal: D
});
}
lte(A) ::= id(B) LTE(C) literal(D) . {
A = new tokens.lte({
id: B,
op: new tokens.LEXEME({
type: C.lexeme,
value: C.value,
start: C.start,
end: C.end
}),
literal: D
});
}
like(A) ::= id(B) LIKE(C) literal(D) . {
A = new tokens.like({
id: B,
op: new tokens.LEXEME({
type: C.lexeme,
value: C.value,
start: C.start,
end: C.end
}),
literal: D
});
}
nlike(A) ::= id(B) NLIKE(C) literal(D) . {
A = new tokens.nlike({
id: B,
op: new tokens.LEXEME({
type: C.lexeme,
value: C.value,
start: C.start,
end: C.end
}),
literal: D
});
}
expr(A) ::= and(B) . {
A = B;
}
expr(A) ::= or(B) . {
A = B;
}
expr(A) ::= not(B) . {
A = B;
}
expr(A) ::= eq(B) . {
A = B;
}
expr(A) ::= neq(B) . {
A = B;
}
expr(A) ::= gt(B) . {
A = B;
}
expr(A) ::= gte(B) . {
A = B;
}
expr(A) ::= lt(B) . {
A = B;
}
expr(A) ::= lte(B) . {
A = B;
}
expr(A) ::= like(B) . {
A = B;
}
expr(A) ::= nlike(B) . {
A = B;
}
expr(A) ::= LCB(B) expr(C) RCB(D) . {
A = new tokens.sub_expr({
LCB: new tokens.LEXEME({
type: B.lexeme,
value: B.value,
start: B.start,
end: B.end
}),
expr: C,
RCB: new tokens.LEXEME({
type: D.lexeme,
value: D.value,
start: D.start,
end: D.end
})
});
}
address_literal_content(A) ::= string_literal(B) . {
A = new tokens.address_literal_content({
children: [B]
});
}
address_literal_content(A) ::= address_literal_content(B) COMMA string_literal(C) . {
B.add(C);
A = B;
}
address_literal_content_or_empty(A) ::= address_literal_content(B) . {
A = B;
}
address_literal_content_or_empty(A) ::= . {
A = new tokens.address_literal_content({
children: []
});
}
address_literal(A) ::= ADDRESS(B) LSB(C) address_literal_content_or_empty(D) RSB(E) . {
A = new tokens.address_literal({
children: D.children,
keyword: new tokens.LEXEME({
type: B.lexeme,
value: B.value,
start: B.start,
end: B.end
}),
LSB: new tokens.LEXEME({
type: C.lexeme,
value: C.value,
start: C.start,
end: C.end
}),
RSB: new tokens.LEXEME({
type: E.lexeme,
value: E.value,
start: E.start,
end: E.end
})
});
}
literal(A) ::= address_literal(B) . {
A = B;
}
oid_literal_content(A) ::= id(B) . {
A = new tokens.oid_literal_content({
children: [B]
});
}
oid_literal_content(A) ::= oid_literal_content(B) DOT id(C) . {
B.add(C);
A = B;
}
oid_literal_content_or_empty(A) ::= oid_literal_content(B) . {
A = B;
}
oid_literal_content_or_empty(A) ::= . {
A = new tokens.oid_literal_content({
children: []
});
}
oid_literal(A) ::= OID(B) LSB(C) oid_literal_content_or_empty(D) RSB(E) . {
A = new tokens.oid_literal({
children: D.children,
keyword: new tokens.LEXEME({
type: B.lexeme,
value: B.value,
start: B.start,
end: B.end
}),
LSB: new tokens.LEXEME({
type: C.lexeme,
value: C.value,
start: C.start,
end: C.end
}),
RSB: new tokens.LEXEME({
type: E.lexeme,
value: E.value,
start: E.start,
end: E.end
})
});
}
literal(A) ::= oid_literal(B) . {
A = B;
}
time_diff_literal(A) ::= TIMEDIFF(KWD) LSB(B) integer_literal(DAYS) integer_literal(HH) COLON integer_literal(MM) COLON integer_literal(SS) integer_literal(MS) RSB(C) . {
A = new tokens.time_diff_literal({
keyword: new tokens.LEXEME({
type: KWD.lexeme,
value: KWD.value,
start: KWD.start,
end: KWD.end
}),
LSB: new tokens.LEXEME({
type: B.lexeme,
value: B.value,
start: B.start,
end: B.end
}),
RSB: new tokens.LEXEME({
type: C.lexeme,
value: C.value,
start: C.start,
end: C.end
}),
days: DAYS,
hours: HH,
minutes: MM,
seconds: SS,
microseconds: MS,
});
}
literal(A) ::= time_diff_literal(B) . {
A = B;
}
time_literal(A) ::= TIME(KWD) LSB(B) integer_literal(DAY) SLASH integer_literal(MONTH) SLASH integer_literal(YEAR) integer_literal(HH) COLON integer_literal(MM) COLON integer_literal(SS) integer_literal(MS) RSB(C) . {
A = new tokens.time_literal({
keyword: new tokens.LEXEME({
type: KWD.lexeme,
value: KWD.value,
start: KWD.start,
end: KWD.end
}),
LSB: new tokens.LEXEME({
type: B.lexeme,
value: B.value,
start: B.start,
end: B.end
}),
RSB: new tokens.LEXEME({
type: C.lexeme,
value: C.value,
start: C.start,
end: C.end
}),
day: DAY,
month: MONTH,
year: YEAR,
hours: HH,
minutes: MM,
seconds: SS,
microseconds: MS,
});
}
literal(A) ::= time_literal(B) . {
A = B;
}
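Every expr alternative simply forwards its child (A = B), so a finished tree contains only the meaningful nodes: and/or/not, the comparisons (eq, neq, gt, gte, lt, lte, like, nlike), sub_expr, and the literal wrappers. A small walker over the main node shapes might look like this (a sketch that assumes the tokens.* constructors keep the options shown above as plain properties; the keyword-style literals such as time_literal are left out):

// depth-first dump of a tree returned by LemonJS(...).tree
var dump = function (node, depth) {
    var indent = new Array(depth + 1).join("  ");
    if (node.lexpr) {           // and / or: lexpr, op, rexpr
        dump(node.lexpr, depth + 1);
        console.log(indent + node.op.type);
        dump(node.rexpr, depth + 1);
    } else if (node.rexpr) {    // not: op, rexpr
        console.log(indent + node.op.type);
        dump(node.rexpr, depth + 1);
    } else if (node.id) {       // eq, neq, gt, gte, ... : id, op, literal
        dump(node.id, depth + 1);
        console.log(indent + node.op.type);
        dump(node.literal, depth + 1);
    } else if (node.expr) {     // sub_expr: LCB expr RCB
        dump(node.expr, depth + 1);
    } else if (node.children) { // id, literals, *_content wrappers
        node.children.forEach(function (child) {
            dump(child, depth + 1);
        });
    } else {                    // a bare LEXEME: type, value, start, end
        console.log(indent + node.type + " = " + node.value);
    }
};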

require_js/footer.tmpl Normal file (3 lines added)
View File

@@ -0,0 +1,3 @@
return LemonJS;
})
})();

require_js/header.tmpl Normal file (8 lines added)
View File

@@ -0,0 +1,8 @@
(function(){
var deps = [
"external/re2js/lexer",
"external/parser/tokens"
];
define(deps, function(){
var tokens = require("external/parser/tokens");
var Lexer = require("external/re2js/lexer");
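Together, header.tmpl and footer.tmpl turn the generated parser into an anonymous AMD module whose exported value is LemonJS. A consumer would load it through RequireJS along these lines (the module path is a placeholder; it depends on where the build output is deployed):

// "output/parser" stands in for wherever the built parser file ends up
require(["output/parser"], function (LemonJS) {
    var result = LemonJS('flag = true'); // input syntax assumed, see above
    if (result.success) {
        console.log(result.tree);
    }
});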