%name Parser

%token_prefix TOKEN_
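
// Token precedence, declared from lowest to highest: OR binds loosest,
// AND binds tighter, and NOT (right-associative) binds tightest.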
%left OR.
%left AND.
%right NOT.

%include {
    // include something
}

%code {
    &&REPLACER{process.js}&&
}
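
// The rule actions below assume a `tokens` module in scope, provided by the
// code spliced in above. Its shape is inferred from usage in this file, not
// from the module itself: every node class takes a single props object, e.g.
//
//     new tokens.LEXEME({ type, value, start, end })  // wraps one source token
//     new tokens.and({ lexpr, op, rexpr })            // an interior tree node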

%syntax_error {
    console.log("Syntax error");
}
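
// Entry point: the parse result's root node is either a full boolean
// expression or a bare literal. For orientation, an input along the lines of
// (concrete token spellings are the lexer's concern and only assumed here):
//
//     name == "router-1" AND NOT status != "up"
//
// reduces to main -> expr -> and(eq(...), not(neq(...))).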
main ::= expr(A) . {
    _result.root_node = A;
}

main ::= literal(A) . {
    _result.root_node = A;
}
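
// Literal wrappers: each terminal literal becomes a tree node holding a single
// LEXEME child that records the token's type, text, and source span.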
integer_literal(A) ::= INTEGER_LITERAL(B) . {
    A = new tokens.integer_literal({
        children: [
            new tokens.LEXEME({
                type: B.lexeme,
                value: B.value,
                start: B.start,
                end: B.end
            })
        ]
    });
}

literal(A) ::= integer_literal(B) . {
    A = B;
}

float_literal(A) ::= FLOAT_LITERAL(B) . {
    A = new tokens.float_literal({
        children: [
            new tokens.LEXEME({
                type: B.lexeme,
                value: B.value,
                start: B.start,
                end: B.end
            })
        ]
    });
}

literal(A) ::= float_literal(B) . {
    A = B;
}

bool_literal(A) ::= BOOL_LITERAL(B) . {
    A = new tokens.bool_literal({
        children: [
            new tokens.LEXEME({
                type: B.lexeme,
                value: B.value,
                start: B.start,
                end: B.end
            })
        ]
    });
}

literal(A) ::= bool_literal(B) . {
    A = B;
}

string_literal(A) ::= STRING_LITERAL(B) . {
    A = new tokens.string_literal({
        children: [
            new tokens.LEXEME({
                type: B.lexeme,
                value: B.value,
                start: B.start,
                end: B.end
            })
        ]
    });
}

literal(A) ::= string_literal(B) . {
    A = B;
}
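
// Identifiers: either a quoted string promoted to an id, or a bare ID token.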
id(A) ::= string_literal(B) . {
    A = new tokens.id({
        children: [B]
    });
}

id(A) ::= ID(B) . {
    A = new tokens.id({
        children: [
            new tokens.LEXEME({
                type: B.lexeme,
                value: B.value,
                start: B.start,
                end: B.end
            })
        ]
    });
}
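
// Boolean connectives. Each node keeps the operator's LEXEME together with
// its operand expression(s).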
and(A) ::= expr(B) AND(C) expr(D) . {
    A = new tokens.and({
        lexpr: B,
        op: new tokens.LEXEME({
            type: C.lexeme,
            value: C.value,
            start: C.start,
            end: C.end
        }),
        rexpr: D
    });
}

or(A) ::= expr(B) OR(C) expr(D) . {
    A = new tokens.or({
        lexpr: B,
        op: new tokens.LEXEME({
            type: C.lexeme,
            value: C.value,
            start: C.start,
            end: C.end
        }),
        rexpr: D
    });
}

not(A) ::= NOT(C) expr(D) . {
    A = new tokens.not({
        op: new tokens.LEXEME({
            type: C.lexeme,
            value: C.value,
            start: C.start,
            end: C.end
        }),
        rexpr: D
    });
}
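
// Comparisons all share one shape: id <operator> literal, with the operator
// token preserved as a LEXEME.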
eq(A) ::= id(B) EQ(C) literal(D) . {
    A = new tokens.eq({
        id: B,
        op: new tokens.LEXEME({
            type: C.lexeme,
            value: C.value,
            start: C.start,
            end: C.end
        }),
        literal: D
    });
}

neq(A) ::= id(B) NEQ(C) literal(D) . {
    A = new tokens.neq({
        id: B,
        op: new tokens.LEXEME({
            type: C.lexeme,
            value: C.value,
            start: C.start,
            end: C.end
        }),
        literal: D
    });
}

gt(A) ::= id(B) GT(C) literal(D) . {
    A = new tokens.gt({
        id: B,
        op: new tokens.LEXEME({
            type: C.lexeme,
            value: C.value,
            start: C.start,
            end: C.end
        }),
        literal: D
    });
}

gte(A) ::= id(B) GTE(C) literal(D) . {
    A = new tokens.gte({
        id: B,
        op: new tokens.LEXEME({
            type: C.lexeme,
            value: C.value,
            start: C.start,
            end: C.end
        }),
        literal: D
    });
}

lt(A) ::= id(B) LT(C) literal(D) . {
    A = new tokens.lt({
        id: B,
        op: new tokens.LEXEME({
            type: C.lexeme,
            value: C.value,
            start: C.start,
            end: C.end
        }),
        literal: D
    });
}

lte(A) ::= id(B) LTE(C) literal(D) . {
    A = new tokens.lte({
        id: B,
        op: new tokens.LEXEME({
            type: C.lexeme,
            value: C.value,
            start: C.start,
            end: C.end
        }),
        literal: D
    });
}

like(A) ::= id(B) LIKE(C) literal(D) . {
    A = new tokens.like({
        id: B,
        op: new tokens.LEXEME({
            type: C.lexeme,
            value: C.value,
            start: C.start,
            end: C.end
        }),
        literal: D
    });
}

nlike(A) ::= id(B) NLIKE(C) literal(D) . {
    A = new tokens.nlike({
        id: B,
        op: new tokens.LEXEME({
            type: C.lexeme,
            value: C.value,
            start: C.start,
            end: C.end
        }),
        literal: D
    });
}
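
// expr collapses to whichever connective or comparison was matched, or to a
// sub-expression wrapped in LCB ... RCB delimiters.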
expr(A) ::= and(B) . {
    A = B;
}

expr(A) ::= or(B) . {
    A = B;
}

expr(A) ::= not(B) . {
    A = B;
}

expr(A) ::= eq(B) . {
    A = B;
}

expr(A) ::= neq(B) . {
    A = B;
}

expr(A) ::= gt(B) . {
    A = B;
}

expr(A) ::= gte(B) . {
    A = B;
}

expr(A) ::= lt(B) . {
    A = B;
}

expr(A) ::= lte(B) . {
    A = B;
}

expr(A) ::= like(B) . {
    A = B;
}

expr(A) ::= nlike(B) . {
    A = B;
}

expr(A) ::= LCB(B) expr(C) RCB(D) . {
    A = new tokens.sub_expr({
        LCB: new tokens.LEXEME({
            type: B.lexeme,
            value: B.value,
            start: B.start,
            end: B.end
        }),
        expr: C,
        RCB: new tokens.LEXEME({
            type: D.lexeme,
            value: D.value,
            start: D.start,
            end: D.end
        })
    });
}
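
// Address literals: ADDRESS LSB ... RSB around a possibly empty,
// comma-separated list of string literals.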
address_literal_content(A) ::= string_literal(B) . {
    A = new tokens.address_literal_content({
        children: [B]
    });
}

address_literal_content(A) ::= address_literal_content(B) COMMA string_literal(C) . {
    B.add(C);
    A = B;
}

address_literal_content_or_empty(A) ::= address_literal_content(B) . {
    A = B;
}

address_literal_content_or_empty(A) ::= . {
    A = new tokens.address_literal_content({
        children: []
    });
}

address_literal(A) ::= ADDRESS(B) LSB(C) address_literal_content_or_empty(D) RSB(E) . {
    A = new tokens.address_literal({
        children: D.children,
        keyword: new tokens.LEXEME({
            type: B.lexeme,
            value: B.value,
            start: B.start,
            end: B.end
        }),
        LSB: new tokens.LEXEME({
            type: C.lexeme,
            value: C.value,
            start: C.start,
            end: C.end
        }),
        RSB: new tokens.LEXEME({
            type: E.lexeme,
            value: E.value,
            start: E.start,
            end: E.end
        })
    });
}

literal(A) ::= address_literal(B) . {
    A = B;
}
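
// OID literals: OID LSB ... RSB around a possibly empty, dot-separated
// sequence of ids.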
oid_literal_content(A) ::= id(B) . {
    A = new tokens.oid_literal_content({
        children: [B]
    });
}

oid_literal_content(A) ::= oid_literal_content(B) DOT id(C) . {
    B.add(C);
    A = B;
}

oid_literal_content_or_empty(A) ::= oid_literal_content(B) . {
    A = B;
}

oid_literal_content_or_empty(A) ::= . {
    A = new tokens.oid_literal_content({
        children: []
    });
}

oid_literal(A) ::= OID(B) LSB(C) oid_literal_content_or_empty(D) RSB(E) . {
    A = new tokens.oid_literal({
        children: D.children,
        keyword: new tokens.LEXEME({
            type: B.lexeme,
            value: B.value,
            start: B.start,
            end: B.end
        }),
        LSB: new tokens.LEXEME({
            type: C.lexeme,
            value: C.value,
            start: C.start,
            end: C.end
        }),
        RSB: new tokens.LEXEME({
            type: E.lexeme,
            value: E.value,
            start: E.start,
            end: E.end
        })
    });
}

literal(A) ::= oid_literal(B) . {
    A = B;
}
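
// Time-diff literals: TIMEDIFF LSB days hh COLON mm COLON ss microseconds RSB,
// with each field an integer_literal.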
time_diff_literal(A) ::= TIMEDIFF(KWD) LSB(B) integer_literal(DAYS) integer_literal(HH) COLON integer_literal(MM) COLON integer_literal(SS) integer_literal(MS) RSB(C) . {
    A = new tokens.time_diff_literal({
        keyword: new tokens.LEXEME({
            type: KWD.lexeme,
            value: KWD.value,
            start: KWD.start,
            end: KWD.end
        }),
        LSB: new tokens.LEXEME({
            type: B.lexeme,
            value: B.value,
            start: B.start,
            end: B.end
        }),
        RSB: new tokens.LEXEME({
            type: C.lexeme,
            value: C.value,
            start: C.start,
            end: C.end
        }),
        days: DAYS,
        hours: HH,
        minutes: MM,
        seconds: SS,
        microseconds: MS
    });
}

literal(A) ::= time_diff_literal(B) . {
    A = B;
}