// LemonJS/example/rules.y
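
// The actions below build an AST out of node classes on a `tokens` module
// supplied by the parser driver. The module itself is not shown here; as a
// rough sketch, these rules assume an interface along these lines:
//
//   tokens.LEXEME({type, value, start, end})   leaf wrapping one raw token
//   tokens.<rule_name>({...})                  interior node constructors
//   address_literal_content nodes expose .add(child) and .children
//
// The driver reads the finished tree back off the `_result` object.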
main ::= expr(A) . {
  _result.root_node = A;
}
main ::= literal(A) . {
  _result.root_node = A;
}
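
// Terminal literals: each rule wraps the raw token in a typed node whose
// single LEXEME child preserves the token's type, text, and source span.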
integer_literal(A) ::= INTEGER_LITERAL(B) . {
  A = new tokens.integer_literal({
    children: [
      new tokens.LEXEME({
        type: B.lexeme,
        value: B.value,
        start: B.start,
        end: B.end
      })
    ]
  });
}
literal(A) ::= integer_literal(B) . {
  A = B;
}
float_literal(A) ::= FLOAT_LITERAL(B) . {
  A = new tokens.float_literal({
    children: [
      new tokens.LEXEME({
        type: B.lexeme,
        value: B.value,
        start: B.start,
        end: B.end
      })
    ]
  });
}
literal(A) ::= float_literal(B) . {
  A = B;
}
bool_literal(A) ::= BOOL_LITERAL(B) . {
  A = new tokens.bool_literal({
    children: [
      new tokens.LEXEME({
        type: B.lexeme,
        value: B.value,
        start: B.start,
        end: B.end
      })
    ]
  });
}
literal(A) ::= bool_literal(B) . {
  A = B;
}
string_literal(A) ::= STRING_LITERAL(B) . {
  A = new tokens.string_literal({
    children: [
      new tokens.LEXEME({
        type: B.lexeme,
        value: B.value,
        start: B.start,
        end: B.end
      })
    ]
  });
}
literal(A) ::= string_literal(B) . {
  A = B;
}
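
// Identifiers: a bare ID token or a string_literal both reduce to an id node,
// so quoted names can appear wherever an id is expected.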
id(A) ::= string_literal(B) . {
  A = new tokens.id({
    children: [B]
  });
}
id(A) ::= ID(B) . {
  A = new tokens.id({
    children: [
      new tokens.LEXEME({
        type: B.lexeme,
        value: B.value,
        start: B.start,
        end: B.end
      })
    ]
  });
}
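
// Boolean connectives: and/or are binary over expr and record the operator
// lexeme; not is unary, so its node carries only op and rexpr.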
and(A) ::= expr(B) AND(C) expr(D) . {
  A = new tokens.and({
    lexpr: B,
    op: new tokens.LEXEME({
      type: C.lexeme,
      value: C.value,
      start: C.start,
      end: C.end
    }),
    rexpr: D
  });
}
or(A) ::= expr(B) OR(C) expr(D) . {
  A = new tokens.or({
    lexpr: B,
    op: new tokens.LEXEME({
      type: C.lexeme,
      value: C.value,
      start: C.start,
      end: C.end
    }),
    rexpr: D
  });
}
not(A) ::= NOT(C) expr(D) . {
  A = new tokens.not({
    op: new tokens.LEXEME({
      type: C.lexeme,
      value: C.value,
      start: C.start,
      end: C.end
    }),
    rexpr: D
  });
}
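
// Comparisons all share the shape `id OP literal`; each action records the
// two operand nodes plus the operator lexeme.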
eq(A) ::= id(B) EQ(C) literal(D) . {
  A = new tokens.eq({
    id: B,
    op: new tokens.LEXEME({
      type: C.lexeme,
      value: C.value,
      start: C.start,
      end: C.end
    }),
    literal: D
  });
}
neq(A) ::= id(B) NEQ(C) literal(D) . {
  A = new tokens.neq({
    id: B,
    op: new tokens.LEXEME({
      type: C.lexeme,
      value: C.value,
      start: C.start,
      end: C.end
    }),
    literal: D
  });
}
gt(A) ::= id(B) GT(C) literal(D) . {
  A = new tokens.gt({
    id: B,
    op: new tokens.LEXEME({
      type: C.lexeme,
      value: C.value,
      start: C.start,
      end: C.end
    }),
    literal: D
  });
}
gte(A) ::= id(B) GTE(C) literal(D) . {
  A = new tokens.gte({
    id: B,
    op: new tokens.LEXEME({
      type: C.lexeme,
      value: C.value,
      start: C.start,
      end: C.end
    }),
    literal: D
  });
}
lt(A) ::= id(B) LT(C) literal(D) . {
  A = new tokens.lt({
    id: B,
    op: new tokens.LEXEME({
      type: C.lexeme,
      value: C.value,
      start: C.start,
      end: C.end
    }),
    literal: D
  });
}
lte(A) ::= id(B) LTE(C) literal(D) . {
  A = new tokens.lte({
    id: B,
    op: new tokens.LEXEME({
      type: C.lexeme,
      value: C.value,
      start: C.start,
      end: C.end
    }),
    literal: D
  });
}
like(A) ::= id(B) LIKE(C) literal(D) . {
  A = new tokens.like({
    id: B,
    op: new tokens.LEXEME({
      type: C.lexeme,
      value: C.value,
      start: C.start,
      end: C.end
    }),
    literal: D
  });
}
nlike(A) ::= id(B) NLIKE(C) literal(D) . {
  A = new tokens.nlike({
    id: B,
    op: new tokens.LEXEME({
      type: C.lexeme,
      value: C.value,
      start: C.start,
      end: C.end
    }),
    literal: D
  });
}
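
// expr is the union of every operator form; the pass-through alternatives
// just forward the node already built by the operator rule.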
expr(A) ::= and(B) . {
  A = B;
}
expr(A) ::= or(B) . {
  A = B;
}
expr(A) ::= not(B) . {
  A = B;
}
expr(A) ::= eq(B) . {
  A = B;
}
expr(A) ::= neq(B) . {
  A = B;
}
expr(A) ::= gt(B) . {
  A = B;
}
expr(A) ::= gte(B) . {
  A = B;
}
expr(A) ::= lt(B) . {
  A = B;
}
expr(A) ::= lte(B) . {
  A = B;
}
expr(A) ::= like(B) . {
  A = B;
}
expr(A) ::= nlike(B) . {
  A = B;
}
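
// A grouped sub-expression delimited by LCB/RCB tokens; the sub_expr node
// keeps both delimiter lexemes alongside the inner expression.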
expr(A) ::= LCB(B) expr(C) RCB(D) . {
  A = new tokens.sub_expr({
    LCB: new tokens.LEXEME({
      type: B.lexeme,
      value: B.value,
      start: B.start,
      end: B.end
    }),
    expr: C,
    RCB: new tokens.LEXEME({
      type: D.lexeme,
      value: D.value,
      start: D.start,
      end: D.end
    })
  });
}
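
// Address literals: ADDRESS [ s1, s2, ... ] with a left-recursive,
// possibly empty, comma-separated list of string literals inside.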
address_literal_content(A) ::= string_literal(B) . {
  A = new tokens.address_literal_content({
    children: [B]
  });
}
address_literal_content(A) ::= address_literal_content(B) COMMA string_literal(C) . {
  B.add(C);
  A = B;
}
address_literal_content_or_empty(A) ::= address_literal_content(B) . {
  A = B;
}
address_literal_content_or_empty(A) ::= . {
  A = new tokens.address_literal_content({
    children: []
  });
}
address_literal(A) ::= ADDRESS(B) LSB(C) address_literal_content_or_empty(D) RSB(E) . {
  A = new tokens.address_literal({
    children: D.children,
    keyword: new tokens.LEXEME({
      type: B.lexeme,
      value: B.value,
      start: B.start,
      end: B.end
    }),
    LSB: new tokens.LEXEME({
      type: C.lexeme,
      value: C.value,
      start: C.start,
      end: C.end
    }),
    RSB: new tokens.LEXEME({
      type: E.lexeme,
      value: E.value,
      start: E.start,
      end: E.end
    })
  });
}
literal(A) ::= address_literal(B) . {
  A = B;
}
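
// OID literals arrive pre-assembled: the OID_LITERAL token itself carries
// its bracket lexemes (B.lsb / B.rsb), so no separate LSB/RSB tokens appear.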
oid_literal(A) ::= OID_LITERAL(B) . {
  A = new tokens.oid_literal({
    keyword: new tokens.LEXEME({
      type: B.lexeme,
      value: B.value,
      start: B.start,
      end: B.end
    }),
    LSB: B.lsb,
    RSB: B.rsb
  });
}
literal(A) ::= oid_literal(B) . {
  A = B;
}
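
// Time-difference literals: TIMEDIFF [ days hh:mm:ss microseconds ], with
// every field parsed as an integer_literal.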
time_diff_literal(A) ::= TIMEDIFF(KWD) LSB(B) integer_literal(DAYS) integer_literal(HH) COLON integer_literal(MM) COLON integer_literal(SS) integer_literal(MS) RSB(C) . {
  A = new tokens.time_diff_literal({
    keyword: new tokens.LEXEME({
      type: KWD.lexeme,
      value: KWD.value,
      start: KWD.start,
      end: KWD.end
    }),
    LSB: new tokens.LEXEME({
      type: B.lexeme,
      value: B.value,
      start: B.start,
      end: B.end
    }),
    RSB: new tokens.LEXEME({
      type: C.lexeme,
      value: C.value,
      start: C.start,
      end: C.end
    }),
    days: DAYS,
    hours: HH,
    minutes: MM,
    seconds: SS,
    microseconds: MS
  });
}
literal(A) ::= time_diff_literal(B) . {
  A = B;
}
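
// Time literals: TIME [ dd/mm/yyyy hh:mm:ss microseconds ], again with every
// field parsed as an integer_literal.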
time_literal(A) ::= TIME(KWD) LSB(B) integer_literal(DAY) SLASH integer_literal(MONTH) SLASH integer_literal(YEAR) integer_literal(HH) COLON integer_literal(MM) COLON integer_literal(SS) integer_literal(MS) RSB(C) . {
  A = new tokens.time_literal({
    keyword: new tokens.LEXEME({
      type: KWD.lexeme,
      value: KWD.value,
      start: KWD.start,
      end: KWD.end
    }),
    LSB: new tokens.LEXEME({
      type: B.lexeme,
      value: B.value,
      start: B.start,
      end: B.end
    }),
    RSB: new tokens.LEXEME({
      type: C.lexeme,
      value: C.value,
      start: C.start,
      end: C.end
    }),
    day: DAY,
    month: MONTH,
    year: YEAR,
    hours: HH,
    minutes: MM,
    seconds: SS,
    microseconds: MS
  });
}
literal(A) ::= time_literal(B) . {
  A = B;
}