fix rules and literal

Aleksey Chichenkov 2019-01-30 13:13:05 +03:00
parent 5e8c6bef71
commit 7f8ea12ffb
6 changed files with 353 additions and 225 deletions


@@ -21,23 +21,60 @@
 var token;
 var lexemes = [];
 while (token = lexer.next()) {
-    if(_result.error) {
-        return { success: false }
+    switch(token.error){
+        case 0:
+            console.log("PARSE", token.lexeme);
+            parser.parse(parser["TOKEN_" + token.lexeme], token);
+            lexemes.push(token);
+            break;
+        case 1:
+            return {
+                success: false,
+                message: "Found unknown symbol on position",
+                error: 1,
+                token: token
+            };
+        case 2:
+            return {
+                success: false,
+                message: "Not found close quote",
+                error: 2,
+                token: token
+            };
+        case 3:
+            return {
+                success: false,
+                message: "Unexpected symbol in oid structure",
+                error: 3,
+                token: token
+            };
+        case 4:
+            return {
+                success: false,
+                message: "Not found close bracket for Oid",
+                error: 4,
+                token: token
+            };
     }
-    if (token.error === 0) {
-        console.log("PARSE", token.lexeme);
-        parser.parse(parser["TOKEN_" + token.lexeme], token);
-        lexemes.push(token);
+    if(_result.error) {
+        return {
+            success: false,
+            message: "Syntax error",
+            error: 0
+        }
     }
 }
 parser.parse();
-return {
-    success: true,
-    tree: _result.root_node,
-    lexemes: lexemes
-};
+if (_result.root_node !== undefined) {
+    return {
+        success: true,
+        tree: _result.root_node,
+        lexemes: lexemes
+    };
+} else {
+    return { success: false }
+}
 };
 }
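
For reference, a minimal sketch of how a caller might consume the result object returned by the reworked loop above. The require path, the exported name, and the sample query string are assumptions; only the success/tree/lexemes/message/error/token fields come from the hunk itself.

// Hedged sketch: "./parser" and the query text are hypothetical placeholders.
var parse = require("./parser").parse;

var result = parse("foo.bar == 1");
if (result.success) {
    // Successful parse: the syntax tree plus the raw lexemes collected in the loop.
    console.log(result.tree, result.lexemes);
} else {
    // On failure, message/error/token are present for lexer errors (cases 1..4)
    // and syntax errors (error 0); the bare { success: false } case carries no extra fields.
    console.error(result.message, result.error, result.token);
}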


@@ -346,48 +346,16 @@ literal(A) ::= address_literal(B) . {
     A = B;
 }
-oid_literal_content(A) ::= id(B) . {
-    A = new tokens.oid_literal_content({
-        children: [B]
-    });
-}
-oid_literal_content(A) ::= oid_literal_content(B) DOT id(C) . {
-    B.add(C);
-    A = B;
-}
-oid_literal_content_or_empty(A) ::= oid_literal_content(B) . {
-    A = B;
-}
-oid_literal_content_or_empty(A) ::= . {
-    A = new tokens.oid_literal_content({
-        children: []
-    });
-}
-oid_literal(A) ::= OID(B) LSB(C) oid_literal_content_or_empty(D) RSB(E) . {
+oid_literal(A) ::= OID_LITERAL(B) . {
     A = new tokens.oid_literal({
         children: D.children,
-        keyword: new tokens.LEXEME({
-            type: B.lexeme,
-            value: B.value,
-            start: B.start,
-            end: B.end
-        }),
-        LSB: new tokens.LEXEME({
-            type: C.lexeme,
-            value: C.value,
-            start: C.start,
-            end: C.end
-        }),
-        RSB: new tokens.LEXEME({
-            type: E.lexeme,
-            value: E.value,
-            start: E.start,
-            end: E.end
-        })
+        LSB: B.lsb,
+        RSB: B.rsb
     });
 }
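
The rewritten rule assumes the lexer now emits a single OID_LITERAL token that already carries its bracket lexemes, so a rough sketch of the token shape implied by the action above may help. Only the lsb and rsb fields (and the lexeme/value/start/end names reused from the old action) are visible in the diff; the concrete values and any other fields are assumptions.

// Hedged sketch of the token object the new oid_literal action appears to expect;
// the sample values are invented, the field names follow the hunks above.
var exampleOidToken = {
    lexeme: "OID_LITERAL",                                   // token class, as used by parser["TOKEN_" + token.lexeme]
    value: "Oid[1.2.3]",                                     // assumed raw text of the literal
    start: 0,                                                // assumed start offset in the input
    end: 10,                                                 // assumed end offset in the input
    lsb: { lexeme: "LSB", value: "[", start: 3, end: 4 },    // opening bracket, passed through as LSB
    rsb: { lexeme: "RSB", value: "]", start: 9, end: 10 }    // closing bracket, passed through as RSB
};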