Merge pull request #78 from HalflingHelper/iff-precedence
Parser!!!!
jsiek authored Jan 24, 2025
2 parents 41256e3 + ea211d6 commit fa13a63
Showing 6 changed files with 123 additions and 49 deletions.
26 changes: 15 additions & 11 deletions Deduce.lark
@@ -9,22 +9,24 @@ WS: /[ \t\f\r\n]/+
LINECOMMENT: "//" /[^\n]*/
COMMENT: /\/\*([^\*]|\*+[^\/])*\*+\//

?term: term "iff" term_log -> iff_formula
| term "<=>" term_log -> iff_formula
| term "" term_log -> iff_formula
?term: term_iff

?term_iff: term_iff "iff" term_log -> iff_formula
| term_iff "<=>" term_log -> iff_formula
| term_iff "" term_log -> iff_formula
| term_log

?term_log: term_log "and" term_compare -> and_formula
| term_log "or" term_compare -> or_formula
?term_log: term_log "and" term_equal -> and_formula
| term_log "or" term_equal -> or_formula
| term_log ":" type -> annote_type
| "if" term_log "then" term_log "else" term_log -> conditional
| "if" term_log "then" term_log -> if_then_formula
| term_equal

?term_equal: term_equal "=" term_compare -> equal
| term_equal "" term_compare -> not_equal
| term_equal "/=" term_compare -> not_equal
| term_compare

?term_compare: term_compare "=" term_add -> equal
| term_compare "" term_add -> not_equal
| term_compare "/=" term_add -> not_equal
| term_compare "<" term_add -> less
?term_compare: term_compare "<" term_add -> less
| term_compare ">" term_add -> greater
| term_compare "" term_add -> less_equal
| term_compare "<=" term_add -> less_equal
@@ -99,6 +101,8 @@ ident: IDENT -> ident
| "(" term ")" -> paren
| "[" term_list "]" -> list_literal
| "#" term "#" -> mark
| "if" term "then" term "else" term -> conditional
| "if" term "then" term -> if_then_formula
| "%" type

?term_list: -> empty
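
A minimal standalone Lark sketch of the precedence layering introduced above (the atom and NAME rules here are hypothetical stand-ins for the rest of the Deduce grammar): putting "iff" on its own term_iff level below "and"/"or" makes a formula such as P and Q iff Q and R group as (P and Q) iff (Q and R).

from lark import Lark

iff_demo_grammar = r"""
?term: term_iff

?term_iff: term_iff "iff" term_log -> iff_formula
         | term_log

?term_log: term_log "and" atom -> and_formula
         | term_log "or" atom -> or_formula
         | atom

?atom: NAME
     | "(" term ")"

%import common.CNAME -> NAME
%import common.WS
%ignore WS
"""

# The default Earley parser handles the left-recursive rules above.
iff_demo_parser = Lark(iff_demo_grammar, start="term")
tree = iff_demo_parser.parse("P and Q iff Q and R")
print(tree.pretty())  # iff_formula at the root with two and_formula children
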
2 changes: 1 addition & 1 deletion abstract_syntax.py
@@ -1553,7 +1553,7 @@ class All(Formula):

def copy(self):
x, t = self.var
return All(self.location, (x, t.copy()), self.pos, self.body.copy())
return All(self.location, self.typeof, (x, t.copy()), self.pos, self.body.copy())

def __str__(self):
v, t = self.var
107 changes: 70 additions & 37 deletions rec_desc_parser.py
@@ -29,8 +29,8 @@ def get_deduce_directory():

mult_operators = {'*', '/', '%', '∘', '.o.'}
add_operators = {'+', '-', '∪', '|', '∩', '&', '⨄', '.+.', '++' }
compare_operators = {'<', '>', '≤', '<=', '>', '≥', '>=', '⊆', '(=', '∈', 'in'}
equal_operators = {'=', '≠', '!='}
compare_operators = {'<', '>', '≤', '<=', '≥', '>=', '⊆', '(=', '∈', 'in'}
equal_operators = {'=', '≠', '/='}
iff_operators = {'iff', "<=>", "⇔"}

to_unicode = {'.o.': '∘', '|': '∪', '&': '∩', '.+.': '⨄', '<=': '≤', '>=': '≥',
@@ -53,10 +53,16 @@ def init_parser():
token_list = []

def current_token():
return token_list[current_position]
if end_of_file():
error(meta_from_tokens(token_list[-1], token_list[-1]),
'Expected a token, got end of file')
return token_list[current_position]

def next_token():
return token_list[current_position + 1]
if current_position + 1 >= len(token_list):
error(meta_from_tokens(token_list[-1], token_list[-1]),
'Expected a token, got end of file')
return token_list[current_position + 1]

def previous_token():
return token_list[current_position - 1]
@@ -360,10 +366,13 @@ def parse_term_hi():
token = current_token()
if token.type != 'RSQB':
error(meta_from_tokens(current_token(),current_token()),
'expected closing bracket \']\', not\n\t' + current_token().value)
'expected closing bracket "]" at end of list literal, not\n\t' + current_token().value)
advance()
return listToNodeList(meta_from_tokens(token,token), lst_terms)


elif token.type == 'DEFINE':
return parse_define_term()

else:
try:
name = parse_identifier()
@@ -407,9 +416,9 @@ def parse_call():
advance()
args = parse_term_list()
if current_token().type != 'RPAR':
msg = f'expected closing parenthesis ")", not\n\t{current_token().value}'
if current_token().type == 'IDENT':
msg += '\nPerhaps you forgot a comma?'
msg = 'expected closing parenthesis ")", not\n\t' \
+ current_token().value \
+ '\nPerhaps you forgot a comma?'
error(meta_from_tokens(start_token, previous_token()), msg)
term = Call(meta_from_tokens(start_token, current_token()), None,
term, args)
@@ -499,7 +508,7 @@ def parse_term_equal():
if opr == '=':
term = Call(meta_from_tokens(token, previous_token()), None,
eq, [term,right])
elif opr == '≠' or opr == '!=':
elif opr == '≠' or opr == '/=':
term = IfThen(meta, None,
Call(meta, None, eq, [term,right]),
Bool(meta, None, False))
@@ -528,32 +537,40 @@ def parse_term_logic():

return term

def parse_term_iff():
token = current_token()
term = parse_term_logic()
while (not end_of_file()) and (current_token().value in iff_operators):
opr = current_token().type
advance()
right = parse_term_logic()
loc = meta_from_tokens(token, previous_token())
left_right = IfThen(loc, None, term.copy(), right.copy())
right_left = IfThen(loc, None, right.copy(), term.copy())
term = And(loc, None, [left_right, right_left])

if (not end_of_file()) and current_token().type == 'COLON':
advance()
typ = parse_type()
term = TAnnote(meta_from_tokens(token, previous_token()), None,
term, typ)

return term

def parse_term():
if end_of_file():
error(meta_from_tokens(previous_token(),previous_token()),
'expected a term, not end of file')

token = current_token()
term = parse_term_iff()

if token.type == 'DEFINE':
return parse_define_term()
else:
term = parse_term_logic()
if (not end_of_file()) and (current_token().value in iff_operators):
advance()
right = parse_term_logic()
loc = meta_from_tokens(token, previous_token())
left_right = IfThen(loc, None, term.copy(), right.copy())
right_left = IfThen(loc, None, right.copy(), term.copy())
term = And(loc, None, [left_right, right_left])

if (not end_of_file()) and current_token().type == 'COLON':
advance()
typ = parse_type()
term = TAnnote(meta_from_tokens(token, previous_token()), None,
term, typ)

return term
if not end_of_file() and current_token().type == 'COLON':
advance()
typ = parse_type()
term = TAnnote(meta_from_tokens(token, previous_token()), None,
term, typ)
return term

def parse_define_term():
while_parsing = 'while parsing\n' \
@@ -591,12 +608,12 @@ def parse_assumption():
else:
return label,None

proof_keywords = {'apply', 'arbitrary',
proof_keywords = {'apply', 'arbitrary', 'assume',
'cases', 'choose', 'conclude', 'conjunct',
'definition',
'enable', 'equations', 'extensionality',
'have', 'induction', 'obtain',
'reflexive', 'rewrite',
'enable', 'equations', 'evaluate', 'extensionality',
'have', 'induction', 'injective', 'obtain',
'recall', 'reflexive', 'rewrite',
'suffices', 'suppose', 'switch', 'symmetric',
'transitive'}

@@ -1085,6 +1102,11 @@ def parse_have():
start_token = current_token()
token = start_token
advance()

if end_of_file():
error(meta_from_tokens(start_token, start_token),
'expected an identifier or colon after "have", not end of file')

if current_token().type != 'COLON':
try:
label = parse_identifier()
@@ -1094,13 +1116,20 @@
+ current_token().value)
else:
label = '_'
if current_token().type != 'COLON':

if end_of_file():
error(meta_from_tokens(start_token, start_token),
'expected a colon after label of "have", not end of file')
elif current_token().type != 'COLON':
error(meta_from_tokens(current_token(), current_token()),
'expected a colon after label of "have", not\n\t' \
+ current_token().value)
advance()
proved = parse_term()
if current_token().type == 'BY':
if end_of_file():
error(meta_from_tokens(start_token, start_token),
'expected the keyword "by" after formula of "have", not end of file')
elif current_token().type == 'BY':
advance()
because = parse_proof()
else:
@@ -1246,8 +1275,12 @@ def parse_theorem():
try:
name = parse_identifier()
except Exception as e:
error(meta_from_tokens(current_token(), current_token()),
'expected name of theorem, not:\n\t' + current_token().value)
if end_of_file():
error(meta_from_tokens(start_token, start_token),
'expected name of theorem, not end of file')
else:
error(meta_from_tokens(current_token(), current_token()),
'expected name of theorem, not:\n\t' + current_token().value)

if current_token().type != 'COLON':
error(meta_from_tokens(current_token(), current_token()),
1 change: 1 addition & 0 deletions test/should-error/deep_error.pf.err
@@ -1,5 +1,6 @@
./test/should-error/deep_error.pf:1.28-1.34: expected closing parenthesis ")", not
2
Perhaps you forgot a comma?
./test/should-error/deep_error.pf:1.28-1.34: while parsing function call
term ::= term "(" term_list ")"

9 changes: 9 additions & 0 deletions test/should-pass/bicond3.pf
@@ -0,0 +1,9 @@
theorem bicond_commute : all P : bool, Q : bool.
if P <=> Q then Q <=> P
proof
arbitrary P : bool, Q : bool
switch P {
case true assume prop_t { . }
case false assume prop_f { . }
}
end
27 changes: 27 additions & 0 deletions test/should-pass/bicond4.pf
@@ -0,0 +1,27 @@
theorem bicond_trans : all P : bool, Q : bool, R : bool.
if (P <=> Q) and (Q <=> R) then P <=> R
proof
arbitrary P : bool, Q : bool, R : bool
switch P {
case true assume prop_t {
switch Q {
case true assume prop_t {
.
}
case false assume prop_f {
.
}
}
}
case false assume prop_f {
switch Q {
case true assume prop_t {
.
}
case false assume prop_f {
.
}
}
}
}
end
