X-Git-Url: http://git.annexia.org/?a=blobdiff_plain;f=src%2Fparser.mly;h=7146a999a47ee21ee00874b45f4572201902cdc2;hb=81394242a7d6a47fad6d84ce33d488d38de2647a;hp=8b7d8a188e96cbf85805e406b0c25db81b2c3198;hpb=84972c63d0dbc5a0da05ae08a5b99a9ad81baadc;p=goals.git

diff --git a/src/parser.mly b/src/parser.mly
index 8b7d8a1..7146a99 100644
--- a/src/parser.mly
+++ b/src/parser.mly
@@ -21,11 +21,13 @@
 open Utils
 open Printf
 
-(* This is initialized with Lexer.read once the program
- * starts.  Doing this avoids a circular dependency caused
- * by include files.
+(* There are several circular dependencies between the lexer
+ * (caused by includes) and eval.  These references break
+ * the circular dependencies.  They are initialized when
+ * the program starts, hence are never really None.
  *)
 let lexer_read = ref None
+let eval_substitute = ref None
 
 let find_on_include_path filename =
   if not (Filename.is_implicit filename) then filename
@@ -40,27 +42,31 @@ let find_on_include_path filename =
   )
 
 let do_include env loc filename optflag file =
-  let filename = Eval.substitute env loc filename in
+  let eval_substitute =
+    match !eval_substitute with None -> assert false | Some f -> f in
+  let filename = eval_substitute env loc filename in
   let filename = find_on_include_path filename in
   if optflag && not (Sys.file_exists filename) then env
   else (
     let fp = open_in filename in
     let lexbuf = Lexing.from_channel fp in
     lexbuf.lex_curr_p <- { lexbuf.lex_curr_p with pos_fname = filename };
-    let reader =
+    let lexer_read =
      match !lexer_read with None -> assert false | Some r -> r in
-    let env' = file reader lexbuf in
+    let env' = file lexer_read lexbuf in
     close_in fp;
     Ast.Env.merge env env'
   )
 %}
 
 (* Tokens. *)
-%token CODE
 %token COLON
 %token COMMA
 %token EQUALS
 %token EOF
+%token EXPRESSION
+%token FUNCTION
 %token GOAL
 %token ID
 %token INCLUDE
@@ -68,10 +74,14 @@ let do_include env loc filename optflag file =
 %token LEFT_PAREN
 %token LET
 %token OPTINCLUDE
+%token PURE
+%token RETURNING
 %token RIGHT_ARRAY
 %token RIGHT_PAREN
 %token SEMICOLON
 %token STRING
+%token STRING_KEYWORD
+%token STRINGS
 %token TACTIC
 %token TACTIC_KEYWORD
 
@@ -108,6 +118,10 @@ stmt:
       let name, params = $1 in
       name, Ast.EGoalDefn ($loc, (params, [], [], Some $2))
     }
+  | option(PURE) FUNCTION ID params_decl return_decl EQUALS CODE
+    {
+      $3, Ast.EFuncDefn ($loc, ($4, $5, $1 <> None, $7))
+    }
   | TACTIC_KEYWORD TACTIC params_decl EQUALS CODE
     {
      $2, Ast.ETacticDefn ($loc, ($3, $5))
@@ -124,13 +138,18 @@ params_decl:
 ;
 param_decl:
   | ID { $1 }
+return_decl:
+  | { RetExpr }
+  | RETURNING EXPRESSION { RetExpr }
+  | RETURNING STRINGS { RetStrings }
+  | RETURNING STRING_KEYWORD { RetString }
 
 patterns:
   | separated_list(COMMA, pattern) { $1 }
 ;
 pattern:
   | STRING { Ast.PTactic ($loc, "*file", [$1]) }
-  | ID pattern_params { Ast.PTactic ($loc, $1, $2) }
+  | TACTIC pattern_params { Ast.PTactic ($loc, $1, $2) }
 ;
 pattern_params:
   | LEFT_PAREN separated_list(COMMA, pattern_param) RIGHT_PAREN { $2 }
@@ -140,15 +159,21 @@ pattern_param:
 ;
 
 expr:
-  | ID params { Ast.ECallGoal ($loc, $1, $2) }
+  | ID params { Ast.ECall ($loc, $1, $2) }
   | ID { Ast.EVar ($loc, $1) }
   | TACTIC params { Ast.ETacticCtor ($loc, $1, $2) }
   | STRING { Ast.ESubsts ($loc, $1) }
   | LEFT_ARRAY barelist RIGHT_ARRAY { Ast.EList ($loc, $2) }
 ;
 barelist:
-  | separated_list(COMMA, expr) { $1 }
+  | right_flexible_list(COMMA, expr) { $1 }
 ;
 params:
   | LEFT_PAREN separated_list(COMMA, expr) RIGHT_PAREN { $2 }
 ;
+
+(* http://gallium.inria.fr/blog/lr-lists/ *)
+right_flexible_list(delim, X):
+  | (* nothing *) { [] }
+  | x = X { [x] }
+  | x = X delim xs = right_flexible_list(delim, X) { x :: xs }
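
A note on the barelist change above: right_flexible_list comes from the linked gallium.inria.fr post on LR lists, and unlike menhir's separated_list it accepts an optional trailing delimiter, so a list may now end with a comma. Below is a small stand-alone sketch of the same rule, reusing the patch's COMMA/STRING/LEFT_ARRAY/RIGHT_ARRAY token names for readability; the %start wiring, the EOF token and the string type on STRING are assumptions made only so the fragment builds on its own with menhir (no lexer is shown), not the goals grammar itself.

  %token <string> STRING
  %token COMMA LEFT_ARRAY RIGHT_ARRAY EOF
  %start <string list> array
  %%

  (* Accepts both  [ "a", "b" ]  and  [ "a", "b", ]. *)
  array:
    | LEFT_ARRAY xs = right_flexible_list(COMMA, STRING) RIGHT_ARRAY EOF { xs }
  ;

  (* Same definition as in the patch: the final delimiter is optional. *)
  right_flexible_list(delim, X):
    | (* nothing *) { [] }
    | x = X { [x] }
    | x = X delim xs = right_flexible_list(delim, X) { x :: xs }
  ;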
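A note on the lexer_read and eval_substitute references near the top of the patch: both use the usual forward-reference trick for breaking a circular module dependency in OCaml, an option ref that starts as None and is filled in by the main program before any goal file is parsed, which is why the assert false branches are unreachable in normal operation (the comment in the patch says as much). A minimal self-contained sketch of the pattern follows; the name substitute_hook and the toy implementation are hypothetical, not taken from the goals tree.

  (* Hook module (hypothetical): only a reference is kept here, because the
   * real implementation lives in a module that itself depends on this one. *)
  let substitute_hook : (string -> string) option ref = ref None

  let substitute s =
    (* Filled in at program start-up, so the None case never fires. *)
    match !substitute_hook with
    | None -> assert false
    | Some f -> f s

  (* Main program (hypothetical): close the loop at start-up by storing the
   * real function, avoiding a compile-time circular dependency. *)
  let () =
    substitute_hook := Some String.uppercase_ascii;
    print_endline (substitute "hello")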