X-Git-Url: http://git.annexia.org/?a=blobdiff_plain;f=src%2Fparser.mly;h=7146a999a47ee21ee00874b45f4572201902cdc2;hb=881b2e9b7bad0da8f44418e9e6558710db5ce690;hp=c3148cae191b50a466fa4cf09ab48d2e7037c45d;hpb=976bb1b35d77f3058df3c25c1e2a4767147d606b;p=goals.git

diff --git a/src/parser.mly b/src/parser.mly
index c3148ca..7146a99 100644
--- a/src/parser.mly
+++ b/src/parser.mly
@@ -21,11 +21,13 @@
 open Utils
 open Printf
 
-(* This is initialized with Lexer.read once the program
- * starts. Doing this avoids a circular dependency caused
- * by include files.
+(* There are several circular dependencies between the lexer
+ * (caused by includes) and eval. These references break
+ * the circular dependencies. They are initialized when
+ * the program starts, hence are never really None.
  *)
 let lexer_read = ref None
+let eval_substitute = ref None
 
 let find_on_include_path filename =
   if not (Filename.is_implicit filename) then filename
@@ -40,27 +42,30 @@ let find_on_include_path filename =
   )
 
 let do_include env loc filename optflag file =
-  let filename = Eval.substitute env loc filename in
+  let eval_substitute =
+    match !eval_substitute with None -> assert false | Some f -> f in
+  let filename = eval_substitute env loc filename in
   let filename = find_on_include_path filename in
   if optflag && not (Sys.file_exists filename) then env
   else (
     let fp = open_in filename in
     let lexbuf = Lexing.from_channel fp in
     lexbuf.lex_curr_p <- { lexbuf.lex_curr_p with pos_fname = filename };
-    let reader =
+    let lexer_read =
       match !lexer_read with None -> assert false | Some r -> r in
-    let env' = file reader lexbuf in
+    let env' = file lexer_read lexbuf in
     close_in fp;
     Ast.Env.merge env env'
   )
 %}
 
 (* Tokens. *)
-%token CODE
+%token CODE
 %token COLON
 %token COMMA
 %token EQUALS
 %token EOF
+%token EXPRESSION
 %token FUNCTION
 %token GOAL
 %token ID
@@ -69,10 +74,14 @@ let do_include env loc filename optflag file =
 %token LEFT_PAREN
 %token LET
 %token OPTINCLUDE
+%token PURE
+%token RETURNING
 %token RIGHT_ARRAY
 %token RIGHT_PAREN
 %token SEMICOLON
 %token STRING
+%token STRING_KEYWORD
+%token STRINGS
 %token TACTIC
 %token TACTIC_KEYWORD
 
@@ -109,9 +118,9 @@ stmt:
       let name, params = $1 in
      name, Ast.EGoalDefn ($loc, (params, [], [], Some $2))
     }
-  | FUNCTION ID params_decl EQUALS CODE
+  | option(PURE) FUNCTION ID params_decl return_decl EQUALS CODE
     {
-      $2, Ast.EFuncDefn ($loc, ($3, $5))
+      $3, Ast.EFuncDefn ($loc, ($4, $5, $1 <> None, $7))
     }
   | TACTIC_KEYWORD TACTIC params_decl EQUALS CODE
     {
@@ -129,13 +138,18 @@ params_decl:
   ;
 param_decl:
   | ID { $1 }
+return_decl:
+  | { RetExpr }
+  | RETURNING EXPRESSION { RetExpr }
+  | RETURNING STRINGS { RetStrings }
+  | RETURNING STRING_KEYWORD { RetString }
 
 patterns:
   | separated_list(COMMA, pattern) { $1 }
   ;
 pattern:
   | STRING { Ast.PTactic ($loc, "*file", [$1]) }
-  | ID pattern_params { Ast.PTactic ($loc, $1, $2) }
+  | TACTIC pattern_params { Ast.PTactic ($loc, $1, $2) }
   ;
 pattern_params:
   | LEFT_PAREN separated_list(COMMA, pattern_param) RIGHT_PAREN { $2 }
@@ -152,8 +166,14 @@ expr:
   | LEFT_ARRAY barelist RIGHT_ARRAY { Ast.EList ($loc, $2) }
   ;
 barelist:
-  | separated_list(COMMA, expr) { $1 }
+  | right_flexible_list(COMMA, expr) { $1 }
   ;
 params:
   | LEFT_PAREN separated_list(COMMA, expr) RIGHT_PAREN { $2 }
   ;
+
+(* http://gallium.inria.fr/blog/lr-lists/ *)
+right_flexible_list(delim, X):
+  | (* nothing *) { [] }
+  | x = X { [x] }
+  | x = X delim xs = right_flexible_list(delim, X) { x :: xs }
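
The header change adds a second forward reference, eval_substitute, alongside lexer_read: both start as None and are filled in when the program starts, which is how the grammar file avoids a compile-time dependency cycle with the lexer and eval. The following is a minimal OCaml sketch of that pattern, not code from goals.git; substitute_hook and do_substitute are made-up names standing in for eval_substitute and the code in do_include.

(* Sketch of the forward-reference pattern: the hook is None until
 * start-up code installs the real function, so the assert can never
 * fire once the program has been wired up. *)
let substitute_hook : (string -> string) option ref = ref None

let do_substitute s =
  match !substitute_hook with
  | None -> assert false   (* initialized at start-up, never really None *)
  | Some f -> f s

let () =
  (* Start-up wiring, analogous to storing Eval.substitute in the ref. *)
  substitute_hook := Some String.uppercase_ascii;
  print_endline (do_substitute "goalfile")

In the diff the same trick appears twice: do_include reaches Eval.substitute through !eval_substitute and the lexer through !lexer_read, so after this change the parser no longer references the Eval module directly.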
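
The new stmt alternative, option(PURE) FUNCTION ID params_decl return_decl EQUALS CODE, together with the return_decl rule, means a function definition may now carry an optional purity flag and an optional return-type declaration (an expression, a list of strings, or a single string), both of which are threaded into Ast.EFuncDefn. Assuming the tokens lex as the obvious lowercase keywords and that CODE is a braced code block (neither is shown in this diff), a declaration would look roughly like:

pure function sources () returning strings = {
    echo *.ml
}

The purity flag reaches the AST as the boolean $1 <> None, i.e. whether the optional PURE token was present.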
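
At the bottom, barelist switches from separated_list to the new right_flexible_list rule (taken from the linked gallium.inria.fr article), whose effect is that the delimiter after the last element becomes optional. In this excerpt barelist appears only in the LEFT_ARRAY barelist RIGHT_ARRAY alternative of expr, so the visible consequence is that a trailing comma inside a list expression now parses, while params keeps the strict separated_list behaviour. Below is a small self-contained OCaml model of the accepted shapes, written over a plain token list purely for illustration; the token type and function are not part of goals.git.

(* Model of right_flexible_list(COMMA, expr): comma-separated elements
 * where the comma after the last element is optional. *)
type token = ELEM of string | COMMA

let rec right_flexible_list = function
  | ELEM x :: COMMA :: rest ->
      let xs, rest = right_flexible_list rest in
      (x :: xs, rest)
  | ELEM x :: rest -> ([x], rest)
  | rest -> ([], rest)                  (* the empty list is also valid *)

let () =
  let xs, _ = right_flexible_list [ ELEM "a"; COMMA; ELEM "b" ] in
  let ys, _ = right_flexible_list [ ELEM "a"; COMMA; ELEM "b"; COMMA ] in
  assert (xs = ys);                     (* trailing comma accepted *)
  List.iter print_endline xs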