diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index b9f2dc501893fab4d0f5a52233ddda8d09606ebd..6ac9cfded50fa140b7d3b22a6477a4a78fe9144a 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -27,7 +27,7 @@ tests_with_recompilation: script: - rm -f Src/COLIBRI/lib/v7/x86_64_linux/* Src/COLIBRI/simplex_ocaml.pl #OCaml dependencies - - opam depext --install "dune.2.7.1" "ocplib-simplex.0.4" "zarith.1.9.1" "parsexp.v0.14.0" "menhir.20200624" "fmt.0.8.8" "spelll.0.3" "uutf.1.0.2" "gen.0.5.3" + - opam depext --install "dune.2.7.1" "ocplib-simplex.0.4" "zarith.1.9.1" "parsexp.v0.14.0" "menhir.20200624" "fmt.0.8.8" "spelll.0.3" "uutf.1.0.2" "gen.0.5.3" "pp_loc" #OCaml compilation - make ECLIPSEBIN=$(pwd)/Bin/ECLIPSE_V7.0_45/ #Bundle in bundle directory diff --git a/Src/COLIBRI/check_lin_expr.pl b/Src/COLIBRI/check_lin_expr.pl index 6497dd8ba0f894d1a80977a8f4b4b9e6433d4c33..7bf49367c397a63d476b7d786f2e59d6fa3a05ea 100755 --- a/Src/COLIBRI/check_lin_expr.pl +++ b/Src/COLIBRI/check_lin_expr.pl @@ -5,7 +5,7 @@ :- setval(depth_lin_expr,5)@eclipse. :- import timeout/3 from timeout. -%check_exists_lin_expr_giving_diff_args(Type,A,B,Stop) :- !. +% check_exists_lin_expr_giving_diff_args(Type,A,B,Stop) :- !. check_exists_lin_expr_giving_diff_args(Type,A,B,Stop) :- % A REVOIR POUR Invalid/peterson_vt.lus įa boucle sur get_sum_giving ! 
var(Stop), @@ -13,13 +13,16 @@ check_exists_lin_expr_giving_diff_args(Type,A,B,Stop) :- Type == real), once (var(A); var(B)), + getval(use_delta,1)@eclipse, + getval(use_simplex,1)@eclipse, !, (block(timeout(try_check_exists_lin_expr_giving_diff_args(Type,A,B,Stop), 0.5, %1.0Inf, true), Tag, - (call(spy_here)@eclipse, + (exit_block(Tag), + call(spy_here)@eclipse, try_check_exists_lin_expr_giving_diff_args(Type,A,B,Stop))) -> true diff --git a/Src/COLIBRI/col_solve.pl b/Src/COLIBRI/col_solve.pl index adbbfba84527852250b1616658d9ad3a8a3289e0..a2e8486a6bd48daf018d883ff77bca53647c1caa 100644 --- a/Src/COLIBRI/col_solve.pl +++ b/Src/COLIBRI/col_solve.pl @@ -610,7 +610,9 @@ smt_solve_bis0(Test,FILE,TO,Code) :- (nonvar(Code) -> true ; save_goal_before_check_sat(Goal), - setval(time_limit,TO), + (var(TO) -> + setval(time_limit,0) + ; setval(time_limit,TO)), statistics(session_time,Start), setval(start_solve,Start), % Essai pour smt_comp : pas de delta/simplex apres DDSteps @@ -622,8 +624,9 @@ smt_solve_bis0(Test,FILE,TO,Code) :- -> smt_disable_delta_check ; smt_enable_delta_check), + getval(time_limit,NTO), block((timeout(not not call(Goal), - TO, + NTO, exit_block(timeout_col)) -> getval(diag_code,(Diag,Code)) @@ -696,15 +699,16 @@ save_goal_before_check_sat(Goal) :- get_type_decl(NGoal0,Decl,NGoal), % call(Decl), keep_ground_goals(NGoal,NGoal1,GCG,EGCG), - term_variables(NGoal1,LVars), + sort(NGoal1,NGoal2), + term_variables(NGoal2,LVars), +% my_term_variables(NGoal2,LVars), % Les CGVars ne contiennent que des variables % attribuees (donc celles de Decl normalement ?) 
-% copy_term(NGoal1,CNGoal1,CGVars), - setval(cgoals,NGoal1), - getval(cgoals,CNGoal1), -% copy_term(NGoal1,CNGoal1), - term_variables(CNGoal1,CLVars), + setval(cgoals,NGoal2), + getval(cgoals,CNGoal2), + term_variables(CNGoal2,CLVars), +% my_term_variables(CNGoal2,CLVars), call(Decl), (foreach(MV,LVars), foreach(CV,CLVars), @@ -712,15 +716,27 @@ save_goal_before_check_sat(Goal) :- (get_variable_type(MV,Type) -> OTV = [(MV,Type,CV)|ITV] ; OTV = ITV)), -% protected_unify(CNGoal1,EGCG), -/* - (foreach([V|CV],CGVars), - foreach((V,Type,CV),TCGVars) do - get_variable_type(V,Type)), -*/ -% setval(gsat,GCG-TCGVars). setval(gsat,CNGoal1-TCGVars). +:- export my_term_variables/2. +my_term_variables(Term,Vars) :- + my_term_variables0(Term,Vars0,EVars0), + EVars0 = [], + sort(Vars0,Vars). + +my_term_variables0(Term,Vars,EVars) :- + var(Term), + !, + Vars = [Term|EVars]. +my_term_variables0([],Vars,EVars) ?- !, + EVars = Vars. +my_term_variables0([Arg|Args],Vars,EVars) ?- !, + my_term_variables0(Arg,Vars,EVars0), + my_term_variables0(Args,EVars0,EVars). +my_term_variables0(Term,Vars,EVars) :- + Term =.. [_|Args], + my_term_variables0(Args,Vars,EVars). + keep_ground_goals([G|Goals],NGoals,GrGoals,EndGrGoals) ?- !, (ground(G) -> GrGoals = [G|EGrGoals], @@ -799,8 +815,8 @@ smt_test_CI(TO,Size) :- smt_test(TO,Size,CI) :- %StrDir = "./colibri_tests/colibri/tests/", %StrDir = "./colibri_tests/colibri/tests/sat/", - % 0 (sans real/float->int!) 1 TO sur newton à 15s mais pas a 24s - %StrDir = "./colibri_tests/colibri/tests/unsat/", %0 + % 0 (sans real/float->int!) 
+ StrDir = "./colibri_tests/colibri/tests/unsat/", %0 %StrDir = "./colibri_tests/colibri/tests/unknown/", %StrDir = "./colibri_tests/colibri/tests/timeout/", @@ -899,7 +915,7 @@ smt_test(TO,Size,CI) :- %StrDir = "./QF_BV/2019-Mann/",% 2/2 %StrDir = "./QF_BV/2019-Wolf-fmbench/",% 14/14 %StrDir = "./QF_BV/20200415-Yurichev/",% 1/1 - %StrDir = "./QF_BV/bmc-bv-svcomp14/",% 16/32 + %StrDir = "./QF_BV/bmc-bv-svcomp14/",% 18/24 (8 I) 5s %StrDir = "./QF_BV/calypto/",% 20/23 %StrDir = "./QF_BV/challenge/",% 2/2 %StrDir = "./QF_BV/check2/",% 0/6 @@ -913,31 +929,41 @@ smt_test(TO,Size,CI) :- %StrDir = "./QF_BV/pipe/",% 1/1 %StrDir = "./QF_BV/pspace/",% 23/86 %StrDir = "./QF_BV/rubik/", % 7/7 - %StrDir = "./QF_BV/RWS/", % 19/20 + %StrDir = "./QF_BV/RWS/", % 19/19 (1 I) %StrDir = "./QF_BV/sage/", % 123/26607 (15 coredump) - %StrDir = "./QF_BV/spear/", % 1325/1695 - %StrDir = "./QF_BV/stp_samples/", % 43/426 - %StrDir = "./QF_BV/tacas07/", % 4/5 - %StrDir = "./QF_BV/uclid/", % 392/414 + %StrDir = "./QF_BV/sage/app1/", % 31/2676 + %StrDir = "./QF_BV/sage/app2/", % 1/1346 + %StrDir = "./QF_BV/sage/app5/", % 2/1103 + %StrDir = "./QF_BV/sage/app6/", % 0/1345 + %StrDir = "./QF_BV/sage/app7/", % 11/4813 + %StrDir = "./QF_BV/sage/app8/", % 15/2756 + %StrDir = "./QF_BV/sage/app9/", % 16/3301 + %StrDir = "./QF_BV/sage/app10/", % 0/51 + %StrDir = "./QF_BV/sage/app11/", % 611 too big + %StrDir = "./QF_BV/sage/app12/", % 12/5784 + %StrDir = "./QF_BV/spear/", % 501/807 (888 I) + %StrDir = "./QF_BV/stp_samples/", % 45/426 + %StrDir = "./QF_BV/tacas07/", % 3/3 (2 I) + %StrDir = "./QF_BV/uclid/", % 0/4 (410 I) %StrDir = "./QF_BV/uclid_contrib_smtcomp09/", % 7/7 - %StrDir = "./QF_BV/uum/", % 0/8 - %StrDir = "./QF_BV/VS3/", % 11/11 - %StrDir = "./QF_BV/wienand-cav2008/", % 5/18 - %StrDir = "./QF_BV/asp/", % 388/391 + %StrDir = "./QF_BV/uum/", % 8/8 + %StrDir = "./QF_BV/VS3/", % 10/10 (1 I) + %StrDir = "./QF_BV/wienand-cav2008/", % 3/15 (3 I) + %StrDir = "./QF_BV/asp/", % 35/35 (356 I) %StrDir = 
"./QF_BV/bench_ab/",% 0/285 %StrDir = "./QF_BV/bmc-bv/", % 8/30 (2 coredump) %StrDir = "./QF_BV/bmc-bv-svcomp14/", % 16/32 %StrDir = "./QF_BV/brummayerbiere/", % 44/52 - %StrDir = "./QF_BV/brummayerbiere2/", % 38/65 - %StrDir = "./QF_BV/brummayerbiere3/", % 79/79 + %StrDir = "./QF_BV/brummayerbiere2/", % 30/65 + %StrDir = "./QF_BV/brummayerbiere3/", % 73/73 (6 I) %StrDir = "./QF_BV/brummayerbiere4/", % 0/10 %StrDir = "./QF_BV/bruttomesso/", % 975/976 - %StrDir = "./QF_BV/crafted/", % 5/21 + %StrDir = "./QF_BV/crafted/", % 6/21 %----------------------------------------------------------------------- %StrDir = "./QF_AUFBVFP/20210301-Alive2/",% 1 %----------------------------------------------------------------------- - StrDir = "QF_ABVFP/20170428-Liew-KLEE/imperial_svcomp_float-benchs_svcomp_mea8000.x86_64/", %0 + %StrDir = "QF_ABVFP/20170428-Liew-KLEE/imperial_svcomp_float-benchs_svcomp_mea8000.x86_64/", %0 %StrDir = "QF_ABVFP/20170428-Liew-KLEE/imperial_synthetic_non_terminating_klee_bug.x86_64/", % 0 %StrDir = "./QF_ABVFP/20170428-Liew-KLEE/aachen_real_gmp_gmp_klee_mul.x86_64/", % 3 (bitwuzla 0) @@ -974,7 +1000,7 @@ smt_test(TO,Size,CI) :- %StrDir = "./QF_FP/20170501-Heizmann-UltimateAutomizer/", % 0 %StrDir = "./QF_FP/20190429-UltimateAutomizerSvcomp2019/",% 0 (bitwuzla 0) %StrDir = "./QF_FP/ramalho/", % 0 (cvc4 19)(bitwuzla 17) - %StrDir = "./QF_FP/griggio/", % 51 (min_solve, sans lin_solve ni ls_reduce..)(39) + %StrDir = "./QF_FP/griggio/", % 49 (min_solve, sans lin_solve ni ls_reduce..)(39) %(cvc4 89)(bitwuzla 74) LES DD DEMARRENT TROP VITE ? %StrDir = "./QF_FP/schanda/spark/", % 6! (min_solve avec X =< (X+Y)/2 =< Y) (ncvc4 8)(bitwuzla 3) %StrDir = "./QF_FP/wintersteiger/", % tout OK @@ -1264,16 +1290,16 @@ smt_unit_test(TO) :- smt_unit_test_CI(TO) :- smt_unit_test(TO,1). +% pour limiter les bv à 256, sinon pbs de TO pas dÊclenchÊs ! +:- setval(unit_tests,0)@eclipse. 
+ smt_unit_test(TO,CI) :- + setval(unit_tests,1)@eclipse, setval(bug,[]), - StrDir = "./colibri_tests/colibri/tests/sat/", %0 (sans real/float-> - % int!) des TOs sur - % newton en CI - %StrDir = "./colibri_tests/colibri/tests/unsat/", %0 + StrDir = "./colibri_tests/colibri/tests/sat/", % 0 + %StrDir = "./colibri_tests/colibri/tests/unsat/", % 0 - %StrDir = "./QF_LIA_sat/20180326-Bromberger/", - %StrDir = "./QF_BVFP/20170428-Liew-KLEE/imperial_svcomp_float-benchs_svcomp_mea8000.x86_64/", %StrDir = "./smtlib_schanda-master/random/", % tout OK ou unsupported %StrDir = "./smtlib_schanda-master/random/fp.from.real/", %StrDir = "./smtlib_schanda-master/random/fp.fma/", @@ -1282,98 +1308,103 @@ smt_unit_test(TO,CI) :- %StrDir = "./smtlib_schanda-master/nyxbrain/", %StrDir = "./smtlib_schanda-master/spark_2014/", %------------------------------------------------------------------------ - %StrDir = "./QF_ABVFPLRA/20190429-UltimateAutomizerSvcomp2019/",% 2-3 (4) - %StrDir = "./QF_ABVFPLRA/20170501-Heizmann-UltimateAutomizer/",% 0-1 (0) + %StrDir = "./QF_ABVFPLRA/20190429-UltimateAutomizerSvcomp2019/",% 0 + %StrDir = "./QF_ABVFPLRA/20170501-Heizmann-UltimateAutomizer/",% 0 %------------------------------------------------------------------------ %StrDir = "./QF_ABVFP/20170428-Liew-KLEE/", % 79 TO (69 sans simplex) (cvc4 76) - %StrDir = "./QF_ABVFP/20170428-Liew-KLEE/aachen_real_gmp_gmp_klee_mul.x86_64/", %StrDir = "./QF_ABVFP/20170501-Heizmann-UltimateAutomizer/", % 0 TO - - %StrDir = "./QF_ABVFP/20170428-Liew-KLEE/imperial_synthetic_count_klee_bug.x86_64/", % 6 TO/9! (cvc4 0) - %StrDir = "./QF_ABVFP/20170428-Liew-KLEE/imperial_synthetic_count_klee.x86_64/", % 3/87! 
(0) - %StrDir = "./QF_ABVFP/20170428-Liew-KLEE/aachen_real_sorting_full_sym_floats.x86_64/", % 2 - %StrDir = "./QF_ABVFP/20170428-Liew-KLEE/aachen_real_sorting_doubles.x86_64/", % 10/58 %------------------------------------------------------------------------ %StrDir = "./QF_BVFP/", %StrDir = "./QF_BVFP/20170428-Liew-KLEE/", % 33-76 () %StrDir = "./QF_BVFP/20170501-Heizmann-UltimateAutomizer/", % 0 TO - %StrDir = "./QF_BVFP/20190429-UltimateAutomizerSvcomp2019/",% 1 %StrDir = "./QF_BVFP/ramalho/", % 0 TO %StrDir = "./QF_BVFP/schanda/spark/", % 1 TO %------------------------------------------------------------------------ - %StrDir = "./QF_UFFP/schanda/spark/",% 0 TO + %StrDir = "./QF_UFFP/schanda/spark/",% 0/2 TO %------------------------------------------------------------------------ - %StrDir = "./QF_FPLRA/20170501-Heizmann-UltimateAutomizer/",% 0 TO - %StrDir = "./QF_FPLRA/20190429-UltimateAutomizerSvcomp2019/",% 0 TO - %StrDir = "./QF_FPLRA/schanda/spark/",% 1-0 TO - %StrDir = "./QF_FPLRA/2019-Gudemann/",% 2 (11) + %StrDir = "./QF_FPLRA/20170501-Heizmann-UltimateAutomizer/",% 0/3 TO + %StrDir = "./QF_FPLRA/20190429-UltimateAutomizerSvcomp2019/",% 0/8 TO + %StrDir = "./QF_FPLRA/schanda/spark/",% 0/2 TO + %StrDir = "./QF_FPLRA/2019-Gudemann/",% 2/13 (11) %------------------------------------------------------------------------ - %StrDir = "./QF_BVFPLRA/20170501-Heizmann-UltimateAutomizer/",% 0 TO - %StrDir = "./QF_BVFPLRA/20190429-UltimateAutomizerSvcomp2019/",% 15-11 (22) - %StrDir = "./QF_BVFPLRA/2019-Gudemann/",% 0 TO + %StrDir = "./QF_BVFPLRA/20170501-Heizmann-UltimateAutomizer/",% 0/15 TO + %StrDir = "./QF_BVFPLRA/20190429-UltimateAutomizerSvcomp2019/",% 8/ + %152 + % + % (11 u) + %StrDir = "./QF_BVFPLRA/2019-Gudemann/",% 0/1 TO %------------------------------------------------------------------------ - %StrDir = "./QF_FP/20170501-Heizmann-UltimateAutomizer/", % 0 TO - %StrDir = "./QF_FP/ramalho/",% 6-2 T0 - %StrDir = "./QF_FP/griggio/", % 59 TO en 24s, 51 
en 60s (cvc4 90 en 60s) + %StrDir = "./QF_FP/20170501-Heizmann-UltimateAutomizer/", % 0/2 TO + %StrDir = "./QF_FP/ramalho/",% 0/38 T0 + %StrDir = "./QF_FP/griggio/", % 50 TO en 24s (cvc4 90 en 60s) %StrDir = "./QF_FP/schanda/spark/",% 7 TO %StrDir = "./QF_FP/wintersteiger/", % 0 TO %------------------------------------------------------------------------ - %StrDir = "QF_AX/", + %StrDir = "QF_AX/", % 14/551 %StrDir = "QF_AX/storeinv/", %StrDir = "QF_AX/swap/", %StrDir = "QF_AX/storecomm/", %StrDir = "QF_AX/cvc/", %---------------------------------------------------------------------- %StrDir = "./QF_BV/", - %StrDir = "./QF_BV/20170501-Heizmann-UltimateAutomizer/", + %StrDir = "./QF_BV/20170501-Heizmann-UltimateAutomizer/",% 0/1 - %StrDir = "./QF_BV/20170531-Hansen-Check/", %OK - %StrDir = "./QF_BV/2017-BuchwaldFried/", + %StrDir = "./QF_BV/20170531-Hansen-Check/", % 0/3 + %StrDir = "./QF_BV/2017-BuchwaldFried/", % 3/4 %StrDir = "./QF_BV/2018-Goel-hwbench/", - %StrDir = "./QF_BV/2019-Mann/", - %StrDir = "./QF_BV/2019-Wolf-fmbench/", - %StrDir = "./QF_BV/20200415-Yurichev/", - %StrDir = "./QF_BV/bmc-bv-svcomp14/", - %StrDir = "./QF_BV/calypto/", - %StrDir = "./QF_BV/challenge/", - %StrDir = "./QF_BV/check2/", - %StrDir = "./QF_BV/dwp_formulas/", - %StrDir = "./QF_BV/ecc/", % NULL comparÊ a cvc4 - %StrDir = "./QF_BV/float/", - %StrDir = "./QF_BV/galois/", % 2 BUGS / 4 fich (unknown) - %StrDir = "./QF_BV/gulwani-pldi08/", - %StrDir = "./QF_BV/log-slicing/", - %StrDir = "./QF_BV/mcm/", - %StrDir = "./QF_BV/pipe/", - %StrDir = "./QF_BV/pspace/", - %StrDir = "./QF_BV/rubik/", - %StrDir = "./QF_BV/RWS/", + %StrDir = "./QF_BV/2019-Mann/", %2/2 + %StrDir = "./QF_BV/2019-Wolf-fmbench/", % too big + %StrDir = "./QF_BV/20200415-Yurichev/", % 1/1 + %StrDir = "./QF_BV/bmc-bv-svcomp14/", % 8/24 (8 I) + %StrDir = "./QF_BV/calypto/", % 20/23 + %StrDir = "./QF_BV/challenge/", % 2/2 + %StrDir = "./QF_BV/check2/", % 0/6 + %StrDir = "./QF_BV/dwp_formulas/", % 0/332 + %StrDir = 
"./QF_BV/ecc/", % 0/1 (7 I) NULL comparÊ a cvc4 + %StrDir = "./QF_BV/float/", % 78/89 (120 I) + %StrDir = "./QF_BV/galois/", % 4/4 + %StrDir = "./QF_BV/gulwani-pldi08/", % 5/6 + %StrDir = "./QF_BV/log-slicing/", % 208 (unsupported ou TO sans limite) + %StrDir = "./QF_BV/mcm/", % 108/108 + %StrDir = "./QF_BV/pipe/", % 1/1 + %StrDir = "./QF_BV/pspace/", % 23/86 + %StrDir = "./QF_BV/rubik/", % 6/6 (1 I) + %StrDir = "./QF_BV/RWS/", % 19/19 (1 I) + %StrDir = "./QF_BV/sage/", - %StrDir = "./QF_BV/sage2/", - %StrDir = "./QF_BV/spear/", - %StrDir = "./QF_BV/stp/", - %StrDir = "./QF_BV/stp_samples/", - %StrDir = "./QF_BV/tacas07/", - %StrDir = "./QF_BV/uclid/", - %StrDir = "./QF_BV/uclid_contrib_smtcomp09/", - %StrDir = "./QF_BV/uum/", - %StrDir = "./QF_BV/VS3/", - %StrDir = "./QF_BV/wienand-cav2008/", + %StrDir = "./QF_BV/sage/app1/", % 31/2676 + %StrDir = "./QF_BV/sage/app2/", % 1/1346 + %StrDir = "./QF_BV/sage/app5/", % 2/1103 + %StrDir = "./QF_BV/sage/app6/", % 0/1345 + %StrDir = "./QF_BV/sage/app7/", % 11/4813 (3850 I) + %StrDir = "./QF_BV/sage/app8/", % 15/2756 + %StrDir = "./QF_BV/sage/app9/", % 16/3301 + %StrDir = "./QF_BV/sage/app10/", % 0/51 + %StrDir = "./QF_BV/sage/app11/", % 611 too big + %StrDir = "./QF_BV/sage/app12/", % 12/5784 + %StrDir = "./QF_BV/spear/", % 501/807 (888 I) + %StrDir = "./QF_BV/stp_samples/", % 45/426 + %StrDir = "./QF_BV/tacas07/", % 3/3 (2 I) + %StrDir = "./QF_BV/uclid/", % 0/4 (410 I) + %StrDir = "./QF_BV/uclid_contrib_smtcomp09/", 7/7 + %StrDir = "./QF_BV/uum/", % 8/8 + %StrDir = "./QF_BV/VS3/", % 10/10 (1 I) + %StrDir = "./QF_BV/wienand-cav2008/", % 3/15 (3 I) - %StrDir = "./QF_BV/asp/", - %StrDir = "./QF_BV/bench_ab/",%OK - %StrDir = "./QF_BV/bmc-bv/", - %StrDir = "./QF_BV/bmc-bv-svcomp14/", - %StrDir = "./QF_BV/brummayerbiere/", - %StrDir = "./QF_BV/brummayerbiere2/", - %StrDir = "./QF_BV/brummayerbiere3/", - %StrDir = "./QF_BV/brummayerbiere4/", + %StrDir = "./QF_BV/asp/", % 35/35 (356 I) + %StrDir = "./QF_BV/bench_ab/", % 0/285 + 
%StrDir = "./QF_BV/bmc-bv/", % 7/26 (4 I) + %StrDir = "./QF_BV/bmc-bv-svcomp14/", % 8/24 (8 I) + %StrDir = "./QF_BV/brummayerbiere/", % 38/52 + %StrDir = "./QF_BV/brummayerbiere2/", % 30/65 (22 u) + %StrDir = "./QF_BV/brummayerbiere3/", % 73/73 (6 I) + %StrDir = "./QF_BV/brummayerbiere4/", % 0/10 %StrDir = "./QF_BV/bruttomesso/", % plein de TO - %StrDir = "./QF_BV/crafted/", + %StrDir = "./QF_BV/crafted/", % 6/21 %----------------------------------------------------------------------- - %StrDir = "QF_ABV/bench_ab/", - %StrDir = "QF_ABV/bmc-arrays/", + %StrDir = "QF_ABV/bench_ab/", % 0/117 + %StrDir = "QF_ABV/bmc-arrays/", % 12/20 (19 I) %StrDir = "QF_ABV/brummayerbiere/", %TROP DUR %StrDir = "QF_ABV/btfnt/", %TROP DUR %StrDir = "QF_ABV/calc2/", %TROP DUR @@ -1383,6 +1414,47 @@ smt_unit_test(TO,CI) :- %StrDir = "QF_ABV/jager/", %StrDir = "QF_ABV/klee-selected-smt2/", + + %StrDir = "QF_ALIA/", % 63/114 (1 I) + %StrDir = "QF_ALIA/qlock2/", % 45/52 + %StrDir = "QF_ALIA/cvc/", % 4/5 + %StrDir = "QF_ALIA/UltimateAutomizer/",% 4/8 + %StrDir = "QF_ALIA/UltimateAutomizer2/",% 1/4 (1 I) + %StrDir = "QF_ALIA/piVC/", % 6/15 + %StrDir = "QF_ALIA/ios/", % 0/30 + + %StrDir = "QF_NIA/", + %StrDir = "QF_NIA/20170427-VeryMax/", %Que des TO + %StrDir = "QF_NIA/20170427-VeryMax/ITS/", %Que des TO + %StrDir = "QF_NIA/20170427-VeryMax/CInteger/", %Que des TO + %StrDir = "bugs_CInteger/", % 4/8 + %StrDir = "QF_NIA/AProVE/", % 1484/2406 (3 I) + %StrDir = "bugs_NIA/", % 6/6 + %StrDir = "QF_NIA/calypto/", % 6/117 + %StrDir = "QF_NIA/LassoRanker/",% 52/106 + %StrDir = "QF_NIA/LCTES/",% 2/2 + %StrDir = "QF_NIA/leipzig/", % 126/167 + %StrDir = "QF_NIA/mcm/",% 162/162 (24 I) + %StrDir = "QF_NIA/UltimateAutomizer/",% 0/7 + %StrDir = "QF_NIA/UltimateLassoRanker/",% 0/4 + + %StrDir = "QF_LIA/20180326-Bromberger/more_slacked/CAV_2009_benchmarks/coef-size/smt/size-10/", % 5/6 + %StrDir = "QF_LIA/20180326-Bromberger/more_slacked/cut_lemmas/10-vars/",% 4/7 + %StrDir = 
"QF_LIA/20180326-Bromberger/unbd-sage/unbd010v15c/",% 25/25 + %StrDir = "QF_LIA/cut_lemmas/", % 90/93 + %StrDir = "QF_LIA/wisa/", % 5/5 + %StrDir = "QF_LIA/arctic-matrix/",% 5/5 (95 I) + %StrDir = "QF_LIA/check/", % 0/5 + %StrDir = "QF_LIA/RTCL/", % 0/2 + %StrDir = "QF_LIA/miplib2003/", % too big + %StrDir = "QF_LIA/rings_preprocessed/", % 288/294 + %StrDir = "QF_LIA/mathsat/", % 51/91 + %StrDir = "QF_LIA/CAV_2009_benchmarks/smt/", % 508/540 + %StrDir = "QF_LIA/rings/", % 278/294 + %StrDir = "QF_LIA/fft/", % 7/9 + + %StrDir = "QF_ANIA/",% 0/1 (7 I) + os_file_name(StrDir,OS_Examples), ((StrDir == "./QF_FP/wintersteiger/"; StrDir == "./QF_BVFP/schanda/spark/") @@ -1392,18 +1464,30 @@ smt_unit_test(TO,CI) :- (SubdirList0 == [] -> SubdirList = ["."] ; SubdirList = SubdirList0), + MaxSize = 300000, % pour QF_FP/griggio (foreach(Dir,SubdirList), fromto([],IS,OS,SmtFileList), - param(StrDir) do + fromto(0,ITB,OTB,TooBig), + fromto(0,IMS,OMS,MaxSizeSeen), + param(StrDir,MaxSize) do append_strings(StrDir,Dir,SubDir), os_file_name(SubDir,OS_SubDir), read_directory(OS_SubDir, "*.smt2", _, SmtList), (foreach(F,SmtList), fromto(IS,I,O,OS), - param(SubDir) do + fromto(ITB,IITB,OITB,OTB), + fromto(IMS,IIMS,OOMS,OMS), + param(SubDir,MaxSize) do concat_string([SubDir,"/",F],PF), os_file_name(PF,OS_PF), - O = [OS_PF|I])), + get_file_info(OS_PF,size,FSize), + OOMS is max(IIMS,FSize), + (FSize > MaxSize -> + % Trop gros + OITB is IITB + 1, + O = I + ; OITB = IITB, + O = [OS_PF|I]))), initNbCodes, setval(nbFile,0), setval(nbTO,0), @@ -1425,16 +1509,14 @@ smt_unit_test(TO,CI) :- getval(nbFile,NbF), getval(nbTO,NbTO), writeln(output,F:NbF/NFs-'TO':NbTO), - garbage_collect, setval(cpt_solve,0)@colibri, setval(use_delta,UD)@eclipse, setval(use_simplex,US)@eclipse, + + %set_flag(occur_check,on), + % On passe en mode comptage du nombre de steps - setval(step_stats,1)@eclipse, - setval(step_limit,0)@eclipse, - setval(nb_steps,0)@eclipse, - setval(simplex_steps,0)@eclipse, - 
setval(show_steps,1)@eclipse, + show_stats, not not (seed(0), smt_solve_bis(Test,F,TO,Code), setval(init_code,Code)), @@ -1446,10 +1528,17 @@ smt_unit_test(TO,CI) :- ; (Code == 3 -> incval(nbTO) ; true)), + garbage_collect, fail ; true), getval(nbFile,NbFile), writeln(output,"Files":NbFile), + (TooBig > 0 -> + write(output,"Ignored Too Big Files (> "), + write(output,MaxSize), + writeln(output,"k)":TooBig), + writeln(output,"MaxSizeSeen":MaxSizeSeen) + ; true), getval(nb0,Nb0), writeln(output,"Unsat":Nb0), getval(nb1,Nb1), @@ -1468,6 +1557,13 @@ smt_unit_test(TO,CI) :- (foreach(B,Bugs) do writeln(output,B))). +show_stats :- + setval(step_stats,1)@eclipse, + setval(step_limit,0)@eclipse, + setval(nb_steps,0)@eclipse, + setval(simplex_steps,0)@eclipse, + setval(show_steps,1)@eclipse. + smt_unit_test1 :- setval(bug,[]), setval(cpt_solve,0)@colibri, diff --git a/Src/COLIBRI/colibri_tests/colibri/tests/sat/zero_neg_plus.smt2 b/Src/COLIBRI/colibri_tests/colibri/tests/sat/zero_neg_plus.smt2 new file mode 100644 index 0000000000000000000000000000000000000000..4e39c693fda8a512e856325e12d3e18cd0c0e70e --- /dev/null +++ b/Src/COLIBRI/colibri_tests/colibri/tests/sat/zero_neg_plus.smt2 @@ -0,0 +1,18 @@ +(set-info :status sat) +;; produced by colibri.drv ;; +(set-logic ALL) +(set-info :smt-lib-version 2.6) + +(declare-const zero_plus Float32) + +(declare-const zero_neg Float32) + +(declare-fun magic (Float32) Int) + +(assert (fp.eq zero_plus zero_neg)) + +(assert (distinct (magic zero_neg) 0)) + +(assert (not (not (= (magic zero_plus) 0)))) + +(check-sat) diff --git a/Src/COLIBRI/lib/v5/x86_64_linux/simplex_ocaml.so b/Src/COLIBRI/lib/v5/x86_64_linux/simplex_ocaml.so index 4d7ac03419986218d31ea13c44fb060f0b3b6ff0..bec729e0ca7dbb18e8884ee510ea2215fcb5f331 100755 Binary files a/Src/COLIBRI/lib/v5/x86_64_linux/simplex_ocaml.so and b/Src/COLIBRI/lib/v5/x86_64_linux/simplex_ocaml.so differ diff --git a/Src/COLIBRI/lib/v7/x86_64_linux/simplex_ocaml.so 
b/Src/COLIBRI/lib/v7/x86_64_linux/simplex_ocaml.so index 00d3dd117f59319695bcb4c6703aa8f53e51fdf2..e21f065b1a885dc8f825299fd60a0ca254ed85f7 100755 Binary files a/Src/COLIBRI/lib/v7/x86_64_linux/simplex_ocaml.so and b/Src/COLIBRI/lib/v7/x86_64_linux/simplex_ocaml.so differ diff --git a/Src/COLIBRI/notify.pl b/Src/COLIBRI/notify.pl index 72d457b023d9b6f72ea7471407ec70d699896eec..1840361c3a7b547a3703c44232340546f327b44f 100755 --- a/Src/COLIBRI/notify.pl +++ b/Src/COLIBRI/notify.pl @@ -330,46 +330,47 @@ sleeping_suspensions_sharing_other_vars(CstrX,BoundX,CstrY,BoundY,_Y,L) :- :- mode sleeping_suspensions_and_vars(++,++,-,+,-,-,-). sleeping_suspensions_and_vars([],LS1,PLS,AV,SV,[],NLS1) :- - sleeping_suspensions_and_vars1(LS1,PLS,AV,SV,NLS1). + sleeping_suspensions_and_vars1(LS1,PLS,AV,SV,NLS1). sleeping_suspensions_and_vars([S|LS],LS1,PLS,AV,SV,NLS,NLS1) :- - (get_sleeping_suspension_vars(S,V) -> - PLS = [(S,V)|EPLS], - NAV = [V|AV], - NLS = [S|ENLS], - sleeping_suspensions_and_vars(LS,LS1,EPLS,NAV,SV,ENLS,NLS1) - ; sleeping_suspensions_and_vars(LS,LS1,PLS,AV,SV,NLS,NLS1)). + (get_sleeping_suspension_vars(S,V) -> + PLS = [(S,V)|EPLS], + NAV = [V|AV], + NLS = [S|ENLS], + sleeping_suspensions_and_vars(LS,LS1,EPLS,NAV,SV,ENLS,NLS1) + ; sleeping_suspensions_and_vars(LS,LS1,PLS,AV,SV,NLS,NLS1)). + :- mode sleeping_suspensions_and_vars1(++,-,+,-,-). sleeping_suspensions_and_vars1([],[],AccuV,SV,[]) :- - term_variables(AccuV,SV). + term_variables(AccuV,SV). sleeping_suspensions_and_vars1([Susp|LSusp],PairLSusp,AccuV,SV,NLSusp) :- - (get_sleeping_suspension_vars(Susp,V) -> - PairLSusp = [(Susp,V)|EPairLSusp], - NAccuV = [V|AccuV], - NLSusp = [Susp|ENLSusp], - sleeping_suspensions_and_vars1(LSusp,EPairLSusp,NAccuV,SV,ENLSusp) - ; sleeping_suspensions_and_vars1(LSusp,PairLSusp,AccuV,SV,NLSusp)). 
+ (get_sleeping_suspension_vars(Susp,V) -> + PairLSusp = [(Susp,V)|EPairLSusp], + NAccuV = [V|AccuV], + NLSusp = [Susp|ENLSusp], + sleeping_suspensions_and_vars1(LSusp,EPairLSusp,NAccuV,SV,ENLSusp) + ; sleeping_suspensions_and_vars1(LSusp,PairLSusp,AccuV,SV,NLSusp)). :- mode get_sleeping_suspension_vars(++,-). get_sleeping_suspension_vars(S,V) :- - get_suspension_data(S,goal,G), - term_variables(G,V). + get_suspension_data(S,goal,G), + term_variables(G,V). :- mode sleeping_suspensions_sharing_vars(++,++,++,?,?). sleeping_suspensions_sharing_vars([],_, L,L, []). sleeping_suspensions_sharing_vars([V|LV],PL, L,NL, LS) :- - sleeping_suspensions_sharing_var(V,PL,NPL, L,L1, LS,EndLS), - sleeping_suspensions_sharing_vars(LV,NPL, L1,NL, EndLS). + sleeping_suspensions_sharing_var(V,PL,NPL, L,L1, LS,EndLS), + sleeping_suspensions_sharing_vars(LV,NPL, L1,NL, EndLS). :- mode sleeping_suspensions_sharing_var(?,++,-, ++,-, -,-). sleeping_suspensions_sharing_var(V,PL,NPL, L,NL, LS,EndLS) :- - ((member_begin_end((S,VS),PL,NPL,EndNPL,EndPL), - occurs(V,VS)) - -> - LS = [S|LS1], - sleeping_suspensions_sharing_var(V,EndPL,EndNPL, [S|L],NL, LS1,EndLS) - ; NPL = PL, - NL = L, - LS = EndLS). + ((member_begin_end((S,VS),PL,NPL,EndNPL,EndPL), + occurs(V,VS)) + -> + LS = [S|LS1], + sleeping_suspensions_sharing_var(V,EndPL,EndNPL, [S|L],NL, LS1,EndLS) + ; NPL = PL, + NL = L, + LS = EndLS). diff --git a/Src/COLIBRI/realarith.pl b/Src/COLIBRI/realarith.pl index a0310370c3372b0a4d4fb46e016dbbd46fbe73f3..98979d0520ede166597183d6d225f71df6f37df6 100644 --- a/Src/COLIBRI/realarith.pl +++ b/Src/COLIBRI/realarith.pl @@ -10190,19 +10190,17 @@ mult_real_sign(real,A,B,C) :- !, prod_sign(SB,SC,SA), set_sign(real,A,SA) ; true))). 
+% float/double mult_real_sign(Type,A,B,C) :- (get_sign(A,SA) -> (get_sign(B,SB) -> prod_sign(SA,SB,SC), set_sign(Type,C,SC) - ; ((not_zero(C), - get_sign(C,SC)) - -> + ; (get_sign(C,SC) -> prod_sign(SA,SC,SB), set_sign(Type,B,SB) ; true)) - ; ((not_zero(C), - get_sign(B,SB), + ; ((get_sign(B,SB), get_sign(C,SC)) -> prod_sign(SB,SC,SA), @@ -15911,7 +15909,7 @@ power_real(Type,A,2,B) ?- !, power_real(Type,A,N,B) :- integer(N), N >= 0, - N =< 2^53, + %N =< 2^53, mreal:set_typed_intervals(A,Type,[-1.0Inf..1.0Inf]), mreal:set_typed_intervals(B,Type,[-1.0Inf..1.0Inf]), ensure_not_NaN((A,B)), @@ -15923,7 +15921,9 @@ power_real(Type,A,N,B) :- protected_unify(B = 1.0) ; (N == 1 -> protected_unify(A = B) - ; same_float_int_number_status(Type,A,B), + ; (is_float_int_number(A) -> + launch_float_int_number(B) + ; true), power_real_interval(Type,A,N,B), (mod(N,2,0) -> mreal:set_typed_intervals(B,Type,[0.0..1.0Inf]) @@ -15933,7 +15933,9 @@ power_real(Type,A,N,B) :- power_real1(Type,A,N,B) :- get_priority(Prio), set_priority(1), - same_float_int_number_status(Type,A,B), + (is_float_int_number(A) -> + launch_float_int_number(B) + ; true), power_real_bis(Type,A,N,B), set_priority(Prio), wake_if_other_scheduled(Prio). @@ -16843,7 +16845,9 @@ square_real(Type,A,B) :- ; set_lazy_domain(Type,A), set_lazy_domain(Type,B), check_no_float_error(Type,(A,B)), - same_float_int_number_status(Type,A,B), + (is_float_int_number(A) -> + launch_float_int_number(B) + ; true), square_real_interval(Type,A,B), square_real1(Type,A,B)). 
@@ -16887,7 +16891,9 @@ square_real1(Type,A,B) :- square_real_bis(Type,Val1,Val) :- save_cstr_suspensions((Val1,Val)), - same_float_int_number_status(Type,Val1,Val), + (is_float_int_number(Val1) -> + launch_float_int_number(Val) + ; true), mreal:dvar_remove_smaller(Val,0.0), square_real_2_args_equal(Type,Val1,Val,Continue0), (var(Continue0) -> diff --git a/Src/COLIBRI/simplex_ocaml/Dockerfile b/Src/COLIBRI/simplex_ocaml/Dockerfile index 42786f45b7073dacde5b561848a45082f2d72705..4e3b7f80ed6be1840c3c8897e5abf7bc952876bc 100644 --- a/Src/COLIBRI/simplex_ocaml/Dockerfile +++ b/Src/COLIBRI/simplex_ocaml/Dockerfile @@ -3,4 +3,6 @@ FROM ocaml/opam@sha256:e570e5dd74bb4986e022d5c25fe42579288d11fb0b60df13794632a8f RUN [ "opam", "depext", "--install", "dune.2.7.1", "ocplib-simplex.0.4", "zarith.1.9.1", "parsexp.v0.14.0", "menhir.20200624", "fmt.0.8.8", "spelll.0.3", "uutf.1.0.2", "gen.0.5.3" ] +RUN [ "opam", "install", "pp_loc" ] + RUN [ "mkdir", "/home/opam/build" ] diff --git a/Src/COLIBRI/simplex_ocaml/Makefile b/Src/COLIBRI/simplex_ocaml/Makefile index 1df2bd3e37b54863dd174cbaf2652ec1f11fc38c..32bebcdbd0bca7eeb17d53bd5d73d43151639b44 100644 --- a/Src/COLIBRI/simplex_ocaml/Makefile +++ b/Src/COLIBRI/simplex_ocaml/Makefile @@ -6,9 +6,6 @@ export ECLIPSEBIN all: dune build simplex_ocaml_mod.so simplex_ocaml_mod_v7.so simplex_ocaml.pl - # cp _build/default/simplex_ocaml.pl .. 
- # cp _build/default/simplex_ocaml_mod.so ../lib/v5/x86_64_linux/simplex_ocaml.so - # cp _build/default/simplex_ocaml_mod_v7.so ../lib/v7/x86_64_linux/simplex_ocaml.so all_version: dune build --workspace docker-dune-workspace simplex_ocaml_mod.so simplex_ocaml_mod_v7.so simplex_ocaml.pl diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/.gitignore b/Src/COLIBRI/simplex_ocaml/dolmen/.gitignore index 4726e721e4062ccebfe71ee49476ce47503c1412..e320f22e0756905a22522d2243b11a7140eece44 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/.gitignore +++ b/Src/COLIBRI/simplex_ocaml/dolmen/.gitignore @@ -1,5 +1,6 @@ -# Source directory (doc, build dir and bin) +# Generated directories _build/ +_coverage/ src/_build/ # Generated files @@ -7,10 +8,14 @@ src/_build/ *.install dolmen doc/index.html +syntax.messages.updated # ocamlbuild targets *.byte *.native +# opam local switch +_opam + # swap files *.swp diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/HACKING.md b/Src/COLIBRI/simplex_ocaml/dolmen/HACKING.md index 87697951201ad8f0f51ca404004ed416ccbdd65b..766b395e5b502adb5fc9ef6d7334b430c169c4c5 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/HACKING.md +++ b/Src/COLIBRI/simplex_ocaml/dolmen/HACKING.md @@ -4,60 +4,180 @@ This file is incomplete, but may have interesting tips if you want to start hacking on dolmen. If you're having troubles, don't hesitate to contact me directly, ^^ -## Hand-written error messages +## Build locally + +You need to have [ocaml](https://ocaml.org/) installed to compile dolmen. +Additionally, some ocaml dependencies are needed. The easiest way to +get a working ocaml installation, and install the dependencies is to +use [opam](https://opam.ocaml.org/). +See [this page](https://opam.ocaml.org/doc/Install.html#Using-your-distribution-39-s-package-system) +for information about how to install opam on your system. + +Once you have opam installed, you can use the following commands to isntall +the needed dependencies to build dolmen: +``` +opam pin add -n . 
+opam install --deps-only . +``` + +You can then build the project with +``` +make +``` + + +## Tests + +Most tests are setup through dune files, associated with tune "runtest" alias. + +To run the tests: +``` +make test +``` + +To promote the results of tests (if e.g. you have changed some error message, +or added new tests): +``` +make test-promote +``` + + +### Unit tests + +There a few (though not that many), unit tests. These are basically to test +internal functions of the library, in order to check internal assumptions +that may otherwise not be easy to check with complete parsing/typing tests. + +Follow the example in `tests/unit/bitv/` to add more unit tests. + + +### Binary tests + +There are quite a lot of tests for parsing, typing and other features that make +use of the `dolmen` binary to check the behaviour of the whole library. Some of +these tests verify that some well-formed inputs are correctly parsed/typed, but +most of the tests are there to check the behaviour in case of an error. It is +planned that in the long term, each error message should have a corresponding +test case (see coverage tests). + +These tests are found in the subfolders `tests/` (except for `unit` which +is reserved for unit tests of the library functions). These tests are run by +dune, and a script is used to generate the dune files, so that adding a new +test is as easy as creating a file with an extension recognized by dolmen +under the `tests` folder (in any subfolder, since they are scanned recursively +by the script). 
+ +The check performed for each of these test files is the following: +- given a test file `foo.ext`, there must exists a `foo.expected` corresponding + file (if it does not exists, it will be created as an empty file by the + script) +- for both incremental mode and full mode, do the following + + run dolmen on the input file, using additional options from the flags.dune + file (one option per line, with no space) + + if `foo.expected` is empty, check that dolmen exited with error code 0, + else, check that dolmen exited with error code 0 or 1 + + compare the output of dolmen with the `foo.expected` file + +### Coverage tests + +This is done using `bisect_ppx`. Once you have it installed (usually via opam), +just run `make coverage` and it should open the coverage report in a browser +window for you (via xdg-open). + + +## Hand-written syntax error messages Dolmen now has support for customizing syntax error messages, using menhir's mechanism for syntax errors. If you want to contribute and/or help, you might -first want to read [menhir's manual](http://cambium.inria.fr/~fpottier/menhir/manual.html#sec67) -about the error mecanism (and/or LR(1) parsers in general if you're not -familiar with them). +first want to read [menhir's +manual](http://cambium.inria.fr/~fpottier/menhir/manual.html#sec67) about the +error mecanism (and/or LR(1) parsers in general if you're not familiar with +them). + +### Messages files and build rules ### Adding new error messages Adding new error messages is as simple as editing the `syntax.messages` -file, and then re-building the project. +file, and then re-building the project. 
Each error message in the +`syntax.messages` is composed of the following: +- an input sentence (in the form of a list of tokens) that makes the parser + automaton go into some error state +- a comment detailing the state of the parser at that point, most notably + the production being reduced/recognized; this, together with the `parseFoo.mly` + file, should help one understand why the error happens, and what token would + allow the parser to continue at that point +- the error message proper. Dolmen currently has the following convention for how + messages are written: +``` +XXX +production parsed (on a single line) +what is expected at that point, +possibly on multiple lines +``` + with on the first line an error message number (typically with 3 digits, i.e. `042`), + on the second line the name of the construction being parsed (e.g. a term, a sort, + a function definition, etc...), and finally on the lines after that a description + of what would have been acceptable inputs (warning: there must be no blank lines, + else menhir will have trouble parsing the file). -### Messages files and build rules +Each syntax error must be exercised by at least one test file, typically in the +following file: +`tests/parsing/language/version/errors/XXX_some_informal_description_of_the_error.ext`. +It is always better to have more than one test case, for instance, by varying the token +that produces the error (in the `syntax.messages` file, the last token of each sentence +is assured to produce an error, but it is most of the time not the only one; dolmen already +has a mechanism to inform the user of what token was read, so the error message written +should be agnostic of what the bad token is). + +Note the following: dolmen currently has two modes of reading files: incremental and full. 
+In full mode, the whole file is read and parsed, and then each sentence sent for processing, +whereas in incremental mode, each sentence is read, parsed and processed before the next one +is read. Due to this, all parsers in dolmen have two main entry points: one entry point for +parsing a whole file, and one for parsing a single input sentence[1]. This will often +make it so that there are the same sentences twice in a `*.messages` file, once for +the entry point of the file and once for the entry point of a single sentence. These +two error cases must share the exact same error message (i.e. same error number, production +parsed, etc...). This coherence is automatically and always checked by the tests which use +both incremental mode and full mode and expect the same output in both cases. -Not all languages in dolmen currently have the build rules needed to have -customized error messages, but it should be fairly easy to add them. For that, -you'll likely want to use the smtlib's support as reference. Concretely, for -a language `foobar`, this means comparing `src/language/foobar/dune` with -`src/language/smtlib2/v2.6/dune` and copying the missing parts. - -Once setup, the build for error messages define a few different build targets, -that are described later. However, due to some limitations of dune (and/or my -own knowledge of it), not everything has been automated, but the manual -interventions should be limited to copying a file now and then. The files related -to error messages are the following: -- `syntax.messages` the checked-out file containing the customized/hand-written - error messages. It must follow the menhir `.messages` file format in order - for the build to succeed. -- `new.messages`: a useful tagret that allows one to ask menhir to generate - a template `.messages` file. This target should only be used once at the - start of setting up error messages, in order to copy it into a checked-out - `syntax.messages`. 
The `new.messages` file is not meant to be committed, - it is only a temporary file promoted by dune into the build treee for - convenience. -- `updated.messages`: a target to ask menhir to generate an updated - `.messages` file after a change in then parser/grammar of the language. - It should retain all of the messages from `syntax.messages` that are still - valid/relevant for the updated grammar, easing the process of updating the - error messages when updating a grammar. - -Additionally, the dune file defines rule so that when running the tests, -menhir checks that the `syntax.messages` file is up-to-date with the -current syntax. +On syntax error message numbers (i.e. the `XXX` in error messages): +- for uniformity, they are all on 3 digits, which should be enough for all reasonable languages + (more than 1000 hand-written error messages seem a lot), but there should be no real limitation +- as much as possible, the error numbers should be contiguous starting from 0, however + because editing messages by hand is already quite tiresome (and changing an error number + means renaming test files that exercise it), it's not a problem if some numbers + are unused/skipped + +[1]: sadly, some languages (currently only the alt-ergo native format) cannot support + incremental parsing because it is not possible to know when a sentence ends, except + when another sentence begins. Concretely, this means such languages would have + end-of-stream conflicts according to menhir. For those languages, incremental mode + is not supported, but this should be an exception. ### Changing the grammar -When changing the grammar/parser for a language that has hand-writtent error -messages, you'll need to update the `syntax.messages` file according to the -new grammar. 
The simplest for that is to ask dune to build the `updated.messages` -file for the givne language, and then copy it to overwrite the current -`syntax.messages` file (this process can not easily be automated by dune because -of the circular dependency it creates: `syntax.messages` actually depends on itself -in this case). +When changing the grammar/parser for a language that has hand-written error +messages, you'll need to update the `syntax.messages` file according to the new +grammar. To do so, the simplest way is to run `dune build @runtest`, which will +check that the `syntax.messages` is correct with regards to the new grammar, +and if not, will print a diff to update the `syntax.messages` file. Running +`dune build @runtest --promote` will promote the diff and change the +`syntax.messages` file. + + +## Release workflow +Release checklist/workflow: +- update version number in `VERSION` +- update `CHANGES.md` with the new version number +- run `dune-release lint` +- run `dune-release tag` +- run `dune-release distrib` +- run `dune-release publish distrib` +- run `dune-release opam pkg` +- run `dune-release opam submit` +- add line for new version in `doc/index.txt` +- run `./doc/script/release` to update the doc on github-pages diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/Makefile b/Src/COLIBRI/simplex_ocaml/dolmen/Makefile index 705b15d002c684eeb3e5dddc929f16821a904846..9f56dff7f5e0e8ca0eb7fe6b603959c95dde214d 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/Makefile +++ b/Src/COLIBRI/simplex_ocaml/dolmen/Makefile @@ -1,31 +1,42 @@ # copyright (c) 2014, guillaume bury -COMP=dune FLAGS= BINDIR=_build/install/default/bin -all: dune +all: build watch: dune build $(FLAGS) -w @check -dune: +build: dune build $(FLAGS) @install +top: + dune utop + doc: dune build $(FLAGS) @doc -test: dune +gentests: $(wildcard tests/**/*) dune exec -- tools/gentests.exe tests/ + +test: gentests dune build $(FLAGS) @runtest -test-promote: - dune exec -- tests/gentests.exe tests/ 
+promote: gentests -dune build $(FLAGS) @runtest dune promote $(FLAGS) +coverage: + $(MAKE) clean + dune runtest --instrument-with bisect_ppx --force + bisect-ppx-report html + @xdg-open _coverage/index.html + bisect-ppx-report summary clean: - $(COMP) clean + find . -name '*.coverage' | xargs rm -f + rm -rf _coverage + dune clean -.PHONY: all watch dune bin test doc clean +.PHONY: all watch dune doc gentests test promote coverage clean diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/README.md b/Src/COLIBRI/simplex_ocaml/dolmen/README.md index 2837f56672df7bf04e33bf63dfdbc415e7020837..7d93fd7a2692f6f4c8bf38729dfb54b2fac43467 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/README.md +++ b/Src/COLIBRI/simplex_ocaml/dolmen/README.md @@ -1,12 +1,46 @@ -# Dolmen [](https://travis-ci.org/Gbury/dolmen) +# Dolmen    -A library providing flexible parsers for input languages. +A library providing flexible parsers and typecheckers for languages used in automated deduction. -## LICENSE +LICENSE +------- BSD2, see file LICENSE. -## Goals +Documentation +------------- + +Online documentation for the libraries can be found at <http://gbury.github.io/dolmen>. +There is also [a tutorial](https://github.com/Gbury/dolmen/tree/master/doc/tuto.md). + + +Installation +------------ + +The main method of installation is to use [opam](https://opam.ocaml.org/). +See [this page](https://opam.ocaml.org/doc/Install.html#Using-your-distribution-39-s-package-system) +for information about how to install opam on your system. Once you have installed +and configured opam, you can use the following command to install the dolmen +cli and lsp binaries: + +``` +opam install dolmen_bin dolmen_lsp +``` + +Additionally, pre-built binaries for Linux and MacOs can be found +on the release pages (starting from the v0.6 release, see +[the latest release](https://github.com/Gbury/dolmen/releases/latest)). 
+ + +The libraries can be installed using: + +``` +opam install dolmen dolmen_type dolmen_loop +``` + + +Goals +----- The Dolmen project aims at providing an assortiment of tools to help handle languages that are used in automated deduction and formal logic. @@ -19,20 +53,18 @@ More precisely, the Dolmen project provides: - A binary (which is using the above libraries), to parse and typecheck input files. This could be used to check a file against its language specification, and/or obtain detailed errors. See [the bin doc](https://github.com/Gbury/dolmen/tree/master/doc/bin.md) -- A LSP server so that the feature sof the above binary can also be used inside +- A LSP server so that the features of the above binary can also be used inside your favorite editor. See [the dolmen lsp doc](https://github.com/Gbury/dolmen/tree/master/doc/lsp.md) -## Documentation -Online documentation for the libraries can be found at <http://gbury.github.io/dolmen>. -There is also [a tutorial](https://github.com/Gbury/dolmen/tree/master/doc/tuto.md). - -## Supported languages +Supported languages +------------------- Currently the following parsers are working: +- ae (alt-ergo format) (parsing only) - dimacs - iCNF - smtlib @@ -42,33 +74,7 @@ Currently the following parsers are working: The following parsers are either work in progress, or soon to be work in progress: -- alt-ergo native format +- alt-ergo format - coq - dedukti -## Build & Install - -You need [opam](https://opam.ocaml.org/) to install the developpement version of dolmen. -You can installations instructions for opam [here](https://opam.ocaml.org/doc/Install.html). 
- -To install all packages provided by dolmen: - - opam pin add https://github.com/Gbury/dolmen.git - -Manually, you'll need to first install the dependencies needed by dolmen, the easiest way -is to pin your local copy of dolmen like this: - - # At the root of your local dolmen repository - opam pin add --no-action ./ - opam install --deps-only dolmen dolmen_type dolmen_loop dolmen_bin dolmen_lsp - -Once the dependencies have been installed, you can build the project with: - - make - -## Future work - -- Adding new languages -- Improve error reporting -- Dynamic detection of input language - diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/VERSION b/Src/COLIBRI/simplex_ocaml/dolmen/VERSION index bd73f47072b1fe4b9914ec14a7f6d47fcc8f816a..5a2a5806df6e909afe3609b5706cb1012913ca0e 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/VERSION +++ b/Src/COLIBRI/simplex_ocaml/dolmen/VERSION @@ -1 +1 @@ -0.4 +0.6 diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/doc/scripts/index.html b/Src/COLIBRI/simplex_ocaml/dolmen/doc/scripts/index.html index 32624ed2252f9398332199b96cb4ba4d0bc4997a..0d02e2aaad6433f986adb45e74c59a62574686eb 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/doc/scripts/index.html +++ b/Src/COLIBRI/simplex_ocaml/dolmen/doc/scripts/index.html @@ -4,7 +4,7 @@ <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en"> <head> <meta http-equiv="Content-Type" content="application/xhtml+xml; charset=UTF-8" /> -<meta name="generator" content="AsciiDoc 8.6.10" /> +<meta name="generator" content="AsciiDoc 9.1.0" /> <title>Dolmen</title> <style type="text/css"> /* Shared CSS for AsciiDoc xhtml11 and html5 backends */ @@ -436,7 +436,7 @@ thead, p.table.header { p.table { margin-top: 0; } -/* Because the table frame attribute is overriden by CSS in most browsers. */ +/* Because the table frame attribute is overridden by CSS in most browsers. 
*/ div.tableblock > table[frame="void"] { border-style: none; } @@ -762,6 +762,16 @@ asciidoc.install(); <a href="0.4">0.4</a> </p> </li> +<li> +<p> +<a href="0.5">0.5</a> +</p> +</li> +<li> +<p> +<a href="0.6">0.6</a> +</p> +</li> </ul></div> </div> </div> @@ -770,7 +780,7 @@ asciidoc.install(); <div id="footer"> <div id="footer-text"> Last updated - 2019-01-14 17:05:24 CET + 2021-04-30 15:03:59 CEST </div> </div> </body> diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/doc/scripts/index.txt b/Src/COLIBRI/simplex_ocaml/dolmen/doc/scripts/index.txt index 4f0245927d6f8bb40b1e20228dc9731964775ea4..d457fccf5ecf60757e4ceee9a6fda2604bdeac3b 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/doc/scripts/index.txt +++ b/Src/COLIBRI/simplex_ocaml/dolmen/doc/scripts/index.txt @@ -7,4 +7,6 @@ Guillaume Bury <guillaume.bury@gmail.com> * link:0.1[] * link:0.2[] * link:0.4[] +* link:0.5[] +* link:0.6[] diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/doc/scripts/release b/Src/COLIBRI/simplex_ocaml/dolmen/doc/scripts/release index a0a605a1cad7f4ac9a2763a8bbf7fefb716deaba..e23a4b6161e0d74e09d5a2a10009b19f376e6e99 100755 --- a/Src/COLIBRI/simplex_ocaml/dolmen/doc/scripts/release +++ b/Src/COLIBRI/simplex_ocaml/dolmen/doc/scripts/release @@ -8,7 +8,9 @@ git checkout master # Generate documentation make doc -(cd doc && asciidoc index.txt) +(cd doc/scripts && asciidoc index.txt) +git add doc/scripts/index.html +git commit -m "Update doc index" # Checkout gh-pages git checkout gh-pages @@ -18,11 +20,11 @@ git pull mkdir -p ./$version # Copy doc to the right locations -cp doc/index.html ./ +git show master:doc/scripts/index.html &> index.html cp -r _build/default/_doc/_html/* ./$version/ # Add potentially new pages -git add ./$version/* +git add ./$version git add ./index.html # Commit it all & push diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/dolmen.opam b/Src/COLIBRI/simplex_ocaml/dolmen/dolmen.opam index ce3297dea3a17cfc908b2c6c32d8d979ea3957e5..d0091cc2c9271eb953a1734ee5bb9c5bb9b9fc1b 100644 
--- a/Src/COLIBRI/simplex_ocaml/dolmen/dolmen.opam +++ b/Src/COLIBRI/simplex_ocaml/dolmen/dolmen.opam @@ -3,29 +3,31 @@ name: "dolmen" version: "dev" maintainer: "Guillaume Bury <guillaume.bury@gmail.com>" authors: "Guillaume Bury <guillaume.bury@gmail.com>" -license: "BSD-2-clauses" +license: "BSD-2-Clause" build: [ - ["dune" "subst"] {pinned} + ["dune" "subst"] {dev} ["dune" "build" "-p" name "-j" jobs] ] depends: [ - "ocaml" {>= "4.02.3"} - "menhir" {>= "20180703"} - "dune" { >= "2.0" } + "ocaml" {>= "4.08"} + "menhir" {>= "20180703" & ( ! with-test | ( >= "20201201" & < "20211125" ) ) } + "dune" { >= "2.7" } "fmt" { >= "0.8.7" } "seq" + "odoc" { with-doc } + "qcheck" { with-test } ] -tags: [ "parser" "tptp" "logic" "smtlib" "dimacs" ] +tags: [ "parser" "logic" "tptp" "smtlib" "dimacs" ] homepage: "https://github.com/Gbury/dolmen" dev-repo: "git+https://github.com/Gbury/dolmen.git" bug-reports: "https://github.com/Gbury/dolmen/issues" doc: "http://gbury.github.io/dolmen" -synopsis: "A parser library" +synopsis: "A parser library for automated deduction" description: "Dolmen is a parser library. It currently targets languages used in automated theorem provers, -but may be extended ot other domains. +but may be extended to other domains. 
Dolmen provides functors that takes as arguments a representation of terms and statements, and returns a module that can parse files (or streams of tokens) into the provided representation diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_bin.opam b/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_bin.opam index 0b7efea914f5d97e478324a9501efcb4b20574ac..9288a26b1904a09161e857468d70e9427faa5a3c 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_bin.opam +++ b/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_bin.opam @@ -1,25 +1,29 @@ -opam-version: "1.2" +opam-version: "2.0" name: "dolmen_bin" version: "dev" -author: "Guillaume Bury" -maintainer: "guillaume.bury@gmail.com" -license: "BSD-2-clauses" +maintainer: "Guillaume Bury <guillaume.bury@gmail.com>" +authors: "Guillaume Bury <guillaume.bury@gmail.com>" +license: "BSD-2-Clause" build: [ - ["dune" "subst"] {pinned} + ["dune" "subst"] {dev} ["dune" "build" "-p" name "-j" jobs] ] depends: [ "ocaml" {>= "4.02.3"} - "dolmen" - "dolmen_type" - "dolmen_loop" - "dune" + "dolmen" {= version } + "dolmen_type" {= version } + "dolmen_loop" {= version } + "dune" { >= "2.7" } "fmt" "cmdliner" + "odoc" { with-doc } ] -tags: [ "logic" "computation" "automated theorem prover" "lsp" "language server protocol"] +depopts: [ + "memtrace" +] +tags: [ "logic" "computation" "automated theorem prover" "logic" "smtlib" "tptp"] homepage: "https://github.com/Gbury/dolmen" -dev-repo: "https://github.com/Gbury/dolmen.git" +dev-repo: "git+https://github.com/Gbury/dolmen.git" bug-reports: "https://github.com/Gbury/dolmen/issues" doc: "http://gbury.github.io/dolmen" diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_loop.opam b/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_loop.opam index 9e29c419764116fede88f7c8dd414902ba4e37c0..201510d53a00e964a9766bd7018f5b7b3fdd3841 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_loop.opam +++ b/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_loop.opam @@ -1,23 +1,25 @@ -opam-version: "1.2" +opam-version: "2.0" name: 
"dolmen_loop" version: "dev" -author: "Guillaume Bury" -maintainer: "guillaume.bury@gmail.com" -license: "BSD-2-clauses" +maintainer: "Guillaume Bury <guillaume.bury@gmail.com>" +authors: "Guillaume Bury <guillaume.bury@gmail.com>" +license: "BSD-2-Clause" build: [ - ["dune" "subst"] {pinned} + ["dune" "subst"] {dev} ["dune" "build" "-p" name "-j" jobs] ] depends: [ "ocaml" {>= "4.02.3"} - "dolmen" - "dolmen_type" + "dolmen" {= version } + "dolmen_type" {= version } + "dune" { >= "2.7" } "gen" - "dune" + "odoc" { with-doc } + "pp_loc" ] tags: [ "logic" "computation" "automated theorem prover" ] homepage: "https://github.com/Gbury/dolmen" -dev-repo: "https://github.com/Gbury/dolmen.git" +dev-repo: "git+https://github.com/Gbury/dolmen.git" bug-reports: "https://github.com/Gbury/dolmen/issues" doc: "http://gbury.github.io/dolmen" diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_lsp.opam b/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_lsp.opam index 9b88793de5dece4c76f6ff852f9e1ab61d501b70..84b4e332dbf9d5e0180794dee3304dd3c6a21d2f 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_lsp.opam +++ b/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_lsp.opam @@ -1,26 +1,29 @@ -opam-version: "1.2" +opam-version: "2.0" name: "dolmen_lsp" version: "dev" -author: "Guillaume Bury" -maintainer: "guillaume.bury@gmail.com" -license: "BSD-2-clauses" +maintainer: "Guillaume Bury <guillaume.bury@gmail.com>" +authors: "Guillaume Bury <guillaume.bury@gmail.com>" +license: "BSD-2-Clause" build: [ - ["dune" "subst"] {pinned} + ["dune" "subst"] {dev} ["dune" "build" "-p" name "-j" jobs] ] depends: [ "ocaml" {>= "4.02.3"} - "dolmen" - "dolmen_type" - "dolmen_loop" - "lsp" - "dune" + "dolmen" {= version } + "dolmen_type" {= version } + "dolmen_loop" { = version } + "dune" { >= "2.7" } "ocaml-syntax-shims" + "odoc" { with-doc } + "logs" + "lsp" + "linol" { >= "0.4" & < "0.5" } + "linol-lwt" { >= "0.4" & < "0.5" } ] -pin-depends: [ "lsp.dev" "git+https://github.com/ocaml/ocaml-lsp.git#5782d1da" ] tags: 
[ "logic" "computation" "automated theorem prover" "lsp" "language server protocol"] homepage: "https://github.com/Gbury/dolmen" -dev-repo: "https://github.com/Gbury/dolmen.git" +dev-repo: "git+https://github.com/Gbury/dolmen.git" bug-reports: "https://github.com/Gbury/dolmen/issues" doc: "http://gbury.github.io/dolmen" diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_type.opam b/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_type.opam index dc845dc3c6cf3cfa21cde007386d78ec0405533f..231cae9b84c978a737a2f6eb5191e80f3a8fc15b 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_type.opam +++ b/Src/COLIBRI/simplex_ocaml/dolmen/dolmen_type.opam @@ -1,22 +1,24 @@ -opam-version: "1.2" +opam-version: "2.0" name: "dolmen_typecheck" version: "dev" -author: "Guillaume Bury" -maintainer: "guillaume.bury@gmail.com" -license: "BSD-2-clauses" +maintainer: "Guillaume Bury <guillaume.bury@gmail.com>" +authors: "Guillaume Bury <guillaume.bury@gmail.com>" +license: "BSD-2-Clause" build: [ - ["dune" "subst"] {pinned} + ["dune" "subst"] {dev} ["dune" "build" "-p" name "-j" jobs] ] depends: [ "ocaml" {>= "4.02.3"} - "dolmen" + "dolmen" {= version } + "dune" { >= "2.7" } "spelll" - "dune" + "uutf" + "odoc" { with-doc } ] -tags: [ "logic" "type" "typechecking" "first order" ] +tags: [ "logic" "type" "typechecking" "first order" "polymorphism" ] homepage: "https://github.com/Gbury/dolmen" -dev-repo: "https://github.com/Gbury/dolmen.git" +dev-repo: "git+https://github.com/Gbury/dolmen.git" bug-reports: "https://github.com/Gbury/dolmen/issues" doc: "http://gbury.github.io/dolmen" diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/dune-project b/Src/COLIBRI/simplex_ocaml/dolmen/dune-project index fd2b76d53dd30d21d711f7dc871e605d0da46a6e..5bd82bd6217cfaded6794073d4efb22893f6d7e6 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/dune-project +++ b/Src/COLIBRI/simplex_ocaml/dolmen/dune-project @@ -1,3 +1,3 @@ -(lang dune 2.0) +(lang dune 2.7) (name dolmen) (using menhir 2.0) diff --git 
a/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/dune b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/dune index 3e439a4d475adece96b153d801458c013e1036e0..9db6fb49823d9fc20e98509cea95a0613eb3c95e 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/dune +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/dune @@ -9,6 +9,10 @@ ; dolmen deps dolmen dolmen.intf dolmen.std dolmen_type dolmen_loop + ; Memtrace dependency + (select memory_profiler.ml from + (memtrace -> memory_profiler.memtrace.ml) + ( -> memory_profiler.missing.ml)) ) ) @@ -20,7 +24,7 @@ ; Install the man page (install - (files dolmen.1) (section man) + (files dolmen.1) (package dolmen_bin) ) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/errors.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/errors.ml index f0b8328a34ae9750a91921054e4aae2c23bbc7c1..aa06877fb1a14641c7944bc2a6f5c9f421166b17 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/errors.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/errors.ml @@ -1,105 +1,56 @@ -(* Some helper functions *) -(* ************************************************************************ *) - -let prelude (st : Loop.State.t) = - match st.input_lang with - | None -> "prompt> @?" - | Some l -> - Format.asprintf "(%s)# @?" (Dolmen_loop.Logic.string_of_language l) - -let prelude_space st = - String.make (String.length (prelude st) - 8) ' ' - -(* Location functions *) -(* ************************************************************************ *) - (* Exceptions *) (* ************************************************************************* *) let exn st = function + (* Internal exception used for jumping. + This should only be used ot raise a state that has just been + 'errored' (i.e. where State.error has been called on the state), + which, for the cli binary, means that we should have already exited. 
*) + | Dolmen_loop.State.Error st -> st + (* Sigint, potentially wrapped by the typechecker *) - | Loop.Pipeline.Sigint - | Dolmen_loop.Typer.T.Typing_error ( - Dolmen_loop.Typer.T.Error (_, _, Dolmen_loop.Typer.T.Uncaught_exn ( - Loop.Pipeline.Sigint, _))) -> + | Dolmen_loop.Pipeline.Sigint -> Format.pp_print_flush Format.std_formatter (); - Loop.State.error st "User Interrupt" + Format.pp_print_flush Format.err_formatter (); + Loop.State.error st Dolmen_loop.Report.Error.user_interrupt () (* Timeout, potentially wrapped by the typechecker *) - | Loop.Pipeline.Out_of_time - | Dolmen_loop.Typer.T.Typing_error ( - Dolmen_loop.Typer.T.Error (_, _, Dolmen_loop.Typer.T.Uncaught_exn ( - Loop.Pipeline.Out_of_time, _))) -> + | Dolmen_loop.Pipeline.Out_of_time -> Format.pp_print_flush Format.std_formatter (); - Loop.State.error st "Time limit reached" + Loop.State.error st Dolmen_loop.Report.Error.timeout () - | Loop.Pipeline.Out_of_space - | Dolmen_loop.Typer.T.Typing_error ( - Dolmen_loop.Typer.T.Error (_, _, Dolmen_loop.Typer.T.Uncaught_exn ( - Loop.Pipeline.Out_of_space, _))) -> + | Dolmen_loop.Pipeline.Out_of_space -> Format.pp_print_flush Format.std_formatter (); - Loop.State.error st "Memory limit reached" - - (* Parsing errors *) - | Dolmen.Std.Loc.Uncaught (loc, exn) -> - let file = Dolmen_loop.State.input_file_loc st in - Loop.State.error ~loc:{ file; loc; } st "%s" (Printexc.to_string exn) - | Dolmen.Std.Loc.Lexing_error (loc, lex) -> - let file = Dolmen_loop.State.input_file_loc st in - Loop.State.error ~loc:{ file; loc; } st "Lexing error: invalid character '%s'" lex - | Dolmen.Std.Loc.Syntax_error (loc, msg) -> - let file = Dolmen_loop.State.input_file_loc st in - Loop.State.error ~loc: { file; loc; } st "%t@." 
msg - - - (* Typing errors *) - | Dolmen_loop.Typer.T.Typing_error ( - Dolmen_loop.Typer.T.Error (env, fragment, _err) as error) -> - let loc = Dolmen_loop.Typer.T.fragment_loc env fragment in - if st.context then - Format.eprintf "@[<hv 2>While typing:@ @[<hov>%a@]@]@." - Loop.Typer.print_fragment (env, fragment); - Loop.State.error ~loc st "%a" - Loop.Typer.report_error error - - (* State errors *) - | Dolmen_loop.State.File_not_found (loc, dir, f) -> - if dir = "." then - Loop.State.error ~loc st "File not found: '%s'" f - else - Loop.State.error ~loc st "File not found: '%s' in directory '%s'" f dir - | Dolmen_loop.State.Input_lang_changed (l, l') -> - Loop.State.error st "Input language changed from %s to %s (probably because of an include statement)" - (Dolmen_loop.Logic.string_of_language l) - (Dolmen_loop.Logic.string_of_language l') + Format.pp_print_flush Format.err_formatter (); + Loop.State.error st Dolmen_loop.Report.Error.spaceout () (* Internal Dolmen Expr errors *) - | Dolmen.Std.Expr.Bad_ty_arity (c, l) -> - let pp_sep fmt () = Format.fprintf fmt ";@ " in - Loop.State.error st "@[<hv>Internal error: Bad arity for type constant '%a',@ which was provided arguments:@ [@[<hv>%a@]]@]" - Dolmen.Std.Expr.Print.ty_const c (Format.pp_print_list ~pp_sep Dolmen.Std.Expr.Ty.print) l - | Dolmen.Std.Expr.Bad_term_arity (c, tys, ts) -> + | Dolmen.Std.Expr.Ty.Bad_arity (c, l) -> let pp_sep fmt () = Format.fprintf fmt ";@ " in - Loop.State.error st "@[<hv>Internal error: Bad arity for type constant '%a',@ which was provided arguments:@ [@[<hv>%a;@ %a@]]@]" - Dolmen.Std.Expr.Print.term_const c - (Format.pp_print_list ~pp_sep Dolmen.Std.Expr.Ty.print) tys - (Format.pp_print_list ~pp_sep Dolmen.Std.Expr.Term.print) ts + Loop.State.error st Dolmen_loop.Report.Error.internal_error + (Format.dprintf + "@[<hv>Internal error: Bad arity for type constant '%a',\ + @ which was provided arguments:@ [@[<hv>%a@]]@]" + Dolmen.Std.Expr.Print.ty_cst c + (Format.pp_print_list 
~pp_sep Dolmen.Std.Expr.Ty.print) l) | Dolmen.Std.Expr.Type_already_defined c -> - Loop.State.error st "@[<hv>Internal error: Type constant '%a' was already defined earlier,@ cannot re-define it.@]" - Dolmen.Std.Expr.Print.id c - + Loop.State.error st Dolmen_loop.Report.Error.internal_error + (Format.dprintf + "@[<hv>Internal error: Type constant '%a' was already defined earlier,\ + @ cannot re-define it.@]" + Dolmen.Std.Expr.Print.id c) | Dolmen.Std.Expr.Term.Wrong_type (t, ty) -> - Loop.State.error st "@[<hv>Internal error: A term of type@ %a@ was expected but instead got a term of type@ %a@]" - Dolmen.Std.Expr.Ty.print ty Dolmen.Std.Expr.Ty.print (Dolmen.Std.Expr.Term.ty t) - - (* File format auto-detect *) - | Dolmen_loop.Logic.Extension_not_found ext -> - Loop.State.error st "@[<hv>The following extension was not recognized: '%s'.@ %s" ext - "Please use a recognised extension or specify an input language on the command line" + Loop.State.error st Dolmen_loop.Report.Error.internal_error + (Format.dprintf + "@[<hv>Internal error: A term of type@ %a@ was expected \ + but instead got a term of type@ %a@]" + Dolmen.Std.Expr.Ty.print ty + Dolmen.Std.Expr.Ty.print (Dolmen.Std.Expr.Term.ty t)) (* Generic catch-all *) - | e -> Loop.State.error st "@[<hv>Unhandled exception:@ %s@]" (Printexc.to_string e) - + | exn -> + let bt = Printexc.get_raw_backtrace () in + Loop.State.error st Dolmen_loop.Report.Error.uncaught_exn (exn, bt) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/loop.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/loop.ml index c29e6d7b7cb8e738932d46b6eac846c27dba3f90..d443abc8ce0c4bbf2da1f2aaccedf0612150ff64 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/loop.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/loop.ml @@ -9,6 +9,6 @@ module Header = Dolmen_loop.Headers.Pipe(State) module Typer = struct module T = Dolmen_loop.Typer.Make(State) include T - include Dolmen_loop.Typer.Pipe(Dolmen.Std.Expr)(State)(T) + include 
Dolmen_loop.Typer.Pipe(Dolmen.Std.Expr)(Dolmen.Std.Expr.Print)(State)(T) end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/main.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/main.ml index a16a73a386a445186fd7fcd9a4d3f59eb8b8b528..5a1c335b8ce380df36c28602d9646bbb11998407 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/main.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/main.ml @@ -1,43 +1,42 @@ (* This file is free software, part of dolmen. See file "LICENSE" for more information *) -let handle_exn st exn = + +(* Debug printing *) +(* ************** *) + +let debug_parsed_pipe st c = + if st.Loop.State.debug then + Format.eprintf "[parsed] @[<hov>%a@]@." + Dolmen.Std.Statement.print c; + st, c + +let debug_typed_pipe st stmt = if st.Loop.State.debug then - Format.eprintf "%a@\n%s@." - Loop.State.debug st - (Printexc.to_string exn); - let () = Errors.exn st exn in - exit 1 + Format.eprintf "[typed] @[<hov>%a@]@\n@." + Loop.Typer.print stmt; + st, stmt + + +(* Run dolmen (regular use) *) +(* ************************ *) + +let handle_exn st exn = + let _st = Errors.exn st exn in + exit 125 let finally st e = match e with | None -> st - | Some exn -> handle_exn st exn + | Some (bt,exn) -> + (* Print the backtrace if requested *) + if Printexc.backtrace_status () then + Printexc.print_raw_backtrace stdout bt; + handle_exn st exn -let debug_pipe st c = +let run st = if st.Loop.State.debug then - Format.eprintf "%a@\n%a@." 
- Loop.State.debug st - Dolmen.Std.Statement.print c; - st, c - -let () = - let man = [ - `S Options.common_section; - `P "Common options for the dolmen binary"; - `S Options.gc_section; - `P "Options to fine-tune the gc, only experts should use these."; - `S Cmdliner.Manpage.s_bugs; - `P "You can report bugs at https://github.com/Gbury/dolmen/issues"; - `S Cmdliner.Manpage.s_authors; - `P "Guillaume Bury <guillaume.bury@gmail.com>" - ] in - let info = Cmdliner.Term.info ~man ~version:"0.1" "dolmen" in - let st = match Cmdliner.Term.eval (Options.state, info) with - | `Version | `Help -> exit 0 - | `Error `Parse | `Error `Term | `Error `Exn -> exit 2 - | `Ok opt -> opt - in + Dolmen.Std.Expr.Print.print_index := true; let st, g = try Loop.Parser.parse [] st with exn -> handle_exn st exn @@ -46,10 +45,11 @@ let () = let open Loop.Pipeline in run ~finally g st ( (fix (op ~name:"expand" Loop.Parser.expand) ( - (op ~name:"debug" debug_pipe) + (op ~name:"debug-parsed" debug_parsed_pipe) @>>> (op ~name:"headers" Loop.Header.inspect) @>>> (op ~name:"typecheck" Loop.Typer.typecheck) - @>|> (op (fun st _ -> st, ())) @>>> _end + @>|> (op ~name:"debug-typed" debug_typed_pipe) + @>>> (op (fun st _ -> st, ())) @>>> _end ) ) ) @@ -58,3 +58,83 @@ let () = let _st = Dolmen_loop.State.flush st () in () +(* Warning/Error list *) +(* ****************** *) + +let list conf = + let open Dolmen_loop in + let l = + List.sort (fun r r' -> + String.compare (Report.T.mnemonic r) (Report.T.mnemonic r') + ) (Report.T. 
list ()) + in + let pp_kind fmt = function + | `All -> + Format.fprintf fmt "%-15s" "group" + | `Error _ -> + Format.fprintf fmt "%-15s" "error" + | `Warning Report.Any_warn w -> + Format.fprintf fmt "w:%-13s" + (Report.Warning.Status.to_string (Report.Conf.status conf w)) + in + Format.printf "%-30s%-15s%-15s%s@\n%s@\n" + "mnemonic" "kind" "category" "description" + (String.make 100 '-'); + List.iter (fun t -> + Format.printf "%-30s%a%-15s%s@\n" + (Report.T.mnemonic t) pp_kind t + (Report.T.category t) (Report.T.name t) + ) l + + +(* Warning/Error documentation *) +(* *************************** *) + +let doc conf t = + let open Dolmen_loop in + let pp_status fmt = function + | `All | `Error _ -> () + | `Warning Report.Any_warn w -> + Format.fprintf fmt "@ By default: %a" + Report.Warning.Status.print (Report.Conf.status conf w) + in + Format.printf + "@[<v>@ %s@ @ kind: %s@ Category: %s@ Mnemonic: %s%a@ @ @[<hov> %t@]@]@." + (Report.T.name t) + (Report.T.kind t) + (Report.T.category t) + (Report.T.mnemonic t) + pp_status t + (Report.T.doc t) + + +(* Main code *) +(* ********* *) + +let () = + let version = "0.1" in + let exits = + List.map (fun code -> + let retcode, doc = Dolmen_loop.Code.descr code in + Cmdliner.Term.exit_info ~doc retcode + ) (Dolmen_loop.Code.errors ()) + @ Cmdliner.Term.default_exits + in + let cli_term = ( + Options.cli, + Cmdliner.Term.info "dolmen" + ~exits ~man:Man.cli ~version) + in + match Cmdliner.Term.eval cli_term with + | `Version | `Help -> + exit 0 + | `Error `Parse | `Error `Term | `Error `Exn -> + exit Cmdliner.Term.exit_status_cli_error + | `Ok Run { state } -> + run state + | `Ok Doc { report; conf; } -> + doc conf report + | `Ok List_reports { conf; } -> + list conf + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/man.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/man.ml new file mode 100644 index 0000000000000000000000000000000000000000..1cdfe6d465510e9e4f734e1c52578cdffd7108d2 --- /dev/null +++ 
b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/man.ml @@ -0,0 +1,36 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + +(* Manpage *) +(* ************************************************************************* *) + +let cli = [ + `S Cmdliner.Manpage.s_description; + `P "Dolmen is a tool to parse and type input files that contain problems \ + used in automated deduction."; + `S Options.common_section; + `P "Common options for the dolmen binary"; + `S Options.error_section; + `P "Options to customize the behaviour of dolmen on errors/warnings"; + `P "A warning can have one of three statuses: Disabled, Enabled, and Fatal. \ + When disabled, a warning will be ignored, when enabled, it will be + printed, and when fatal, it will be transformed into an error."; + `S Options.header_section; + `P "Options to control the checking of headers in the input file"; + `S Options.profiling_section; + `P (Format.asprintf + "Options to profile Dolmen.%s" + (if Memory_profiler.available then "" else + " WARNING: Memory profiling is not available on this version + of Dolmen. 
You should install memtrace and recompile Dolmen + if you desire to use memory profiling.") + ); + `S Options.gc_section; + `P "Options to fine-tune the gc, only experts should use these."; + `S Cmdliner.Manpage.s_exit_status; + `P "dolmen exits with the following status:"; + `S Cmdliner.Manpage.s_bugs; + `P "You can report bugs at https://github.com/Gbury/dolmen/issues"; + `S Cmdliner.Manpage.s_authors; + `P "Guillaume Bury <guillaume.bury@gmail.com>"; +] diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/memory_profiler.memtrace.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/memory_profiler.memtrace.ml new file mode 100644 index 0000000000000000000000000000000000000000..8a00eafd399bfba2d24885844e1b1678a9992f2b --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/memory_profiler.memtrace.ml @@ -0,0 +1,11 @@ + +let available = true + +let start filename sampling_rate = + let _s = + Memtrace.start_tracing + ~filename ~sampling_rate + ~context:(Some "dolmen") + in + () + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/memory_profiler.missing.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/memory_profiler.missing.ml new file mode 100644 index 0000000000000000000000000000000000000000..99039ec0b17d6c578bd9a0f4db46621230dcd8b2 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/memory_profiler.missing.ml @@ -0,0 +1,5 @@ + +let available = false + +let start _filename _sampling_rate = () + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/options.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/options.ml index cd098a631ae56ba5283bffd240e0eac695215998..74ec6cc1b1dfe203eef0f63a62db38820f2f10af 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/options.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/bin/options.ml @@ -7,75 +7,60 @@ open Cmdliner (* ************************************************************************* *) let gc_section = "GC OPTIONS" +let error_section = "ERROR HANDLING" +let header_section = "HEADER CHECKING" let common_section 
= Manpage.s_options +let profiling_section = "PROFILING" -(* State creation *) +(* Main commands *) (* ************************************************************************* *) -let gc_opts - minor_heap_size major_heap_increment - space_overhead max_overhead allocation_policy = - Gc.({ (get ()) with - minor_heap_size; major_heap_increment; - space_overhead; max_overhead; allocation_policy; - } - ) - -let split_input = function - | `Stdin -> - Sys.getcwd (), `Stdin - | `File f -> - Filename.dirname f, `File (Filename.basename f) +type cmd = + | Run of { + state : Loop.State.t; + } + | List_reports of { + conf : Dolmen_loop.Report.Conf.t; + } + | Doc of { + report : Dolmen_loop.Report.T.t; + conf : Dolmen_loop.Report.Conf.t; + } + +(* Color options *) +(* ************************************************************************* *) -let mk_state - gc gc_opt bt colors - time_limit size_limit - input_lang input_mode input - header_check header_licenses - header_lang_version - type_check type_strict - debug context max_warn - = - (* Side-effects *) - let () = Gc.set gc_opt in - let () = - let style = if colors then `Ansi_tty else `None in - Fmt.set_style_renderer Format.err_formatter style; +type color = + | Auto + | Always + | Never + +let color_list = [ + "auto", Auto; + "always", Always; + "never", Never; +] + +let color_conv = Arg.enum color_list + +let set_color file_descr formatter color = + let style = + match color with + | Always -> `Ansi_tty + | Never -> `None + | Auto -> if Unix.isatty file_descr then `Ansi_tty else `None in - let () = if bt then Printexc.record_backtrace true in - let () = if gc then at_exit (fun () -> Gc.print_stat stdout;) in - (* State creation *) - let input_dir, input_source = split_input input in - let st : Loop.State.t = { - debug; - - context; max_warn; - cur_warn = 0; - - time_limit; size_limit; - - input_dir; input_lang; - input_mode; input_source; - input_file_loc = Dolmen.Std.Loc.mk_file ""; + Fmt.set_style_renderer formatter 
style - header_check; header_licenses; header_lang_version; - header_state = Dolmen_loop.Headers.empty; - - type_check; type_strict; - type_state = Dolmen_loop.Typer.new_state (); - solve_state = (); +(* Input format converter *) +(* ************************************************************************* *) - export_lang = []; - } in - st +let input_format_conv = Arg.enum Dolmen_loop.Logic.enum (* Input source converter *) (* ************************************************************************* *) -(* Converter for input formats/languages *) -let input_format_conv = Arg.enum Dolmen_loop.Logic.enum - (* Converter for input file/stdin *) let input_to_string = function | `Stdin -> "<stdin>" @@ -87,18 +72,6 @@ let input_source_conv = let print fmt i = Format.fprintf fmt "%s" (input_to_string i) in Arg.conv (parse, print) -(* Converter for permissions *) -let perm_conv = Arg.enum [ - "allow", Dolmen_loop.State.Allow; - "warn", Dolmen_loop.State.Warn; - "error", Dolmen_loop.State.Error; - ] - -(* Converter for input modes *) -let mode_conv = Arg.enum [ - "full", `Full; - "incremental", `Incremental; - ] (* Output converters *) (* ************************************************************************* *) @@ -116,6 +89,79 @@ let output_conv = Arg.conv (parse_output, print) +(* Input modes *) +(* ************************************************************************* *) + +let mode_list = [ + "full", `Full; + "incremental", `Incremental; + ] + +let mode_conv = Arg.enum mode_list + + +(* Mnemonic converter *) +(* ************************************************************************ *) + +let mnemonic_parser s = + match Dolmen_loop.Report.T.find_mnemonic s with + | Some ((`All | `Error _ | `Warning _) as res) -> + Ok res + | None -> + Error (`Msg ( + Format.asprintf + "@;the mnemonic '%s' is unknown, please check the spelling." 
s)) + +let mnemonic_printer fmt t = + let s = Dolmen_loop.Report.T.mnemonic t in + Format.fprintf fmt "%s" s + +let mnemonic_conv = Arg.conv (mnemonic_parser, mnemonic_printer) + + +(* Warning modifiers *) +(* ************************************************************************ *) + +type warn_mod = + [ `Disable | `Enable | `Fatal | `Non_fatal ] * + [ Dolmen_loop.Report.T.all | Dolmen_loop.Report.T.warn ] + +let warn_parser s = + if String.length s = 0 then + Error (`Msg "empty warning modifier") + else begin + let aux change modif s = + match mnemonic_parser s with + | Ok ((`All | `Warning _) as res) -> Ok (modif, res) + | Error _ as res -> res + | Ok `Error _ -> + Error (`Msg ( + Format.asprintf + "@;the mnemonic '%s' refers to an error, \ + but only warnings can be %s" s change)) + in + match s.[0] with + | '@' -> aux "made fatal" `Fatal (String.sub s 1 (String.length s - 1)) + | '-' -> aux "disabled" `Disable (String.sub s 1 (String.length s - 1)) + | '+' -> aux "enabled" `Enable (String.sub s 1 (String.length s - 1)) + | '=' -> aux "enabled" `Non_fatal (String.sub s 1 (String.length s - 1)) + | _ -> aux "enabled" `Enable s + end + +let warn_printer fmt (modif, r) = + let c = + match modif with + | `Disable -> '-' + | `Enable -> '+' + | `Fatal -> '@' + | `Non_fatal -> '=' + in + Format.fprintf fmt "%c%s" c (Dolmen_loop.Report.T.mnemonic r) + +let c_warn = Arg.conv (warn_parser, warn_printer) +let c_warn_list = Arg.list c_warn + + (* Argument converter for integer with multiplier suffix *) (* ************************************************************************ *) @@ -157,12 +203,10 @@ let parse_time arg = let size_string f = let n = int_of_float f in let aux n div = n / div, n mod div in - let n_tera, n = aux n 1_000_000_000_000 in let n_giga, n = aux n 1_000_000_000 in let n_mega, n = aux n 1_000_000 in let n_kilo, n = aux n 1_000 in let print_aux s n = if n <> 0 then (string_of_int n) ^ s else "" in - (print_aux "To" n_tera) ^ (print_aux "Go" n_giga) ^ 
(print_aux "Mo" n_mega) ^ (print_aux "ko" n_kilo) ^ @@ -191,11 +235,147 @@ let parse_size arg = let c_time = parse_time, print_time let c_size = parse_size, print_size + +(* Location styles *) +(* ************************************************************************* *) + +let loc_style = + Arg.enum [ + "short", `Short; + "contextual", `Contextual; + ] + + +(* State creation *) +(* ************************************************************************* *) + +let gc_opts use_env + minor_heap_size major_heap_increment + space_overhead max_overhead allocation_policy = + if use_env then None + else begin + let default = Gc.get () in + Some { default with + minor_heap_size; major_heap_increment; + space_overhead; max_overhead; allocation_policy; + } + end + +let profiling_opts stats + memtrace_filename memtrace_sampling_rate = + Dolmen_std.Stats.enabled := stats; + match (memtrace_filename : _ option) with + | None -> `Ok () + | Some filename -> + if Memory_profiler.available then begin + Memory_profiler.start filename memtrace_sampling_rate; + `Ok () + end else begin + let msg = + "Memtrace is not available, try to install memtrace and recompile Dolmen." 
+ in + `Error (false, msg) + end + +let reports_opts strict warn_modifiers = + let conf = Dolmen_loop.Report.Conf.mk ~default:Enabled in + let conf = + if not strict then conf + else Dolmen_loop.Report.Conf.fatal conf + (`Warning (Dolmen_loop.Report.Any_warn Dolmen_loop.Typer.almost_linear)) + in + let res = + List.fold_left (fun conf l -> + List.fold_left (fun conf -> function + | `Disable, w -> Dolmen_loop.Report.Conf.disable conf w + | `Enable, w -> Dolmen_loop.Report.Conf.enable conf w + | `Fatal, w -> Dolmen_loop.Report.Conf.fatal conf w + | `Non_fatal, w -> Dolmen_loop.Report.Conf.set_enabled conf w + ) conf l + ) conf warn_modifiers + in + `Ok res + +let split_input = function + | `Stdin -> + Sys.getcwd (), `Stdin + | `File f -> + Filename.dirname f, `File (Filename.basename f) + +let mk_run_state + () gc gc_opt bt colors + abort_on_bug + time_limit size_limit + input_lang input_mode input + header_check header_licenses + header_lang_version + type_check + debug loc_style max_warn reports + = + (* Side-effects *) + let () = Option.iter Gc.set gc_opt in + let () = set_color Unix.stdout Format.std_formatter colors in + let () = set_color Unix.stderr Format.err_formatter colors in + let () = if bt then Printexc.record_backtrace true in + let () = if gc then at_exit (fun () -> Gc.print_stat stdout;) in + let () = if abort_on_bug then Dolmen_loop.Code.abort Dolmen_loop.Code.bug in + (* State creation *) + let input_dir, input_source = split_input input in + let st : Loop.State.t = { + debug; loc_style; reports; + + max_warn; cur_warn = 0; + + time_limit; size_limit; + + input_dir; input_lang; + input_mode; input_source; + input_file_loc = Dolmen.Std.Loc.mk_file ""; + + header_check; header_licenses; header_lang_version; + header_state = Dolmen_loop.Headers.empty; + + type_check; + type_state = Dolmen_loop.Typer.new_state (); + + solve_state = (); + + export_lang = []; + } in + st + + +(* Profiling *) +(* 
************************************************************************* *) + +let profiling_t = + let docs = profiling_section in + let stats = + let doc = "Enable statistics collecting and printing" in + Arg.(value & flag & info ["stats"] ~doc ~docs) + in + let memtrace_filename = + let doc = "Filename for the memory profiling trace" in + Arg.(value & opt (some string) None & info ["memtrace"] ~doc ~docs ~docv:"FILE") + in + let memtrace_sampling_rate = + let doc = "Sampling rate for the memory profiler" in + Arg.(value & opt float 1e-6 & info ["memtrace-rate"] ~doc ~docs ~docv:"RATE") + in + Term.(ret (const profiling_opts $ stats $ + memtrace_filename $ memtrace_sampling_rate)) + + (* Gc Options parsing *) (* ************************************************************************* *) let gc_t = let docs = gc_section in + let use_env = + let doc = "Use the gc settings from the OCAMLRUNPARAM env variable, \ + and ignore the dolmen settings on the cli." in + Arg.(value & opt bool false & info ["gc-env"] ~doc ~docs) + in let minor_heap_size = let doc = "Set Gc.minor_heap_size" in Arg.(value & opt int 1_000_000 & info ["gc-s"] ~doc ~docs) @@ -216,25 +396,53 @@ let gc_t = let doc = "Set Gc.allocation policy" in Arg.(value & opt int 0 & info ["gc-a"] ~doc ~docs) in - Term.((const gc_opts $ minor_heap_size $ major_heap_increment $ + Term.((const gc_opts $ use_env $ + minor_heap_size $ major_heap_increment $ space_overhead $ max_overhead $ allocation_policy)) -(* Main Options parsing *) +(* Warning controls *) +(* ************************************************************************* *) + +let reports = + let docs = error_section in + let warns = + let doc = "Change the status of a warning. Accepts a list of \ + comma-separated modifiers of the form @mnemonic, where \ + '@' is an (optional) modifier among '+' (default) to \ + enable a warning, '-' to disable a warning and '!' 
to \ + make the warning fatal, and 'mnemonic' is the short \ + (mnemonic) name of the warning." in + Arg.(value & opt_all c_warn_list [] & info ["w"; "warn"] ~docs ~doc) + in + let strict = + let doc = "Be strict or more lenient wrt to typing" in + Arg.(value & opt bool true & info ["strict"] ~doc ~docs:error_section) + in + Term.(ret (const reports_opts $ strict $ warns)) + + +(* State term *) (* ************************************************************************* *) let state = let docs = common_section in let gc = let doc = "Print statistics about the gc upon exiting" in - Arg.(value & flag & info ["g"; "gc"] ~doc ~docs) + Arg.(value & flag & info ["g"; "gc"] ~doc ~docs:gc_section) in let bt = let doc = "Enables printing of backtraces." in - Arg.(value & flag & info ["b"; "backtrace"] ~doc ~docs) + Arg.(value & flag & info ["b"; "backtrace"] ~doc ~docs:error_section) in let colors = let doc = "Activate coloring of output" in - Arg.(value & opt bool true & info ["color"] ~doc ~docs) + Arg.(value & opt color_conv Auto & info ["color"] ~doc ~docs) + in + let abort_on_bug = + let doc = Format.asprintf + "Abort instead of exiting properly when an internal bug + is detected (i.e. corresponds to an exit code of 125)." in + Arg.(value & flag & info ["abort-on-bug"] ~doc ~docs:error_section) in let time = let doc = "Stop the program after a time lapse of $(docv). @@ -246,19 +454,23 @@ let state = let doc = "Stop the program if it tries and use more the $(docv) memory space. " ^ "Accepts usual suffixes for sizes : k,M,G,T. " ^ "Without suffix, default to a size in octet." in - Arg.(value & opt c_size 1_000_000_000. & info ["s"; "size"] ~docv:"SIZE" ~doc ~docs) + Arg.(value & opt c_size 1_000_000_000. & + info ["s"; "size"] ~docv:"SIZE" ~doc ~docs) in let in_lang = let doc = Format.asprintf - "Set the input language to $(docv) (%s)." 
- (Arg.doc_alts_enum ~quoted:false Dolmen_loop.Logic.enum) in - Arg.(value & opt (some input_format_conv) None & info ["i"; "input"; "lang"] ~docv:"INPUT" ~doc ~docs) + "Set the input language to $(docv); must be %s." + (Arg.doc_alts_enum ~quoted:true Dolmen_loop.Logic.enum) in + Arg.(value & opt (some input_format_conv) None & + info ["i"; "input"; "lang"] ~docv:"INPUT" ~doc ~docs) in let in_mode = let doc = Format.asprintf - "Set the input mode. the full mode parses the entire file before iterating - over its contents whereas the incremental mode processes each delcaration - before parsing the next one. Default is incremental mode." in + "Set the input mode, must be %s. + The full mode parses the entire file before iterating over its + contents whereas the incremental mode processes each declaration + before parsing the next one. Default is incremental mode." + (Arg.doc_alts_enum ~quoted:true mode_list) in Arg.(value & opt (some mode_conv) None & info ["m"; "mode"] ~doc ~docs) in let input = @@ -269,27 +481,26 @@ let state = let header_check = let doc = "If true, then the presence of headers will be checked in the input file (and errors raised if they are not present)." in - Arg.(value & opt bool false & info ["check-headers"] ~doc ~docs) + Arg.(value & opt bool false & + info ["check-headers"] ~doc ~docs:header_section) in let header_licenses = let doc = "Set the allowed set of licenses in the headers. An empty list means allow everything." in - Arg.(value & opt (list string) [] & info ["header-licenses"] ~doc ~docs) + Arg.(value & opt (list string) [] & + info ["header-licenses"] ~doc ~docs:header_section) in let header_lang_version = let doc = "Set the only allowed language version for headers. If not set, all conforming version numbers are allowed." 
in - Arg.(value & opt (some string) None & info ["header-lang-version"] ~docs ~doc) + Arg.(value & opt (some string) None & + info ["header-lang-version"] ~doc ~docs:header_section) in let typing = let doc = "Decide whether to type-check input expressions. If false, only parsing is done. " in Arg.(value & opt bool true & info ["type"] ~doc ~docs) in - let strict = - let doc = "Be strict or more lenient wrt to typing" in - Arg.(value & opt bool true & info ["strict"] ~doc ~docs) - in (* let locs = let doc = "Whether to keep location information during typing. \ @@ -300,23 +511,58 @@ let state = *) let debug = let doc = Format.asprintf - "Print the parsed dolmen statement (after expansion of includes)" in + "Activate debug mode. Among other things, this will make dolmen \ + print every statement it parses, and every statement after type \ + checking, as well as activate unique id printing." in Arg.(value & flag & info ["debug"] ~docs ~doc) in - let context = + let loc_style = let doc = Format.asprintf - "Print the context / fragment of parsed AST with errors" in - Arg.(value & flag & info ["context"] ~docs ~doc) - in + "Control the way locations are printed for error and warnings messages. + $(b,short) only prints the location for the message, while + $(b,contextual) also displays the source code snippet corresponding + to the location of the message (except in some cases where the snippet + would be too long)." in + Arg.(value & opt loc_style `Contextual & info ["loc-style"] ~doc ~docs:error_section) + in let max_warn = let doc = Format.asprintf "Maximum number of warnings to display (excess warnings will be counted and a count of silenced warnings reported at the end)." 
in - Arg.(value & opt int max_int & info ["max-warn"] ~docs ~doc) + Arg.(value & opt int max_int & info ["max-warn"] ~doc ~docs:error_section) in - Term.(const mk_state $ gc $ gc_t $ bt $ colors $ + Term.(const mk_run_state $ profiling_t $ + gc $ gc_t $ bt $ colors $ abort_on_bug $ time $ size $ in_lang $ in_mode $ input $ header_check $ header_licenses $ header_lang_version $ - typing $ strict $ debug $ context $ max_warn) + typing $ debug $ loc_style $ max_warn $ reports) + + +(* List command term *) +(* ************************************************************************* *) +let cli = + let aux state list doc = + match list, doc with + | false, None -> + `Ok (Run { state; }) + | false, Some report -> + `Ok (Doc { report; conf = state.reports; }) + | true, None -> + `Ok (List_reports { conf = state.reports; }) + | true, Some _ -> + `Error (false, + "at most one of --list and --doc might be \ + present on the command line") + in + let list = + let doc = "List all reports (i.e. warnings and errors), + that dolmen can emit." in + Arg.(value & flag & info ["list"] ~doc) + in + let doc = + let doc = "The warning or error of which to show the documentation." 
in + Arg.(value & opt (some mnemonic_conv) None & info ["doc"] ~doc ~docv:"mnemonic") + in + Term.(ret (const aux $ state $ list $ doc)) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/classes/dune b/Src/COLIBRI/simplex_ocaml/dolmen/src/classes/dune index bf0895106ae86198c88cba80cad638736de3d8a2..d559fe47ab8ff2b17e083d01223872909735b0cc 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/classes/dune +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/classes/dune @@ -2,13 +2,14 @@ (library (name dolmen_class) (public_name dolmen.class) + (instrumentation (backend bisect_ppx)) (libraries ; dolmen_intf dolmen_std ; Languages without versions dolmen_ae dolmen_dimacs dolmen_icnf dolmen_zf ; Smtlib2 & versions - dolmen_smtlib2 dolmen_smtlib2_v6 + dolmen_smtlib2 dolmen_smtlib2_v6 dolmen_smtlib2_poly ; TPTP & versions dolmen_tptp dolmen_tptp_v6_3_0 ; Menhir deps diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/classes/logic.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/classes/logic.ml index bc119b16e305c09dd2923025ccda2ec2c2035b98..daba8b83a13575cb37a37a2eebdffe3a9de014b0 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/classes/logic.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/classes/logic.ml @@ -3,6 +3,7 @@ module type S = sig + type file type statement exception Extension_not_found of string @@ -22,18 +23,20 @@ module type S = sig ?dir:string -> string -> string option val parse_file : ?language:language -> - string -> language * statement list + string -> language * file * statement list val parse_file_lazy : ?language:language -> - string -> language * statement list Lazy.t - + string -> language * file * statement list Lazy.t val parse_input : ?language:language -> [< `File of string | `Stdin of language | `Raw of string * language * string ] -> - language * (unit -> statement option) * (unit -> unit) + language * file * (unit -> statement option) * (unit -> unit) + + module type S = Dolmen_intf.Language.S + with type statement := statement + and type file := file - module 
type S = Dolmen_intf.Language.S with type statement := statement val of_language : language -> language * string * (module S) val of_extension : string -> language * string * (module S) val of_filename : string -> language * string * (module S) @@ -51,7 +54,9 @@ module Make exception Extension_not_found of string - module type S = Dolmen_intf.Language.S with type statement := S.t + module type S = Dolmen_intf.Language.S + with type statement := S.t + and type file := L.file type language = | Alt_ergo @@ -67,6 +72,7 @@ module Make "iCNF", ICNF; "smt2", Smtlib2 `Latest; "smt2.6", Smtlib2 `V2_6; + "psmt2", Smtlib2 `Poly; "tptp", Tptp `Latest; "tptp-6.3.0", Tptp `V6_3_0; "zf", Zf; @@ -91,9 +97,11 @@ module Make (* Smtlib2 *) Smtlib2 `Latest, ".smt2", - (module Dolmen_smtlib2.Latest.Make(L)(I)(T)(S) : S); + (module Dolmen_smtlib2.Poly.Make(L)(I)(T)(S) : S); Smtlib2 `V2_6, ".smt2", - (module Dolmen_smtlib2.V2_6.Make(L)(I)(T)(S) : S); + (module Dolmen_smtlib2.Poly.Make(L)(I)(T)(S) : S); + Smtlib2 `Poly, ".psmt2", + (module Dolmen_smtlib2.Poly.Make(L)(I)(T)(S) : S); (* TPTP *) Tptp `Latest, ".p", @@ -136,7 +144,8 @@ module Make | None -> of_filename file | Some l -> of_language l in - l, P.parse_file file + let locfile, res = P.parse_file file in + l, locfile, res let parse_file_lazy ?language file = let l, _, (module P : S) = @@ -144,7 +153,8 @@ module Make | None -> of_filename file | Some l -> of_language l in - l, lazy (P.parse_file file) + let locfile, res = P.parse_file_lazy file in + l, locfile, res let parse_input ?language = function | `File file -> @@ -153,18 +163,18 @@ module Make | Some l -> of_language l | None -> of_extension (Dolmen_std.Misc.get_extension file) in - let gen, cl = P.parse_input (`File file) in - l, gen, cl + let locfile, gen, cl = P.parse_input (`File file) in + l, locfile, gen, cl | `Stdin l -> - let _, _, (module P : S) = of_language + let l, _, (module P : S) = of_language (match language with | Some l' -> l' | None -> l) in - let gen, cl 
= P.parse_input `Stdin in - l, gen, cl + let locfile, gen, cl = P.parse_input `Stdin in + l, locfile, gen, cl | `Raw (filename, l, s) -> let _, _, (module P : S) = of_language (match language with | Some l' -> l' | None -> l) in - let gen, cl = P.parse_input (`Contents (filename, s)) in - l, gen, cl + let locfile, gen, cl = P.parse_input (`Contents (filename, s)) in + l, locfile, gen, cl end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/classes/logic.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/classes/logic.mli index 58dc407bf9724ebd7a5ecdfd56351b051440d070..7af7c630a547db86be7b3ddee7ec51ac01881dfd 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/classes/logic.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/classes/logic.mli @@ -5,6 +5,9 @@ module type S = sig + type file + (** File location meta-data. *) + type statement (** The type of statements. *) @@ -45,14 +48,14 @@ module type S = sig val parse_file : ?language:language -> - string -> language * statement list + string -> language * file * statement list (** Given a filename, parse the file, and return the detected language together with the list of statements parsed. @param language specify a language; overrides auto-detection. *) val parse_file_lazy : ?language:language -> - string -> language * statement list Lazy.t + string -> language * file * statement list Lazy.t (** Given a filename, parse the file, and return the detected language together with the list of statements parsed. @param language specify a language; overrides auto-detection. *) @@ -62,7 +65,7 @@ module type S = sig [< `File of string | `Stdin of language | `Raw of string * language * string ] -> - language * (unit -> statement option) * (unit -> unit) + language * file * (unit -> statement option) * (unit -> unit) (** Incremental parsing of either a file (see {!parse_file}), stdin (with given language), or some arbitrary contents, of the form [`Raw (filename, language, contents)]. 
@@ -74,7 +77,9 @@ module type S = sig (** {2 Mid-level parsing} *) - module type S = Dolmen_intf.Language.S with type statement := statement + module type S = Dolmen_intf.Language.S + with type statement := statement + and type file := file (** The type of language modules. *) val of_language : language -> language * string * (module S) @@ -100,5 +105,5 @@ module Make (S : Dolmen_intf.Stmt.Logic with type location := L.t and type id := I.t and type term := T.t) - : S with type statement := S.t + : S with type statement := S.t and type file := L.file diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/dune b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/dune index cd84ad1990db93f3188aa1251cc5328be1145703..36c383065eafc23d315b83bdf94015a018818564 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/dune +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/dune @@ -1,7 +1,8 @@ (library (name dolmen_intf) (public_name dolmen.intf) + (instrumentation (backend bisect_ppx)) (libraries menhirLib) - (modules Msg Tok Lex Parse Location + (modules Map Msg Tok Lex Parse Location Id Tag Ty Term Stmt Language) ) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/id.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/id.ml index 2a63943be1eced1e81727a08a489bb70708b2cbf..a742a2d99e963f182ee92030d015da7b9f9e0ef9 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/id.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/id.ml @@ -41,12 +41,16 @@ module type Logic = sig val track : namespace (** Namespace used to tag and identify sub-terms occuring in files. *) - val mod_name : string -> namespace - (** Namespace used by modules (for instance in dedulkti). *) - val mk : namespace -> string -> t (** Make an identifier from its namespace and name. *) + val indexed : namespace -> string -> string list -> t + (** Make an indexed identifier from a namespace, basename and list of indexes. 
*) + + val qualified : namespace -> string list -> string -> t + (** Make a qualified identifier from a namespace, a list of modules (a path), + and a base name. *) + val tracked : track:t -> namespace -> string -> t (** An identifier with an additional name for tracking purposes. *) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/language.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/language.ml index 9eb0e45bbe60f216db48d4c02202b51d8734dcac..622b9e85288d6046528e6d7622f0494c503dfd1a 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/language.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/language.ml @@ -5,6 +5,9 @@ module type S = sig + type file + (** Meta-data about locations in files. *) + type token (** The type of tokens produced by the language lexer. *) @@ -25,13 +28,17 @@ module type S = sig Separates directory and file because most include directives in languages are relative to the directory of the original file being processed. *) - val parse_file : string -> statement list + val parse_file : string -> file * statement list + (** Parse the given file. + @param dir: optional directory to use if the file path is relative. *) + + val parse_file_lazy : string -> file * statement list Lazy.t (** Parse the given file. @param dir: optional directory to use if the file path is relative. *) val parse_input : [ `Stdin | `File of string | `Contents of string * string ] -> - (unit -> statement option) * (unit -> unit) + file * (unit -> statement option) * (unit -> unit) (** Incremental parsing. 
Given an input to read (either a file, stdin, or some contents of the form [(filename, s)] where [s] is the contents to parse), returns a generator that will incrementally parse the statements, diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/location.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/location.ml index 8273ec8e4d4530417640acbbcc9cdb60a156c929..6363fa1cdcc34152a54d276ee9fb3967613faf08 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/location.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/location.ml @@ -14,14 +14,27 @@ module type S = sig type t (** The type of locations. *) - exception Uncaught of t * exn + type file + (** A store for various meta-data about an input file, + can be used to optimize representation of locations. *) + + exception Uncaught of t * exn * Printexc.raw_backtrace (** The exception to be raised whenever an unexpected exception is raised during parsing. *) exception Lexing_error of t * string (** The exception to be raised when the lexer cannot parse the input. *) - exception Syntax_error of t * Msg.t - (** The exception to be raised whenever a syntax error is encountered by the parser. *) + exception Syntax_error of t * [ + | `Regular of Msg.t + | `Advanced of Msg.t * Msg.t * Msg.t + ] + (** [Syntax_error (loc, msg)] denotes a syntax error at the given location. + In the [`Advanced (prod, parsed, expected)] case, + - prod is a delayed message to print in order to identify which + production/syntax construction the parser was trying to reduce, + - parsed is a description of the token which raised the error, + - expected is a messages describing what would have been corect + tokens/inputs at that point. *) val of_lexbuf : Lexing.lexbuf -> t (** Make a position using a lexbuf directly. *) @@ -29,9 +42,16 @@ module type S = sig val mk_pos : Lexing.position -> Lexing.position -> t (** Make a position from two lewing positions. 
*)

-  val newline : string -> (Lexing.lexbuf -> unit)
-  (** A function first given the name of the file, and which should return a
-      closure that will be called on each new_line. *)
+  val mk_file : string -> file
+  (** Create meta-data for a given filename. *)
+
+  val newline : file -> Lexing.lexbuf -> unit
+  (** Offer a way for the file meta-data to store the current location
+      of the lexbuf as the start of a new line. *)
+
+  val update_size : file -> Lexing.lexbuf -> unit
+  (** Update the file meta-data to store the maximum offset currently seen
+      for this file. *)

end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/map.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/map.ml new file mode 100644 index 0000000000000000000000000000000000000000..7a1ea715f470964712fc2683cf26ca0f9205c33a --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/map.ml @@ -0,0 +1,30 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *)
+
+module type S = sig
+
+  type key
+  type 'a t
+
+  val empty : _ t
+
+  val find_exn : key -> 'a t -> 'a
+  (** Exception-raising find function.
+      @raise Not_found *)
+
+  val find_opt : key -> 'a t -> 'a option
+  (** Option-returning find function. *)
+
+  val add : key -> 'a -> 'a t -> 'a t
+  (** Add a new binding, shadowing any earlier binding to the same key. *)
+
+  val find_add : key -> ('a option -> 'a) -> 'a t -> 'a t
+  (** Update the value bound to a key. *)
+
+  val iter : (key -> 'a -> unit) -> 'a t -> unit
+  (** Iter on the map. *)
+
+  val fold : (key -> 'a -> 'acc -> 'acc) -> 'a t -> 'acc -> 'acc
+  (** Fold on the map. 
*)

+
+end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/tag.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/tag.ml index 6676a907ee6514e8fafe19a674e133b90f635ce5..55085b1c4a3acabe05fcc347cda5e7c6b6da2fe3 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/tag.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/tag.ml @@ -10,6 +10,30 @@ module type S = sig type 'a t (** Polymorphic tags *) + val create : unit -> 'a t + (** Create a new tag. *) + +end + +(** Minimum required signature for tags to typecheck Alt-Ergo's core/base theory. *) +module type Ae_Base = sig + + type term + (** The type of terms *) + + type 'a t + (** Polymorphic tags *) + + val ac : unit t + (** A flag (i.e. unit tag), indicating that the tagged term/formula + is to be considered as an associative and commutative term. *) + + val triggers : term list list t + (** Multi-triggers that can be added to quantified formulas *) + + val filters : term list t + (** Filters that can be added to quantified formulas *) + end (** Minium required signature for tags to typecheck smtlib's core/base theory. *) module type Smtlib_Base = sig @@ -25,7 +49,7 @@ module type Smtlib_Base = sig (** A tag used to named formulas in smtlib. Should correspond to the `:named` attribute. *) - val triggers : term list t + val triggers : term list list t (** Multi-triggers (typically annotated on the body of a quantified formula and not the quantified formula itself). 
*) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/term.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/term.ml index 0d238bc67e63a35b9d29e4fb5ca55f223fd66123..0c8acabc8066930cccc38db11d2b4bb420def716 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/term.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/term.ml @@ -91,6 +91,8 @@ module type Logic = sig val equiv_t : ?loc:location -> unit -> t val implied_t : ?loc:location -> unit -> t val implies_t : ?loc:location -> unit -> t + val pi_t : ?loc:location -> unit -> t + val sigma_t : ?loc:location -> unit -> t (** Standard logical connectives viewed as terms. [implies_t] is usual right implication, i.e [apply implies_t \[p; q\] ] is "p implies q", while [apply implied_t \[p; q \]] means "p is implied by q" or @@ -177,7 +179,9 @@ module type Logic = sig of a pattern and a match branch. *) val pi : ?loc:location -> t list -> t -> t + val par : ?loc:location -> t list -> t -> t val letin : ?loc:location -> t list -> t -> t + val letand : ?loc:location -> t list -> t -> t val forall : ?loc:location -> t list -> t -> t val exists : ?loc:location -> t list -> t -> t val lambda : ?loc:location -> t list -> t -> t @@ -189,8 +193,12 @@ module type Logic = sig - Pi is the polymorphic type quantification, for instance the polymorphic identity function has type: "Pi alpha. alpha -> alpha" - Letin is local binding, takes a list of equality of equivalences - whose left hand-side is a variable. + whose left hand-side is a variable. Letand is the parrallel version + of Letin. - Forall is universal quantification + - Par is universal quantification over type variables specifically + (i.e. the same as forall, but only for a list of type variables, + which thus may omit the [colon] annotations in the arguments). 
- Exists is existential quantification - Lambda is used for function construction - Choice is the choice operator, also called indefinite description, or @@ -354,6 +362,9 @@ module type Tff = sig type t (** The type of terms and term variables. *) + type path + (** The type of patsh to constants. *) + type ty type ty_var type ty_const @@ -380,6 +391,15 @@ module type Tff = sig val ty : t -> ty (** Return the type of the variable. *) + val get_tag : t -> 'a tag -> 'a option + (** Return the value bound to a tag (if any). *) + + val set_tag : t -> 'a tag -> 'a -> unit + (** Set the value bound to the tag. *) + + val unset_tag : t -> _ tag -> unit + (** Remove the binding to the given tag. *) + end (** A module for constant symbols that occur in terms. *) @@ -394,12 +414,15 @@ module type Tff = sig val arity : t -> int * int (** Returns the arity of a term constant. *) - val mk : string -> ty_var list -> ty list -> ty -> t - (** Create a polymorphic constant symbol. *) + val mk : path -> ty -> t + (** Create a constant symbol. *) - val tag : t -> 'a tag -> 'a -> unit + val set_tag : t -> 'a tag -> 'a -> unit (** Tag a constant. *) + val add_tag : t -> 'a list tag -> 'a -> unit + (** Add a value to the list of values bound to a tag. *) + end (** A module for Algebraic datatype constructors. *) @@ -437,7 +460,7 @@ module type Tff = sig val define_adt : ty_const -> ty_var list -> - (string * (ty * string option) list) list -> + (path * (ty * path option) list) list -> (Cstr.t * (ty * Const.t option) list) list (** [define_aft t vars cstrs] defines the type constant [t], parametrised over the type variables [ty_vars] as defining an algebraic datatypes with constructors @@ -465,7 +488,7 @@ module type Tff = sig *) val define_record : - ty_const -> ty_var list -> (string * ty) list -> Field.t list + ty_const -> ty_var list -> (path * ty) list -> Field.t list (** Define a (previously abstract) type to be a record type, with the given fields. 
*) exception Wrong_type of t * ty @@ -488,14 +511,22 @@ module type Tff = sig exception Field_missing of Field.t (** Field missing in a record expression. *) + exception Over_application of t list + (** Raised when an application was provided too many term arguments. The + extraneous arguments are returned by the exception. *) + + exception Bad_poly_arity of ty_var list * ty list + (** Raised when a polymorphic application does not have an + adequate number of arguments. *) + val ensure : t -> ty -> t (** Ensure that a given term has the given type. *) val of_var : Var.t -> t (** Create a term from a variable *) - val apply : Const.t -> ty list -> t list -> t - (** Polymorphic application. *) + val apply_cst : Const.t -> ty list -> t list -> t + (** Polymorphic application of a constant. *) val apply_cstr : Cstr.t -> ty list -> t list -> t (** Polymorphic application of a constructor. *) @@ -509,55 +540,18 @@ module type Tff = sig val record_with : t -> (Field.t * t) list -> t (** Create an updated record *) - val _true : t - val _false : t - (** Some usual formulas. *) - - val eq : t -> t -> t - (** Build the equality of two terms. *) - - val distinct : t list -> t - (** Distinct constraints on terms. *) - - val neg : t -> t - (** Negation. *) - val _and : t list -> t (** Conjunction of formulas *) - val _or : t list -> t - (** Disjunction of formulas *) - - val nand : t -> t -> t - (** Not-and *) - val nor : t -> t -> t - (** Not-or *) + val lam : ty_var list * Var.t list -> t -> t + (** Create a local function. *) - val imply : t -> t -> t - (** Implication *) + val all : ty_var list * Var.t list -> t -> t + (** Universally quantify the given formula over the type and terms variables. *) - val equiv : t -> t -> t - (** Equivalence *) - - val xor : t -> t -> t - (** Exclusive disjunction. *) - - val all : - ty_var list * Var.t list -> - ty_var list * Var.t list -> - t -> t - (** Universally quantify the given formula over the type and terms variables. 
- The first pair of arguments are the variables that are free in the resulting - quantified formula, and the second pair are the variables bound. *) - - val ex : - ty_var list * Var.t list -> - ty_var list * Var.t list -> - t -> t - (** Existencially quantify the given formula over the type and terms variables. - The first pair of arguments are the variables that are free in the resulting - quantified formula, and the second pair are the variables bound. *) + val ex : ty_var list * Var.t list -> t -> t + (** Existencially quantify the given formula over the type and terms variables. *) val bind : Var.t -> t -> t (** Bind a variable to an expressions. This function is called when typing @@ -566,25 +560,48 @@ module type Tff = sig let-binding being typed. *) val letin : (Var.t * t) list -> t -> t - (** Create a let-binding. This function is called after the body of the - let-binding has been typed. *) + (** Create a sequential let-binding. *) + + val letand : (Var.t * t) list -> t -> t + (** Create a parrallel let-binding. *) val pattern_match : t -> (t * t) list -> t (** [pattern_match scrutinee branches] creates a pattern match expression on the scrutinee with the given branches, each of the form [(pattern, body)] *) - val ite : t -> t -> t -> t - (** [ite condition then_t else_t] creates a conditional branch. *) - - val tag : t -> 'a tag -> 'a -> unit + val set_tag : t -> 'a tag -> 'a -> unit (** Annotate the given formula wiht the tag and value. *) + val add_tag : t -> 'a list tag -> 'a -> unit + (** Add a value to the list of values bound to a tag. *) + val fv : t -> ty_var list * Var.t list (** Returns the list of free variables in the formula. *) end +module type Thf = sig + + include Tff + + val apply : t -> ty list -> t list -> t + (** Polymorphic application. *) + +end + +(** Minimum required to type dimacs *) +module type Dimacs = sig + + type t + (** The type of terms *) + + val neg : t -> t + (** Logical negation. 
*) + +end + + (** Minimum required to type ae's tff *) module type Ae_Base = sig @@ -594,25 +611,154 @@ module type Ae_Base = sig val void : t (** The only value of type unit. *) + val eq : t -> t -> t + (** Build the equality of two terms. *) + + val _true : t + (** The smybol for [true] *) + + val _false : t + (** The symbol for [false] *) + + val neg : t -> t + (** Negation. *) + + val _or : t list -> t + (** Disjunction of formulas *) + + val _and : t list -> t + (** Disjunction of formulas *) + + val imply : t -> t -> t + (** Implication *) + + val equiv : t -> t -> t + (** Equivalence *) + + val xor : t -> t -> t + (** Exclusive disjunction. *) + + val ite : t -> t -> t -> t + (** [ite condition then_t else_t] creates a conditional branch. *) + + val distinct : t list -> t + (** Distinct constraints on terms. *) + end +module type Ae_Arith_Common = sig + + type t + (** The type of terms *) + + val minus : t -> t + (** Arithmetic unary minus/negation. *) + + val add : t -> t -> t + (** Arithmetic addition. *) + + val sub : t -> t -> t + (** Arithmetic substraction *) + + val mul : t -> t -> t + (** Arithmetic multiplication *) + + val pow : t -> t -> t + (** Arithmetic exponentiation *) + + val lt : t -> t -> t + (** Arithmetic "less than" comparison. *) + + val le : t -> t -> t + (** Arithmetic "less or equal" comparison. *) + + val gt : t -> t -> t + (** Arithmetic "greater than" comparison. *) + + val ge : t -> t -> t + (** Arithmetic "greater or equal" comparison. *) + +end (** Minimum required to type ae's arith *) module type Ae_Arith = sig type t - (** The type of terms *) + (** The type of terms. *) type ty (** The type of types. *) val ty : t -> ty - (** Type of a term. *) + (** Get the type of a term. 
*) + + val int : string -> t + (** Integer literals *) + + val real : string -> t + (** Real literals *) + + module Int : sig + include Ae_Arith_Common with type t := t + + val div_e : t -> t -> t + (** Euclidian division quotient *) + + val rem_e : t -> t -> t + (** Euclidian division remainder *) + + val to_real : t -> t + (** Conversion from an integer term to a real term. *) + + end + + module Real : sig + include Ae_Arith_Common with type t := t + + val div : t -> t -> t + (** Exact division on reals. *) + + end + +end + +module type Ae_Array = sig + + type t + (** The type of terms *) + + val select : t -> t -> t + (** [select arr idx] creates the get operation on functionnal + array [arr] for index [idx]. *) + + val store : t -> t -> t -> t + (** [store arr idx value] creates the set operation on + functional array [arr] for value [value] at index [idx]. *) + +end + +(** Minimum required to type ae's bitvectors *) +module type Ae_Bitv = sig + + type t + (** The type of terms *) + + val mk : string -> t + (** Create a bitvector litteral from a string representation. + The string should only contain characters '0' or '1'. *) + + val concat : t -> t -> t + (** Bitvector concatenation. *) + + val extract : int -> int -> t -> t + (** Bitvector extraction, using in that order, + the start and then end the position of the + bitvector to extract. *) end (** Minimum required to type tptp's tff *) -module type Tptp_Base = sig +module type Tptp_Tff_Core = sig type t (** The type of terms *) @@ -623,10 +769,124 @@ module type Tptp_Base = sig val _false : t (** The symbol for [false] *) + val neg : t -> t + (** Negation. 
*) + + val _or : t list -> t + (** Disjunction of formulas *) + + val _and : t list -> t + (** Conjunction of formulas *) + + val nand : t -> t -> t + (** Not-and *) + + val nor : t -> t -> t + (** Not-or *) + + val imply : t -> t -> t + (** Implication *) + + val implied : t -> t -> t + (** Implication *) + + val equiv : t -> t -> t + (** Equivalence *) + + val xor : t -> t -> t + (** Exclusive disjunction. *) + + val ite : t -> t -> t -> t + (** [ite condition then_t else_t] creates a conditional branch. *) + + val eq : t -> t -> t + (** Build the equality of two terms. *) + + val neq : t -> t -> t + (** Disequality. *) + + val distinct : t list -> t + (** Distinct constraints on terms. *) + +end + +module type Tptp_Thf_Core_Const = sig + + type t + (** Type for term constans *) + + val _true : t + (** The smybol for [true] *) + + val _false : t + (** The symbol for [false] *) + + val neg : t + (** Negation. *) + + val or_ : t + (** Binary disjunction of formulas *) + + val and_ : t + (** Binary conjunction of formulas *) + + val nand : t + (** Not-and *) + + val nor : t + (** Not-or *) + + val imply : t + (** Implication *) + + val implied : t + (** Reverse implication *) + + val equiv : t + (** Equivalence *) + + val xor : t + (** Exclusive disjunction. *) + + val ite : t + (** [ite condition then_t else_t] creates a conditional branch. *) + + val eq : t + (** Build the equality of two terms. *) + + val neq : t + (** Binary disequality. *) + + val pi : t + (** Higher-order encoding of universla quantification. *) + + val sigma : t + (** Higher-order encoding of existancial quantification. *) + + end + +(** Minimum required to type tptp's thf *) +module type Tptp_Thf_Core = sig + + type t + (** The type of terms *) + + type ty + (** The type of types *) + + module Const : Tptp_Thf_Core_Const + (** Constants *) + + val of_cst : Const.t -> t + (** Create a term out of aconstant. *) + + val distinct : t list -> t + (** Distinct constraints on terms. 
*) + end (** Common signature for tptp arithmetics *) -module type Tptp_Arith_Common = sig +module type Tptp_Tff_Arith_Common = sig type t (** The type of terms *) @@ -703,7 +963,7 @@ module type Tptp_Arith_Common = sig end (** Signature required by terms for typing tptp arithmetic. *) -module type Tptp_Arith = sig +module type Tptp_Tff_Arith = sig type t (** The type of terms. *) @@ -724,18 +984,18 @@ module type Tptp_Arith = sig (** Real literals *) module Int : sig - include Tptp_Arith_Common with type t := t + include Tptp_Tff_Arith_Common with type t := t end module Rat : sig - include Tptp_Arith_Common with type t := t + include Tptp_Tff_Arith_Common with type t := t val div : t -> t -> t (** Exact division on rationals. *) end module Real : sig - include Tptp_Arith_Common with type t := t + include Tptp_Tff_Arith_Common with type t := t val div : t -> t -> t (** Exact division on reals. *) @@ -752,13 +1012,49 @@ module type Smtlib_Base = sig type cstr (** The type of ADT constructor *) - val eqs : t list -> t - (** Create a chain of equalities. *) - val cstr_tester : cstr -> t -> t (** Given a constructor [c] and a term [t], returns a terms that evaluates to [true] iff [t] has [c] as head constructor. *) + val _true : t + (** The smybol for [true] *) + + val _false : t + (** The symbol for [false] *) + + val neg : t -> t + (** Negation. *) + + val _or : t list -> t + (** Disjunction of formulas *) + + val _and : t list -> t + (** Disjunction of formulas *) + + val nand : t -> t -> t + (** Not-and *) + + val nor : t -> t -> t + (** Not-or *) + + val imply : t -> t -> t + (** Implication *) + + val equiv : t -> t -> t + (** Equivalence *) + + val xor : t -> t -> t + (** Exclusive disjunction. *) + + val ite : t -> t -> t -> t + (** [ite condition then_t else_t] creates a conditional branch. *) + + val eq : t -> t -> t + (** Create a chain of equalities. *) + + val distinct : t list -> t + (** Distinct constraints on terms. 
*) + end (** Common signature for first-order arithmetic *) @@ -809,7 +1105,7 @@ module type Smtlib_Int = sig (** Euclidian division. See Smtlib theory for a full description. *) val rem : t -> t -> t - (** Euclidiane integer remainder See Smtlib theory for a full description. *) + (** Euclidian integer remainder See Smtlib theory for a full description. *) val abs : t -> t (** Arithmetic absolute value. *) @@ -858,9 +1154,8 @@ module type Smtlib_Real_Int = sig val is_int : t -> t (** Arithmetic predicate, true on reals that are also integers. *) - val to_int : t -> t - (** Partial function from real to integers. Only has defined semantics - when {!is_int} is true. *) + val floor_to_int : t -> t + (** Greatest integer smaller than the given real *) end @@ -1367,3 +1662,75 @@ module type Smtlib_String = sig end +module type Zf_Base = sig + + type t + (** The type of terms *) + + val _true : t + (** The smybol for [true] *) + + val _false : t + (** The symbol for [false] *) + + val neg : t -> t + (** Negation. *) + + val _or : t list -> t + (** Disjunction of formulas *) + + val _and : t list -> t + (** Conjunction of formulas *) + + val imply : t -> t -> t + (** Logical Implication. *) + + val equiv : t -> t -> t + (** Logical Equivalence. *) + + val eq : t -> t -> t + (** Build the equality of two terms. *) + + val neq : t -> t -> t + (** Disequality. *) + + val ite : t -> t -> t -> t + (** If-then-else *) + +end + +module type Zf_Arith = sig + + type t + (** The type of terms *) + + val int : string -> t + (** Integer literals *) + + module Int : sig + val minus : t -> t + (** Arithmetic unary minus/negation. *) + + val add : t -> t -> t + (** Arithmetic addition. *) + + val sub : t -> t -> t + (** Arithmetic substraction *) + + val mul : t -> t -> t + (** Arithmetic multiplication *) + + val lt : t -> t -> t + (** Arithmetic "less than" comparison. *) + + val le : t -> t -> t + (** Arithmetic "less or equal" comparison. 
*) + + val gt : t -> t -> t + (** Arithmetic "greater than" comparison. *) + + val ge : t -> t -> t + (** Arithmetic "greater or equal" comparison. *) + end + +end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/ty.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/ty.ml index 78980da029e5e7baa1e11865da934f8bf039548b..5750d46e9d1dfc093b421920e9b2e1b479e135bc 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/ty.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/interface/ty.ml @@ -15,9 +15,20 @@ module type Tff = sig type t (** The type of types. *) + type path + (** The type of paths to constants. *) + type 'a tag (** A type for tags to attach to arbitrary types. *) + val print : Format.formatter -> t -> unit + (** Printing function. *) + + exception Prenex_polymorphism of t + (** Raised when the type provided is polymorphic, but occurred in a + place where polymorphic types are forbidden by prenex/rank-1 + polymorphism. *) + (** A module for variables that occur in types. *) module Var : sig @@ -27,9 +38,33 @@ module type Tff = sig val compare : t -> t -> int (** Comparison function on variables. *) + val print : Format.formatter -> t -> unit + (** Printing function. *) + val mk : string -> t (** Create a new type variable with the given name. *) + val wildcard : unit -> t + (** Create a fresh type wildcard. *) + + val is_wildcard : t -> bool + (** Is the variable a type wildcard ? *) + + val set_tag : t -> 'a tag -> 'a -> unit + (** Set the value bound to a tag. *) + + val get_tag : t -> 'a tag -> 'a option + (** Return the value bound to a tag (if any). *) + + val add_tag : t -> 'a list tag -> 'a -> unit + (** Add a value to the list of values bound to a tag. *) + + val get_tag_list : t -> 'a list tag -> 'a list + (** Returns all the values tagged on a variable. *) + + val unset_tag : t -> _ tag -> unit + (** Remove the binding to a tag. *) + end (** A module for constant symbols the occur in types. 
*) @@ -41,17 +76,26 @@ module type Tff = sig val compare : t -> t -> int (** Comparison function on type constants. *) + val print : Format.formatter -> t -> unit + (** Printing function. *) + val arity : t -> int (** Return the arity of the given symbol. *) - val mk : string -> int -> t + val mk : path -> int -> t (** Create a type constant with the given arity. *) - val tag : t -> 'a tag -> 'a -> unit - (** Tag a variable. *) + val set_tag : t -> 'a tag -> 'a -> unit + (** Set the value bound to a tag. *) + + val add_tag : t -> 'a list tag -> 'a -> unit + (** Add a value to the list of values bound to a tag. *) end + val equal : t -> t -> bool + (** Test equality of types. *) + val prop : t (** The type of propositions *) @@ -61,12 +105,49 @@ module type Tff = sig val apply : Const.t -> t list -> t (** Application for types. *) - val wildcard : unit -> t - (** Create a fresh type wildcard. *) + val arrow : t list -> t -> t + (** Create an arrow type. *) + + val pi : Var.t list -> t -> t + (** Create a polymorphic type. *) + + val fv : t -> Var.t list + (** Returns the list of free_variables in the type. *) + + val set_wildcard : Var.t -> t -> unit + (** Set a wildcard. *) - val tag : t -> 'a tag -> 'a -> unit + val add_wildcard_hook : hook:(Var.t -> t -> unit) -> Var.t -> unit + (** Add a hook to a wildcard, the hook will be run *) + + val set_tag : t -> 'a tag -> 'a -> unit (** Annotate the given type with the given tag and value. *) + val add_tag : t -> 'a list tag -> 'a -> unit + (** Add a value to the list of values bound to a tag. *) + + type view = private [> + | `Wildcard of Var.t + | `Arrow of t list * t + | `Pi of Var.t list * t + ] + (** Partial views for types. *) + + val view : t -> view + (** Partial view of a type. *) + +end + +module type Thf = sig + + include Tff + + val arrow : t list -> t -> t + (** Create a function type. *) + + val pi : Var.t list -> t -> t + (** Create a rank-1/prenex polymorphc type. 
*) + end (** Signature required by types for typing ae *) @@ -83,15 +164,52 @@ module type Ae_Base = sig end - -(** Signature required by types for typing tptp *) +(** Signature required by types for typing ae's arithmetic *) module type Ae_Arith = sig type t - (** The type of types. *) + (** The type of types *) + + val int : t + (** The type of integers *) + + val real : t + (** The type of reals *) + + type view = private [> + | `Int + | `Real + ] + (** Partial view for types. *) + + val view : t -> view + (** Partial view of a type. *) + +end + +(** Signature required by types for typing ae arrays *) +module type Ae_Array = sig + + type t + (** The type of types *) val int : t - (** The type of integers. *) + (** The type of integers, used as a default type of indexes + when no type is provided *) + + val array : t -> t -> t + (** The type of functionnal arrays from one type to another. *) + +end + +(** Signature required by types for typing ae's bitvectors *) +module type Ae_Bitv = sig + + type t + (** The type of types *) + + val bitv : int -> t + (** Create a fixed size bitvector type. *) end @@ -164,7 +282,7 @@ module type Smtlib_Real = sig end -(** Signature required for types for typing smtlib real_int arithmetic. *) +(** Signature required by types for typing smtlib real_int arithmetic. 
*) module type Smtlib_Real_Int = sig include Smtlib_Int @@ -183,7 +301,7 @@ module type Smtlib_Real_Int = sig end -(** Signature required for types for typing smtlib arrays *) +(** Signature required by types for typing smtlib arrays *) module type Smtlib_Array = sig type t @@ -207,7 +325,7 @@ module type Smtlib_Array = sig end -(** Signature required for types for typing smtlib bitvectors *) +(** Signature required by types for typing smtlib bitvectors *) module type Smtlib_Bitv = sig type t @@ -218,7 +336,7 @@ module type Smtlib_Bitv = sig end -(** Signature required for types for typing smtlib bitvectors *) +(** Signature required by types for typing smtlib bitvectors *) module type Smtlib_Float = sig type t @@ -247,7 +365,7 @@ module type Smtlib_Float = sig end -(* Signature required for types for typing the smtlib string theory *) +(* Signature required by types for typing the smtlib string theory *) module type Smtlib_String = sig type t @@ -263,3 +381,28 @@ module type Smtlib_String = sig (** The type of regular languages over strings *) end + +(** Signature required by types for typing tptp *) +module type Zf_Base = sig + + type t + (** The type of types *) + + val prop : t + (** The type of propositions. *) + +end + +(** Signature required by types for typing tptp *) +module type Zf_Arith = sig + + type t + (** The type of types *) + + val int : t + (** The type of integers *) + +end + + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/ast.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/ast.ml new file mode 100644 index 0000000000000000000000000000000000000000..af816018775ff58e6b8fbcfaba60c762ff431903 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/ast.ml @@ -0,0 +1,224 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + +module type Id = sig + + type t + (** The type of identifiers *) + + type namespace + (** The type for namespaces. 
*) + + val var : namespace + (** Used for type variables. *) + + val term : namespace + (** Usual namespace, used for temrs, types and propositions. *) + + val decl : namespace + (** Names used to refer to tptp phrases. These are used + in declarations and include statement. *) + + val track : namespace + (** Namespace used to tag and identify sub-terms occuring in files. *) + + val mk : namespace -> string -> t + (** Make an identifier *) + + val tracked : track:t -> namespace -> string -> t + (** Make an identifier with an additional name. *) + +end + +module type Term = sig + + type t + (** The type of terms. *) + + type id + (** The type of identifiers *) + + type location + (** The type of locations attached to terms. *) + + val prop : ?loc:location -> unit -> t + val bool : ?loc:location -> unit -> t + val ty_unit : ?loc:location -> unit -> t + val ty_int : ?loc:location -> unit -> t + val ty_real : ?loc:location -> unit -> t + val ty_bitv : ?loc:location -> int -> t + (** Builtin types. *) + + val void : ?loc:location -> unit -> t + val true_ : ?loc:location -> unit -> t + val false_ : ?loc:location -> unit -> t + (** Builtin constants. *) + + val not_ : ?loc:location -> t -> t + val and_ : ?loc:location -> t list -> t + val or_ : ?loc:location -> t list -> t + val xor : ?loc:location -> t -> t -> t + val imply : ?loc:location -> t -> t -> t + val equiv : ?loc:location -> t -> t -> t + (** Propositional builtins. *) + + val int : ?loc:location -> string -> t + val real : ?loc:location -> string -> t + val hexa : ?loc:location -> string -> t + (** Numerical constant creation. 
*) + + val uminus : ?loc:location -> t -> t + val add : ?loc:location -> t -> t -> t + val sub : ?loc:location -> t -> t -> t + val mult : ?loc:location -> t -> t -> t + val div : ?loc:location -> t -> t -> t + val mod_ : ?loc:location -> t -> t -> t + val int_pow : ?loc:location -> t -> t -> t + val real_pow : ?loc:location -> t -> t -> t + val lt : ?loc:location -> t -> t -> t + val leq : ?loc:location -> t -> t -> t + val gt : ?loc:location -> t -> t -> t + val geq : ?loc:location -> t -> t -> t + (** Arithmetic builtins. *) + + val eq : ?loc:location -> t -> t -> t + val neq : ?loc:location -> t list -> t + (** Equality and disequality. *) + + val array_get : ?loc:location -> t -> t -> t + val array_set : ?loc:location -> t -> t -> t -> t + (** Array primitives. *) + + val bitv : ?loc:location -> string -> t + (** Bitvector litteral. *) + + val bitv_extract : ?loc:location -> t -> int -> int -> t + (** Bitvoector extraction. + TODO: document meaning of the itnegers indexes. *) + + val bitv_concat : ?loc:location -> t -> t -> t + (** Bitvector concatenation. *) + + val const : ?loc:location -> id -> t + (** Constants, i.e non predefined symbols. This includes both constants + defined by theories, defined locally in a problem, and also quantified variables. *) + + val colon : ?loc:location -> t -> t -> t + (** Juxtaposition of terms, used to annotate terms with their type. *) + + val apply : ?loc:location -> t -> t list -> t + (** Application of terms (as well as types). *) + + val arrow : ?loc:location -> t -> t -> t + (** Create a function type. *) + + val ite : ?loc:location -> t -> t -> t -> t + (** Conditional terms. *) + + val forall : ?loc:location -> t list -> t -> t + val exists : ?loc:location -> t list -> t -> t + (** Universal and existential quantifications. *) + + val letin : ?loc:location -> t list -> t -> t + (** Let-binding. *) + + val match_ : ?loc:location -> t -> (t * t) list -> t + (** Pattern matching. 
The first term is the term to match, + and each tuple in the list is a match case, which is a pair + of a pattern and a match branch. *) + + val record : ?loc:location -> t list -> t + (** Create a record expression, with a list of equalities of the form + "label = expr". *) + + val record_with : ?loc:location -> t -> t list -> t + (** Record update, of the form "s with [label = expr, ...]". *) + + val record_access : ?loc:location -> t -> id -> t + (** Record access for the field given by the identifier. *) + + val adt_check : ?loc:location -> t -> id -> t + (** Create a check agains the given adt constructor. *) + + val adt_project : ?loc:location -> t -> id -> t + (** Create a projection for the given field of an adt constructor. *) + + val check : ?loc:location -> t -> t + (** Create a term to "check" a formula. + TODO: ask @iguernlala about this. *) + + val cut : ?loc:location -> t -> t + (** Create a cut. + TODO: ask @iguernlala about this. *) + + val in_interval : ?loc:location -> t -> (t * bool) -> (t * bool) -> t + (** Create a trigger for the given term/variable being inside + of a given interval, which is given as a lower bound, and an upper bound. + Each bound contains an expression for the bound value, as well as a boolean + indicating whether the bound is strict or not. *) + + val maps_to : ?loc:location -> id -> t -> t + (** Used in trigger creation. *) + + val trigger : ?loc:location -> t list -> t + (** Create a (multi) trigger. *) + + val triggers : ?loc:location -> t -> t list -> t + (** Annotate a term (generally a quantified formula), with a list of triggers. *) + + val filters : ?loc:location -> t -> t list -> t + (** Annotate a term (genrally a quantified formula) with a list of filters. *) + + val tracked : ?loc:location -> id -> t -> t + (** Annotate a term with an id for tracking purposes. *) + +end + +module type Statement = sig + + type t + (** The type of statements. 
*) + + type id + (** The type of identifiers *) + + type term + (** The type of terms used in statements. *) + + type location + (** The type of locations attached to statements. *) + + val logic : ?loc:location -> ac:bool -> id list -> term -> t + (** Function declaration. *) + + val record_type : ?loc:location -> id -> term list -> (id * term) list -> t + (** Record type definition. *) + + val fun_def : ?loc:location -> id -> term list -> term list -> term -> term -> t + (** Function definition. *) + + val abstract_type : ?loc:location -> id -> term list -> t + (** Create a new abstract type, quantified over the given type variables. *) + + val algebraic_type : ?loc:location -> id -> term list -> (id * term list) list -> t + (** An algebraic datatype definition. *) + + val rec_types : ?loc:location -> t list -> t + (** Pack a list of mutually recursive algebraic datatypes together. *) + + val axiom : ?loc:location -> id -> term -> t + (** Create an axiom. *) + + val case_split : ?loc:location -> id -> term -> t + (** Create a case split. *) + + val theory : ?loc:location -> id -> id -> t list -> t + (** Create a theory, extending another, with the given list of declarations. *) + + val rewriting : ?loc:location -> id -> term list -> t + (** Create a (set of ?) rewriting rule(s). *) + + val prove_goal : ?loc:location -> id -> term -> t + (** Goal declaration. *) + +end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/dolmen_ae.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/dolmen_ae.ml index f33dcacc024dec214a79a36b8b5c6790a4528f02..208fbd6a5f440197514f1886eb664c88aceb79bb 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/dolmen_ae.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/dolmen_ae.ml @@ -1,9 +1,9 @@ (* This file is free software, part of dolmen. 
See file "LICENSE" formore information *) -module type Id = Ast_ae.Id -module type Term = Ast_ae.Term -module type Statement = Ast_ae.Statement +module type Id = Ast.Id +module type Term = Ast.Term +module type Statement = Ast.Statement module Make (L : Dolmen_intf.Location.S) @@ -11,10 +11,10 @@ module Make (T : Term with type location := L.t and type id := I.t) (S : Statement with type location := L.t and type id := I.t and type term := T.t) = Dolmen_std.Transformer.Make(L)(struct - type token = Tokens_ae.token + type token = Tokens.token type statement = S.t let env = [] let incremental = false let error s = Syntax_messages.message s - end)(LexAe)(ParseAe.Make(L)(I)(T)(S)) + end)(Lexer)(Parser.Make(L)(I)(T)(S)) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/dolmen_ae.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/dolmen_ae.mli index 94fc1fd455f0e2f91a140af9932f2f89af7c4429..fb25cdb321bffc8116f1219d28951521efdde3b4 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/dolmen_ae.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/dolmen_ae.mli @@ -3,9 +3,9 @@ (** TPTP language input *) -module type Id = Ast_ae.Id -module type Term = Ast_ae.Term -module type Statement = Ast_ae.Statement +module type Id = Ast.Id +module type Term = Ast.Term +module type Statement = Ast.Statement (** Implementation requirement for the TPTP format. *) module Make @@ -13,6 +13,6 @@ module Make (I : Id) (T : Term with type location := L.t and type id := I.t) (S : Statement with type location := L.t and type id := I.t and type term := T.t) : - Dolmen_intf.Language.S with type statement = S.t + Dolmen_intf.Language.S with type statement = S.t and type file := L.file (** Functor to generate a parser for the TPTP format. 
*) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/dune b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/dune index cfd516aa032b36f6eae60a891b46b222f3cd6fdb..a2c375dcbe612156ac66ba0318d52389cd938843 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/dune +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/dune @@ -1,65 +1,12 @@ -(ocamllex (modules lexAe)) - -(menhir - (flags (--only-tokens)) - (modules tokens_ae) -) - -(menhir - (infer true) - (flags (--explain --table --external-tokens Tokens_ae)) - (modules tokens_ae parseAe) - (merge_into parseAe) -) - -(rule - (target syntax_messages.ml) - (deps (:tokens tokens_ae.mly) - (:parser parseAe.mly) - (:msg syntax.messages)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_ae %{tokens} - %{parser} --base %{parser} --compile-errors %{msg}))) -) - +; Language library definition (library (name dolmen_ae) (public_name dolmen.ae) + (instrumentation (backend bisect_ppx)) (libraries dolmen_std dolmen_intf menhirLib) - (modules Tokens_ae LexAe ParseAe Ast_ae Syntax_messages Dolmen_ae) -) - -; Convenience rule to generate a fresh messages file, -; and update an already existing one. 
-(rule - (target new.messages) - (mode promote-until-clean) - (deps (:tokens tokens_ae.mly) - (:parser parseAe.mly)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_ae %{tokens} - %{parser} --base %{parser} --list-errors))) + (modules Dolmen_ae Tokens Lexer Parser Ast Syntax_messages) ) -(rule - (target updated.messages) - (mode promote-until-clean) - (deps (:tokens tokens_ae.mly) - (:parser parseAe.mly) - (:msg syntax.messages)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_ae %{tokens} - %{parser} --base %{parser} --update-errors %{msg}))) -) - -; Additional rule to add to runtest a check that the messages file is up-to-date -(rule - (alias runtest) - (deps (:tokens tokens_ae.mly) - (:parser parseAe.mly) - (:new new.messages) - (:msg syntax.messages)) - (action (run menhir --external-tokens Tokens_ae %{tokens} - %{parser} --base %{parser} --compare-errors %{new} --compare-errors %{msg})) -) +; Common include +(include ../dune.common) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/lexer.mll b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/lexer.mll new file mode 100644 index 0000000000000000000000000000000000000000..3ca9e312a1f1788b07522fd75b9fdadfdd7d3e21 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/lexer.mll @@ -0,0 +1,229 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information. 
*) + +(** {1 Alt-ergo Lexer} *) + +{ + exception Error + + module T = Dolmen_std.Tok + + open Tokens + + (* Token printing *) + + let reserved s = + T.descr s + ~kind:"reserved word" + ~hint:"reserved words cannot be used as identifiers" + + let descr token : T.descr = + match (token : token) with + | ID s -> T.descr ~kind:"identifier" s + | QM_ID s -> T.descr ~kind:"variable id" s + | INTEGER s -> T.descr ~kind:"integer" s + | DECIMAL s -> T.descr ~kind:"decimal" s + | HEXADECIMAL s -> T.descr ~kind:"hexadecimal" s + | STRING s -> T.descr ~kind:"string" s + | MATCH -> reserved "match" + | WITH -> reserved "with" + | THEORY -> reserved "theory" + | EXTENDS -> reserved "extends" + | END -> reserved "end" + | QM -> reserved "?" + | AND -> reserved "and" + | LEFTARROW -> reserved "<-" + | RIGHTARROW -> reserved "->" + | AC -> reserved "ac" + | AT -> reserved "@" + | AXIOM -> reserved "axiom" + | CASESPLIT -> reserved "case_split" + | REWRITING -> reserved "rewriting" + | BAR -> reserved "|" + | HAT -> reserved "^" + | BOOL -> reserved "bool" + | COLON -> reserved ":" + | COMMA -> reserved "," + | PV -> reserved ";" + | DISTINCT -> reserved "distinct" + | DOT -> reserved "." 
+ | SHARP -> reserved "#" + | ELSE -> reserved "else" + | OF -> reserved "of" + | EOF -> T.descr ~kind:"end of file token" "" + | EQUAL -> reserved "equal" + | EXISTS -> reserved "exists" + | FALSE -> reserved "false" + | VOID -> reserved "void" + | FORALL ->reserved "forall" + | FUNC -> reserved "function" + | GE -> reserved ">=" + | GOAL -> reserved "goal" + | GT -> reserved ">" + | CHECK -> reserved "check" + | CUT -> reserved "cut" + | IF -> reserved "if" + | IN -> reserved "in" + | INT -> reserved "int" + | BITV -> reserved "bitv" + | MAPS_TO -> reserved "|->" + | LE -> reserved "<=" + | LET -> reserved "let" + | LEFTPAR -> reserved "(" + | LEFTSQ -> reserved "[" + | LEFTBR -> reserved "{" + | LOGIC -> reserved "logic" + | LRARROW -> reserved "<->" + | XOR -> reserved "xor" + | LT -> reserved "<" + | MINUS -> reserved "-" + | NOT -> reserved "not" + | NOTEQ -> reserved "<>" + | OR -> reserved "or" + | PERCENT -> reserved "%" + | PLUS -> reserved "+" + | PRED -> reserved "predicate" + | PROP -> reserved "prop" + | QUOTE -> reserved "'" + | REAL -> reserved "real" + | UNIT -> reserved "unit" + | RIGHTPAR -> reserved ")" + | RIGHTSQ -> reserved "]" + | RIGHTBR -> reserved "}" + | SLASH -> reserved "/" + | POW -> reserved "**" + | POWDOT -> reserved "**." + | THEN -> reserved "then" + | TIMES -> reserved "*" + | TRUE -> reserved "true" + | TYPE -> reserved "type" + + (* Token parsing *) + + let escaped_char = function + | 'n' -> '\n' + | 'r' -> '\r' + | 't' -> '\t' + | c -> c + +} + +let letter = ['a'-'z' 'A'-'Z'] +let digit = ['0'-'9'] +let identifier = (letter | '_') (letter | '_' | digit | '?' | '\'')* + +let integer = digit+ +let signed_integer = ['-' '+']? integer +let exp = ['e' 'E'] signed_integer +let real_exp = digit+ exp +let decimal = (digit+ '.' digit*) | (digit* '.' digit+) +let real_dec = decimal exp? +let real = real_exp | real_dec +let hex = digit | ['a'-'f''A'-'F'] +let hex_exp = ['p' 'P'] signed_integer +let real_hex = "0x" hex+ '.' 
hex* hex_exp + +rule token newline = parse + | '\n' { newline lexbuf; token newline lexbuf } + | [' ' '\t' '\r']+ { token newline lexbuf } + | '?' { QM } + | '?' identifier as id { QM_ID id } + | identifier as i { match i with + | "ac" -> AC + | "and" -> AND + | "axiom" -> AXIOM + | "bitv" -> BITV + | "bool" -> BOOL + | "case_split" -> CASESPLIT + | "check" -> CHECK + | "cut" -> CUT + | "distinct" -> DISTINCT + | "else" -> ELSE + | "end" -> END + | "exists" -> EXISTS + | "extends" -> EXTENDS + | "false" -> FALSE + | "forall" -> FORALL + | "function" -> FUNC + | "goal" -> GOAL + | "if" -> IF + | "in" -> IN + | "int" -> INT + | "let" -> LET + | "logic" -> LOGIC + | "not" -> NOT + | "or" -> OR + | "xor" -> XOR + | "predicate" -> PRED + | "prop" -> PROP + | "real" -> REAL + | "rewriting" -> REWRITING + | "then" -> THEN + | "theory" -> THEORY + | "true" -> TRUE + | "type" -> TYPE + | "unit" -> UNIT + | "void" -> VOID + | "match" -> MATCH + | "with" -> WITH + | "of" -> OF + | _ -> ID i + } + | integer as s { INTEGER s } + | real as s { DECIMAL s } + | real_hex as s { HEXADECIMAL s } + | "(*" { parse_comment newline lexbuf; token newline lexbuf } + | "'" { QUOTE } + | "," { COMMA } + | ";" { PV } + | "(" { LEFTPAR } + | ")" { RIGHTPAR } + | ":" { COLON } + | "->" { RIGHTARROW } + | "<-" { LEFTARROW } + | "<->" { LRARROW } + | "=" { EQUAL } + | "<" { LT } + | "<=" { LE } + | ">" { GT } + | ">=" { GE } + | "<>" { NOTEQ } + | "+" { PLUS } + | "-" { MINUS } + | "*" { TIMES } + | "**." { POWDOT } + | "**" { POW } + | "/" { SLASH } + | "%" { PERCENT } + | "@" { AT } + | "." 
{ DOT } + | "#" { SHARP } + | "[" { LEFTSQ } + | "]" { RIGHTSQ } + | "{" { LEFTBR } + | "}" { RIGHTBR } + | "|" { BAR } + | "^" { HAT } + | "|->" { MAPS_TO } + | "\"" { parse_string newline (Buffer.create 1024) lexbuf } + | eof { EOF } + | _ { raise Error } + +and parse_comment newline = parse + | "*)" { () } + | "(*" { parse_comment newline lexbuf; parse_comment newline lexbuf } + | eof { raise Error } + | _ as c { if c = '\n' then newline lexbuf; parse_comment newline lexbuf } + +and parse_string newline str_buf = parse + | "\"" { STRING (Buffer.contents str_buf) } + | "\\" (_ as c) { Buffer.add_char str_buf (escaped_char c); + parse_string newline str_buf lexbuf } + | '\n' { newline lexbuf; + Buffer.add_char str_buf '\n'; + parse_string newline str_buf lexbuf } + | eof { raise Error } + | _ as c { Buffer.add_char str_buf c; + parse_string newline str_buf lexbuf } + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/parser.mly b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/parser.mly new file mode 100644 index 0000000000000000000000000000000000000000..8b750dd1b6abb854a0e0776521d61e5a42a0c1f1 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/parser.mly @@ -0,0 +1,564 @@ + +(* This file is free software, part of dolmem. See file "LICENSE" for more information *) + +%parameter <L : Dolmen_intf.Location.S> +%parameter <I : Ast.Id> +%parameter <T : Ast.Term + with type location := L.t and type id := I.t> +%parameter <S : Ast.Statement + with type location := L.t and type id := I.t and type term := T.t> + +%start <S.t list> file +%start <S.t option> input + +%% + +file: + | l=decl* EOF { l } + +/* The current syntax has no clear delimiters to denote the end + of declarations resulting in a lot of end-of-stream conflicts. 
+ This prevents incremental parsing from working correctly, + hence the assert false */ +input: + | EOF { assert false } +/* this declaration creates end-of-stream conflicts + | decl { assert false } +*/ + +/* Identifiers */ + +raw_ident: + | id=ID + { (fun ns -> I.mk ns id) } + +decl_ident: + | id=raw_ident + { id I.decl } + +ident: + | id=raw_ident + { let loc = L.mk_pos $startpos $endpos in + T.const ~loc (id I.term) } + +raw_named_ident: + | id=ID + { I.mk I.term id } + | id=ID str=STRING + { let track = I.mk I.track str in + I.tracked ~track I.term id } + +named_ident: + | id=raw_named_ident + { let loc = L.mk_pos $startpos $endpos in + T.const ~loc id } + + +/* Binders */ + +logic_binder: + | v=ident COLON ty=primitive_type + { let loc = L.mk_pos $startpos $endpos in + T.colon ~loc v ty } + +multi_logic_binder: + | vars=separated_nonempty_list(COMMA, named_ident) COLON ty=primitive_type + { let loc = L.mk_pos $startpos $endpos in + List.map (fun x -> T.colon ~loc x ty) vars } + + +/* Type variables */ + +type_var: + | QUOTE id=ID + { let loc = L.mk_pos $startpos $endpos in + let v = I.mk I.var ("'" ^ id) in + T.const ~loc v } + +type_vars: + | { [] } + | v=type_var + { [v] } + | LEFTPAR l=separated_nonempty_list(COMMA, type_var) RIGHTPAR + { l } + + + +/* Type Expressions */ + +primitive_type: + | BOOL + { let loc = L.mk_pos $startpos $endpos in + T.bool ~loc () } + | UNIT + { let loc = L.mk_pos $startpos $endpos in + T.ty_unit ~loc () } + | INT + { let loc = L.mk_pos $startpos $endpos in + T.ty_int ~loc () } + | REAL + { let loc = L.mk_pos $startpos $endpos in + T.ty_real ~loc () } + + | BITV LEFTSQ sz=INTEGER RIGHTSQ + { let loc = L.mk_pos $startpos $endpos in + let n = + (* The lexer should guarantee that the length of a bitv is + * a syntactically correct integer. 
*) + match int_of_string sz with + | i -> i + | exception Invalid_argument _ -> assert false + in + T.ty_bitv ~loc n } + + | c=ident + { let loc = L.mk_pos $startpos $endpos in + T.apply ~loc c [] } + + | v=type_var + { v } + + | arg=primitive_type c=ident + { let loc = L.mk_pos $startpos $endpos in + T.apply ~loc c [arg] } + + | LEFTPAR args=separated_nonempty_list(COMMA, primitive_type) RIGHTPAR c=ident + { let loc = L.mk_pos $startpos $endpos in + T.apply ~loc c args } + +primitive_type_or_prop: + | ty=primitive_type + { ty } + | PROP + { let loc = L.mk_pos $startpos $endpos in + T.prop ~loc () } + +logic_type: + | ty=primitive_type_or_prop + { ty } + | l=separated_list(COMMA, primitive_type) RIGHTARROW ret=primitive_type_or_prop + { let loc = L.mk_pos $startpos $endpos in + List.fold_right (T.arrow ~loc) l ret } + + +/* Main Expression language */ + +lexpr: + | e=simple_expr + { e } + + /* Unary Operators */ + + | NOT p=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.not_ ~loc p } + | MINUS x=lexpr %prec uminus + { let loc = L.mk_pos $startpos $endpos in + T.uminus ~loc x } + + + /* Binary Operators */ + + | a=lexpr PLUS b=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.add ~loc a b } + | a=lexpr MINUS b=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.sub ~loc a b } + | a=lexpr TIMES b=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.mult ~loc a b } + | a=lexpr SLASH b=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.div ~loc a b } + | a=lexpr PERCENT b=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.mod_ ~loc a b } + | a= lexpr POW b=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.int_pow ~loc a b } + | a=lexpr POWDOT b=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.real_pow ~loc a b } + + | p=lexpr AND q=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.and_ ~loc [p; q] } + | p=lexpr OR q=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.or_ ~loc [p; q] } + | p=lexpr XOR q=lexpr + { let loc = L.mk_pos 
$startpos $endpos in + T.xor ~loc p q } + | p=lexpr LRARROW q=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.equiv ~loc p q } + | p=lexpr RIGHTARROW q=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.imply ~loc p q } + + | a=lexpr LT b=lexpr %prec prec_relation + { let loc = L.mk_pos $startpos $endpos in + T.lt ~loc a b } + | a=lexpr LE b=lexpr %prec prec_relation + { let loc = L.mk_pos $startpos $endpos in + T.leq ~loc a b } + | a=lexpr GT b=lexpr %prec prec_relation + { let loc = L.mk_pos $startpos $endpos in + T.gt ~loc a b } + | a=lexpr GE b=lexpr %prec prec_relation + { let loc = L.mk_pos $startpos $endpos in + T.geq ~loc a b } + + | a=lexpr EQUAL b=lexpr %prec prec_relation + { let loc = L.mk_pos $startpos $endpos in + T.eq ~loc a b } + | a=lexpr NOTEQ b=lexpr %prec prec_relation + { let loc = L.mk_pos $startpos $endpos in + T.neq ~loc [a; b] } + + + /* Bit Vectors */ + + | LEFTSQ BAR c=INTEGER BAR RIGHTSQ + { let loc = L.mk_pos $startpos $endpos in + T.bitv ~loc c } + | e=lexpr HAT LEFTBR i=INTEGER COMMA j=INTEGER RIGHTBR + { let loc = L.mk_pos $startpos $endpos in + let i, j = + match int_of_string i, int_of_string j with + | i, j -> i, j + | exception Invalid_argument _ -> assert false + in + T.bitv_extract ~loc e i j } + | e=lexpr AT f=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.bitv_concat ~loc e f } + + + /* Predicates/Function Calls */ + + | DISTINCT LEFTPAR l=list2_lexpr_sep_comma RIGHTPAR + { let loc = L.mk_pos $startpos $endpos in + T.neq ~loc l } + + | IF cond=lexpr THEN then_t=lexpr ELSE else_t=lexpr %prec prec_ite + { let loc = L.mk_pos $startpos $endpos in + T.ite ~loc cond then_t else_t } + + | FORALL vars=separated_nonempty_list(COMMA, multi_logic_binder) + triggers=triggers filters=filters DOT body=lexpr %prec prec_forall + { let loc = L.mk_pos $startpos $endpos in + let body = T.triggers ~loc body triggers in + let body = T.filters ~loc body filters in + T.forall ~loc (List.flatten vars) body } + | EXISTS 
vars=separated_nonempty_list(COMMA, multi_logic_binder) + triggers=triggers filters=filters DOT body=lexpr %prec prec_exists + { let loc = L.mk_pos $startpos $endpos in + let body = T.triggers ~loc body triggers in + let body = T.filters ~loc body filters in + T.exists ~loc (List.flatten vars) body } + + | name=STRING COLON e=lexpr %prec prec_named + { let loc = L.mk_pos $startpos $endpos in + let id = I.mk I.track name in + T.tracked ~loc id e } + + | LET l=separated_nonempty_list(COMMA, let_binder) IN body=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.letin ~loc l body } + + | CHECK e=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.check ~loc e } + + | CUT e=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.cut ~loc e } + + + /* Match */ + + | MATCH e=lexpr WITH cases=match_cases END + { let loc = L.mk_pos $startpos $endpos in + T.match_ ~loc e (List.rev cases) } + +match_case: + | p=simple_pattern RIGHTARROW e = lexpr + { p, e } + +match_cases: + | c=match_case + | BAR c=match_case + { [c] } + | l=match_cases BAR c=match_case + { c :: l } + +simple_pattern: + | t=ident + { t } + | f=ident LEFTPAR args=separated_nonempty_list(COMMA,ident) RIGHTPAR + { let loc = L.mk_pos $startpos $endpos in + T.apply ~loc f args } + +let_binder: + | a=ident EQUAL b=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.eq ~loc a b } + +simple_expr : + | t=ident + { t } + | LEFTPAR e=lexpr RIGHTPAR + { e } + + | s=INTEGER + { let loc = L.mk_pos $startpos $endpos in + T.int ~loc s } + | s=DECIMAL + { let loc = L.mk_pos $startpos $endpos in + T.real ~loc s } + | s=HEXADECIMAL + { let loc = L.mk_pos $startpos $endpos in + T.hexa ~loc s } + + | TRUE + { let loc = L.mk_pos $startpos $endpos in + T.true_ ~loc () } + | FALSE + { let loc = L.mk_pos $startpos $endpos in + T.false_ ~loc () } + | VOID + { let loc = L.mk_pos $startpos $endpos in + T.void ~loc () } + + + /* Records */ + + | LEFTBR l=separated_nonempty_list(PV, label_expr) RIGHTBR + { let loc = L.mk_pos 
$startpos $endpos in + T.record ~loc l } + | LEFTBR s=simple_expr WITH l=separated_nonempty_list(PV, label_expr) RIGHTBR + { let loc = L.mk_pos $startpos $endpos in + T.record_with ~loc s l } + | s=simple_expr DOT label=raw_ident + { let loc = L.mk_pos $startpos $endpos in + T.record_access ~loc s (label I.term) } + + + /* Function/Predicate Calls */ + + | f=ident LEFTPAR args=separated_list(COMMA, lexpr) RIGHTPAR + { let loc = L.mk_pos $startpos $endpos in + T.apply ~loc f args } + + + /* Arrays */ + + | s=simple_expr LEFTSQ e=lexpr RIGHTSQ + { let loc = L.mk_pos $startpos $endpos in + T.array_get ~loc s e } + + | s=simple_expr LEFTSQ l=separated_nonempty_list(COMMA, array_assignment) RIGHTSQ + { let loc = L.mk_pos $startpos $endpos in + List.fold_left (fun acc (idx, value) -> + T.array_set ~loc acc idx value + ) s l } + + + | s=simple_expr COLON ty=primitive_type + { let loc = L.mk_pos $startpos $endpos in + T.colon ~loc s ty } + + | s=simple_expr QM id=raw_ident + { let loc = L.mk_pos $startpos $endpos in + T.adt_check ~loc s (id I.term) } + + | s=simple_expr c=QM_ID + { let loc = L.mk_pos $startpos $endpos in + T.adt_check ~loc s (I.mk I.term c) } + + | s=simple_expr SHARP label=raw_ident + { let loc = L.mk_pos $startpos $endpos in + T.adt_project ~loc s (label I.term) } + +array_assignment: + | e1=lexpr LEFTARROW e2=lexpr + { (e1, e2) } + +triggers: + | { [] } + | LEFTSQ l=separated_nonempty_list(BAR, trigger) RIGHTSQ + { l } + +filters: + | { [] } + | LEFTBR l=separated_nonempty_list(COMMA, lexpr) RIGHTBR + { l } + +trigger: + | l=separated_nonempty_list(COMMA, lexpr_or_dom) + { let loc = L.mk_pos $startpos $endpos in + T.trigger ~loc l } + +lexpr_or_dom: + | e=lexpr + { e } + | e=lexpr IN ls=sq lb=bound COMMA rb=bound rs=sq + { let loc = L.mk_pos $startpos $endpos in + T.in_interval ~loc e (lb, not ls) (rb, rs) } + | id=raw_ident MAPS_TO e=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.maps_to ~loc (id I.term) e } + +sq: + | LEFTSQ + { true } + | 
RIGHTSQ + { false } + +bound: + | QM + { let loc = L.mk_pos $startpos $endpos in + let v = I.mk I.term "?" in + T.const ~loc v } + | s=ID + | s=QM_ID + { let loc = L.mk_pos $startpos $endpos in + T.const ~loc (I.mk I.term s) } + | s=INTEGER + { let loc = L.mk_pos $startpos $endpos in + T.int ~loc s } + | s=DECIMAL + { let loc = L.mk_pos $startpos $endpos in + T.real ~loc s } + | s=HEXADECIMAL + { let loc = L.mk_pos $startpos $endpos in + T.hexa ~loc s } + | MINUS s=INTEGER + { let loc = L.mk_pos $startpos $endpos in + T.int ~loc ("-" ^ s) } + | MINUS s=DECIMAL + { let loc = L.mk_pos $startpos $endpos in + T.real ~loc ("-" ^ s) } + | MINUS s=HEXADECIMAL + { let loc = L.mk_pos $startpos $endpos in + T.hexa ~loc ("-" ^ s) } + +list2_lexpr_sep_comma: + | e1=lexpr COMMA e2=lexpr + { [e1; e2] } + | e=lexpr COMMA l=list2_lexpr_sep_comma + { e :: l } + +label_expr: + | id=ident EQUAL e=lexpr + { let loc = L.mk_pos $startpos $endpos in + T.eq ~loc id e } + + +/* Type definitions */ + +record_label_with_type: + | id=raw_ident COLON ty=primitive_type + { id I.term, ty } + +record_type: + | LEFTBR l=separated_nonempty_list(PV, record_label_with_type) RIGHTBR + { l } + +algebraic_label_with_type: + | id=ident COLON ty=primitive_type + { let loc = L.mk_pos $startpos $endpos in + T.colon ~loc id ty } + +algebraic_args: + | { [] } + | OF LEFTBR l=separated_nonempty_list(PV, algebraic_label_with_type) RIGHTBR + { l } + +algebraic_constructor: + | c=raw_ident l=algebraic_args + { c I.term, l } + +algebraic_typedef: + | vars=type_vars c=raw_ident EQUAL l=separated_nonempty_list(BAR, algebraic_constructor) + { let loc = L.mk_pos $startpos $endpos in + S.algebraic_type ~loc (c I.term) vars l } + + +/* Top-level declarations */ + +ac_modifier: + | /* empty */ + { false } + | AC + { true } + +theory_elt: + | AXIOM name=decl_ident COLON body=lexpr + { let loc = L.mk_pos $startpos $endpos in + S.axiom ~loc name body } + + | CASESPLIT name=decl_ident COLON body=lexpr + { let loc = L.mk_pos 
$startpos $endpos in + S.case_split ~loc name body } + +rewriting_list: + | e=lexpr + | e=lexpr PV + { [e] } + | e=lexpr PV l=rewriting_list + { e :: l } + +decl: + | THEORY id=decl_ident EXTENDS ext=decl_ident EQUAL l=theory_elt* END + { let loc = L.mk_pos $startpos $endpos in + S.theory ~loc id ext l } + + | TYPE vars=type_vars id=raw_ident + { let loc = L.mk_pos $startpos $endpos in + S.abstract_type ~loc (id I.term) vars } + + | TYPE l=separated_nonempty_list(AND, algebraic_typedef) + { let loc = L.mk_pos $startpos $endpos in + S.rec_types ~loc l } + + | TYPE vars=type_vars id=raw_ident EQUAL r=record_type + { let loc = L.mk_pos $startpos $endpos in + S.record_type ~loc (id I.term) vars r } + + | LOGIC ac=ac_modifier args=separated_nonempty_list(COMMA, raw_named_ident) COLON ty=logic_type + { let loc = L.mk_pos $startpos $endpos in + S.logic ~loc ~ac args ty } + + | FUNC f=raw_named_ident + LEFTPAR args=separated_list(COMMA, logic_binder) RIGHTPAR + COLON ret_ty=primitive_type EQUAL body=lexpr + { let loc = L.mk_pos $startpos $endpos in + S.fun_def ~loc f [] args ret_ty body } + + | PRED p=raw_named_ident EQUAL body=lexpr + { let loc = L.mk_pos $startpos $endpos in + let ret_ty = T.prop ~loc () in + S.fun_def ~loc p [] [] ret_ty body } + + | PRED p=raw_named_ident + LEFTPAR args=separated_list(COMMA, logic_binder) RIGHTPAR EQUAL body=lexpr + { let loc = L.mk_pos $startpos $endpos in + let ret_ty = T.prop ~loc () in + S.fun_def ~loc p [] args ret_ty body } + + | AXIOM name=decl_ident COLON body=lexpr + { let loc = L.mk_pos $startpos $endpos in + S.axiom ~loc name body } + + | REWRITING name=decl_ident COLON l=rewriting_list + { let loc = L.mk_pos $startpos $endpos in + S.rewriting ~loc name l } + + | GOAL name=decl_ident COLON body=lexpr + { let loc = L.mk_pos $startpos $endpos in + S.prove_goal ~loc name body } + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/syntax.messages b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/syntax.messages 
index cba510baf204caf7a5b529d272b39b10e8a18fa7..cd170d4d8ca6ca59241882610c937f78b2b11e72 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/syntax.messages +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/syntax.messages @@ -43,7 +43,7 @@ file: AXIOM ID COLON VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -97,7 +97,7 @@ file: FUNC ID EQUAL ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 108, spurious reduction of production raw_named_ident -> ID +## In state 108, spurious reduction of production raw_named_ident -> ID ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -135,7 +135,7 @@ file: FUNC ID LEFTPAR RIGHTPAR COLON UNIT EQUAL VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -246,7 +246,7 @@ file: GOAL ID COLON VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -348,7 +348,7 @@ file: LOGIC ID COLON LEFTPAR UNIT RIGHTARROW ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 36, spurious reduction of production separated_nonempty_list(COMMA,primitive_type) -> primitive_type +## In state 36, spurious reduction of production separated_nonempty_list(COMMA,primitive_type) -> primitive_type ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -441,9 +441,9 @@ file: LOGIC ID COLON UNIT COMMA BOOL RIGHTPAR ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 36, spurious reduction of production separated_nonempty_list(COMMA,primitive_type) -> primitive_type -## In state 38, spurious reduction of production separated_nonempty_list(COMMA,primitive_type) -> primitive_type COMMA separated_nonempty_list(COMMA,primitive_type) -## In state 329, spurious reduction of production loption(separated_nonempty_list(COMMA,primitive_type)) -> separated_nonempty_list(COMMA,primitive_type) +## In state 36, spurious reduction of production separated_nonempty_list(COMMA,primitive_type) -> primitive_type +## In state 38, spurious reduction of production separated_nonempty_list(COMMA,primitive_type) -> primitive_type COMMA separated_nonempty_list(COMMA,primitive_type) +## In state 329, spurious reduction of production loption(separated_nonempty_list(COMMA,primitive_type)) -> separated_nonempty_list(COMMA,primitive_type) ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -513,7 +513,7 @@ file: LOGIC ID LEFTPAR ## This implies that, although the LR(1) items 
shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 108, spurious reduction of production raw_named_ident -> ID +## In state 108, spurious reduction of production raw_named_ident -> ID ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -544,7 +544,7 @@ file: PRED ID COMMA ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 108, spurious reduction of production raw_named_ident -> ID +## In state 108, spurious reduction of production raw_named_ident -> ID ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -582,7 +582,7 @@ file: PRED ID EQUAL VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -681,7 +681,7 @@ file: PRED ID LEFTPAR RIGHTPAR EQUAL VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -807,7 +807,7 @@ file: REWRITING ID COLON DISTINCT LEFTPAR VOID COMMA VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -859,7 +859,7 @@ file: REWRITING ID COLON DISTINCT LEFTPAR VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1049,8 +1049,8 @@ file: REWRITING ID COLON FORALL ID COLON BOOL LEFTBR VOID RIGHTPAR ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr -## In state 148, spurious reduction of production separated_nonempty_list(COMMA,lexpr) -> lexpr +## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 148, spurious reduction of production separated_nonempty_list(COMMA,lexpr) -> lexpr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1100,7 +1100,7 @@ file: REWRITING ID COLON FORALL ID COLON BOOL LEFTSQ ID MAPS_TO VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1287,7 +1287,7 @@ file: REWRITING ID COLON FORALL ID COLON BOOL LEFTSQ VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1355,8 +1355,8 @@ file: REWRITING ID COLON FORALL ID LEFTPAR ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 108, spurious reduction of production raw_named_ident -> ID -## In state 209, spurious reduction of production named_ident -> raw_named_ident +## In state 108, spurious reduction of production raw_named_ident -> ID +## In state 209, spurious reduction of production named_ident -> raw_named_ident ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1398,9 +1398,9 @@ file: REWRITING ID COLON ID LEFTPAR VOID RIGHTBR ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 123, spurious reduction of production lexpr -> simple_expr -## In state 148, spurious reduction of production separated_nonempty_list(COMMA,lexpr) -> lexpr -## In state 145, spurious reduction of production loption(separated_nonempty_list(COMMA,lexpr)) -> separated_nonempty_list(COMMA,lexpr) +## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 148, spurious reduction of production separated_nonempty_list(COMMA,lexpr) -> lexpr +## In state 145, spurious reduction of production loption(separated_nonempty_list(COMMA,lexpr)) -> separated_nonempty_list(COMMA,lexpr) ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1439,7 +1439,7 @@ file: REWRITING ID COLON ID LEFTPAR VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1546,7 +1546,7 @@ file: REWRITING ID COLON IF VOID THEN VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1596,7 +1596,7 @@ file: REWRITING ID COLON IF VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1658,7 +1658,7 @@ file: REWRITING ID COLON LEFTBR ID EQUAL VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1778,7 +1778,7 @@ file: REWRITING ID COLON LEFTPAR VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1932,7 +1932,7 @@ file: REWRITING ID COLON LET ID EQUAL VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2006,7 +2006,7 @@ file: REWRITING ID COLON MATCH VOID TYPE ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2117,7 +2117,7 @@ file: REWRITING ID COLON MATCH VOID WITH ID RIGHTARROW VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2613,7 +2613,7 @@ file: REWRITING ID COLON VOID LEFTSQ VOID LEFTARROW DECIMAL COMMA VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2663,7 +2663,7 @@ file: REWRITING ID COLON VOID LEFTSQ VOID LEFTARROW VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2714,7 +2714,7 @@ file: REWRITING ID COLON VOID LEFTSQ VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -3351,7 +3351,7 @@ file: REWRITING ID COLON VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -3469,7 +3469,7 @@ file: THEORY ID EXTENDS ID EQUAL AXIOM ID COLON VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -3543,7 +3543,7 @@ file: THEORY ID EXTENDS ID EQUAL CASESPLIT ID COLON VOID WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 123, spurious reduction of production lexpr -> simple_expr +## In state 123, spurious reduction of production lexpr -> simple_expr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -3978,4 +3978,3 @@ file: XOR ## <YOUR SYNTAX ERROR MESSAGE HERE> - diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/tokens.mly b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/tokens.mly new file mode 100644 index 0000000000000000000000000000000000000000..3a988c2cfb1bc53c10bfc85eaa54ff22a0dffed3 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/ae/tokens.mly @@ -0,0 +1,43 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + +/* Token declarations for Alt-Ergo parser */ + +%token <string> ID +%token <string> QM_ID +%token <string> INTEGER +%token <string> DECIMAL +%token <string> HEXADECIMAL +%token <string> STRING +%token MATCH WITH THEORY EXTENDS END QM +%token AND LEFTARROW RIGHTARROW AC AT AXIOM CASESPLIT REWRITING +%token BAR HAT +%token BOOL COLON COMMA PV DISTINCT DOT SHARP ELSE OF EOF EQUAL +%token EXISTS FALSE VOID FORALL FUNC GE GOAL GT CHECK CUT +%token IF IN INT BITV MAPS_TO +%token LE LET LEFTPAR LEFTSQ LEFTBR LOGIC LRARROW XOR LT MINUS +%token NOT NOTEQ OR PERCENT PLUS PRED PROP +%token QUOTE REAL UNIT +%token RIGHTPAR RIGHTSQ RIGHTBR +%token SLASH POW POWDOT +%token THEN TIMES TRUE TYPE + +/* Precedences */ + +%nonassoc IN +%nonassoc prec_forall prec_exists +%right RIGHTARROW LRARROW XOR +%right OR +%right AND +%nonassoc prec_ite +%left prec_relation EQUAL NOTEQ LT LE GT GE +%left PLUS MINUS +%left TIMES SLASH PERCENT POW POWDOT AT +%nonassoc HAT +%nonassoc uminus +%nonassoc NOT +%right prec_named +%nonassoc CHECK CUT + +%% + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dedukti/ast_dedukti.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dedukti/ast_dedukti.ml index bb821552369ec3f0a3ceac1f797b3ec06f98286a..4d8f4de5c63c7635199fffcffe5b4f0734de8d1b 100644 --- 
a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dedukti/ast_dedukti.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dedukti/ast_dedukti.ml @@ -11,10 +11,14 @@ module type Id = sig type namespace (** Namespace for identifiers *) - val mod_name : string -> namespace - (** Create a namespace from a module name. *) + val term : namespace + (** The term namespace. *) val mk : namespace -> string -> t + (** Create a simple identifier. *) + + val qualified : namespace -> string list -> string -> t + (** Create a qualified identifier. *) end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dedukti/parseDedukti.mly b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dedukti/parseDedukti.mly index c352255987cb42ea843b00eba4e97ec4d394cc7d..0706efb9e519ae8f245788d4dfe5caf05b752fc1 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dedukti/parseDedukti.mly +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dedukti/parseDedukti.mly @@ -114,5 +114,5 @@ id: | id=ID { let loc = L.mk_pos $startpos $endpos in let md, name = id in - T.mk ~loc (I.mk (I.mod_name md) name) } + T.mk ~loc (I.qualified I.term [md] name) } %% diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/ast.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/ast.ml new file mode 100644 index 0000000000000000000000000000000000000000..a15e388e22ffe31bfd73ecd45235fc110a911047 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/ast.ml @@ -0,0 +1,46 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information. *) + +(** AST requirements for the Dimacs format. + Dimacs is a very simple format intended to express CNFs (conjunctive normal forms) + in the simplest format possible. *) + +module type Term = sig + + type t + (** The type of terms. *) + + type location + (** The type of locations. *) + + val atom : ?loc:location -> int -> t + (** Make an atom from an non-zero integer. 
Positive integers denotes variables, + and negative integers denote the negation of the variable corresponding to + their absolute value. *) + +end +(** Requirements for implementations of Dimacs terms. *) + + +module type Statement = sig + + type t + (** The type of statements for dimacs. *) + + type term + (** The type of dimacs terms. *) + + type location + (** The type of locations. *) + + val p_cnf : ?loc:location -> int -> int -> t + (** Header of a dimacs file. First argument is the number of variables, + second is the number of clauses. *) + + val clause : ?loc:location -> term list -> t + (** Make a clause from a list of literals. *) + +end +(** Requirements for implementations of Dimacs statements. *) + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/dolmen_dimacs.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/dolmen_dimacs.ml index 7be1c78511964942d96c4d613557e94b4643c5e1..d96c43b619af326b40c606c23a9bb3b84b499be9 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/dolmen_dimacs.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/dolmen_dimacs.ml @@ -1,18 +1,18 @@ (* This file is free software, part of dolmen. 
See file "LICENSE" formore information *) -module type Term = Ast_dimacs.Term -module type Statement = Ast_dimacs.Statement +module type Term = Ast.Term +module type Statement = Ast.Statement module Make (L : Dolmen_intf.Location.S) (T : Term with type location := L.t) (S : Statement with type location := L.t and type term := T.t) = Dolmen_std.Transformer.Make(L)(struct - type token = Tokens_dimacs.token + type token = Tokens.token type statement = S.t let env = [] let incremental = true let error s = Syntax_messages.message s - end)(LexDimacs)(ParseDimacs.Make(L)(T)(S)) + end)(Lexer)(Parser.Make(L)(T)(S)) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/dolmen_dimacs.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/dolmen_dimacs.mli index f53e528ed13562f2d0766c2701e33805e0d5b704..eb4ee2697dd69efa76a7d12d01523569931e66b3 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/dolmen_dimacs.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/dolmen_dimacs.mli @@ -3,13 +3,13 @@ (** Dimacs language input *) -module type Term = Ast_dimacs.Term -module type Statement = Ast_dimacs.Statement +module type Term = Ast.Term +module type Statement = Ast.Statement (** Implementation requirement for the Dimacs format. *) module Make (L : Dolmen_intf.Location.S) (T : Term with type location := L.t) (S : Statement with type location := L.t and type term := T.t) : - Dolmen_intf.Language.S with type statement = S.t + Dolmen_intf.Language.S with type statement = S.t and type file := L.file (** Functor to generate a parser for the dimacs format. 
*) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/dune b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/dune index 8a99b90e96bf0a72ca6987fd49732b7e127356f2..467242e547bb6dfa33cc909d40cbc6d33a480793 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/dune +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/dune @@ -1,64 +1,12 @@ -(ocamllex (modules lexDimacs)) - -(menhir - (flags (--only-tokens)) - (modules tokens_dimacs) -) - -(menhir - (flags (--explain --table --external-tokens Tokens_dimacs)) - (modules tokens_dimacs parseDimacs) - (merge_into parseDimacs) -) - -(rule - (target syntax_messages.ml) - (deps (:tokens tokens_dimacs.mly) - (:parser parseDimacs.mly) - (:msg syntax.messages)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_dimacs %{tokens} - %{parser} --base %{parser} --compile-errors %{msg}))) -) - +; Language library definition (library (name dolmen_dimacs) (public_name dolmen.dimacs) + (instrumentation (backend bisect_ppx)) (libraries dolmen_std dolmen_intf menhirLib) - (modules Tokens_dimacs LexDimacs ParseDimacs Ast_dimacs Syntax_messages Dolmen_dimacs) -) - -; Convenience rule to generate a fresh messages file, -; and update an already existing one. 
-(rule - (target new.messages) - (mode promote-until-clean) - (deps (:tokens tokens_dimacs.mly) - (:parser parseDimacs.mly)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_dimacs %{tokens} - %{parser} --base %{parser} --list-errors))) + (modules Dolmen_dimacs Tokens Lexer Parser Ast Syntax_messages) ) -(rule - (target updated.messages) - (mode promote-until-clean) - (deps (:tokens tokens_dimacs.mly) - (:parser parseDimacs.mly) - (:msg syntax.messages)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_dimacs %{tokens} - %{parser} --base %{parser} --update-errors %{msg}))) -) - -; Additional rule to add to runtest a check that the messages file is up-to-date -(rule - (alias runtest) - (deps (:tokens tokens_dimacs.mly) - (:parser parseDimacs.mly) - (:new new.messages) - (:msg syntax.messages)) - (action (run menhir --external-tokens Tokens_dimacs %{tokens} - %{parser} --base %{parser} --compare-errors %{new} --compare-errors %{msg})) -) +; Common include +(include ../dune.common) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/lexer.mll b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/lexer.mll new file mode 100644 index 0000000000000000000000000000000000000000..2c751a32dd6f85d34916ff0255fe2d45daccbfa7 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/lexer.mll @@ -0,0 +1,47 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more details. 
*) + +{ + exception Error + + module T = Dolmen_std.Tok + + open Tokens + + let descr token : T.descr = + match (token : token) with + | EOF -> T.descr ~kind:"end of file token" "" + | P -> T.descr ~kind:"keyword" "p" + | CNF -> T.descr ~kind:"keyword" "cnf" + | NEWLINE -> T.descr ~kind:"newline character" "" + | ZERO -> T.descr ~kind:"integer" "0" + | INT i -> T.descr ~kind:"integer" (string_of_int i) + +} + +let zero_numeric = '0' +let numeric = ['0' - '9'] +let non_zero_numeric = ['1' - '9'] + +let positive_number = non_zero_numeric numeric* +let negative_number = ['-'] positive_number +let number = positive_number | negative_number + +let any_char_except_newline = [^ '\n'] + +rule token newline = parse + | "c" { comment newline lexbuf } + | "p" { P } + | "cnf" { CNF } + | eof { EOF } + | zero_numeric { ZERO } + | [' ' '\t' '\r'] { token newline lexbuf } + | number { INT (int_of_string @@ Lexing.lexeme lexbuf) } + | '\n' { newline lexbuf; NEWLINE } + | _ { raise Error } + +and comment newline = parse + | eof { EOF } + | '\n' { newline lexbuf; token newline lexbuf } + | any_char_except_newline { comment newline lexbuf } + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/parser.mly b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/parser.mly new file mode 100644 index 0000000000000000000000000000000000000000..205721bf7a5a5f009ff5a5c6e16a6ac1c26aba1e --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/parser.mly @@ -0,0 +1,49 @@ + +(* This file is free software, part of dolmen. 
See file "LICENSE" for more details *) + +/* Functor parameters */ + +%parameter <L : Dolmen_intf.Location.S> +%parameter <T : Ast.Term with type location := L.t> +%parameter <S : Ast.Statement with type location := L.t and type term := T.t> + +/* Starting symbols */ + +%start <S.t list> file +%start <S.t option> input + +%% + +input: + | NEWLINE* EOF + { None } + | NEWLINE* s=start + { Some s } + | NEWLINE* c=clause + { Some c } + +file: + | NEWLINE* h=start l=cnf + { h :: l } + +start: + | P CNF nbvar=INT nbclause=INT NEWLINE + { let loc = L.mk_pos $startpos $endpos in + S.p_cnf ~loc nbvar nbclause } + +cnf: + | EOF + { [] } + | NEWLINE l=cnf + { l } + | c=clause l=cnf + { c :: l } + +clause: + | c=nonempty_list(atom) ZERO NEWLINE + { let loc = L.mk_pos $startpos $endpos in S.clause ~loc c } + +atom: + | i=INT + { let loc = L.mk_pos $startpos $endpos in T.atom ~loc i } + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/syntax.messages b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/syntax.messages index ad52c40858cfc62d3b07f0ceabd3442bc595532e..b24c6e964913d3dfd4c664199a3445b8794a516e 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/syntax.messages +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/syntax.messages @@ -51,8 +51,8 @@ file: NEWLINE INT ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 1, spurious reduction of production list(NEWLINE) -> -## In state 2, spurious reduction of production list(NEWLINE) -> NEWLINE list(NEWLINE) +## In state 1, spurious reduction of production list(NEWLINE) -> +## In state 2, spurious reduction of production list(NEWLINE) -> NEWLINE list(NEWLINE) ## 001 diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/tokens.mly b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/tokens.mly new file mode 100644 index 0000000000000000000000000000000000000000..43abe3fa07cddbfd3069699816bf81c0b280837b --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dimacs/tokens.mly @@ -0,0 +1,13 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more details *) + +/* Tokens for dimacs parsing */ + +%token EOF +%token ZERO +%token P CNF +%token NEWLINE +%token <int> INT + +%% + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dune.common b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dune.common new file mode 100644 index 0000000000000000000000000000000000000000..367d4a873e9a2e5902cada9a789fdfdea359165e --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/dune.common @@ -0,0 +1,68 @@ + +; Commons parts defining the lexer, parser and some test rules +; ============================================================================= +; All these rules should be common to all libraries, now that the lexer, parser, +; ast files and so on have standardized names. 
+ +(ocamllex (modules lexer)) + +(menhir + (flags (--only-tokens)) + (modules tokens) +) + +(menhir + (flags (--explain --table --external-tokens Tokens)) + (modules tokens parser) + (merge_into parser) +) + +(rule + (target syntax_messages.ml) + (deps (:tokens tokens.mly) + (:parser parser.mly) + (:msg syntax.messages)) + (action (with-stdout-to %{target} + (run menhir --external-tokens Tokens %{tokens} + %{parser} --base %{parser} --compile-errors %{msg}))) +) + +; Convenience rule to generate a fresh messages file, +; and update an already existing one. +(rule + (target new.messages) + (deps (:tokens tokens.mly) + (:parser parser.mly)) + (action (with-stdout-to %{target} + (run menhir --external-tokens Tokens %{tokens} + %{parser} --base %{parser} --list-errors))) +) + +(rule + (target updated.messages) + (deps (:tokens tokens.mly) + (:parser parser.mly) + (:msg syntax.messages)) + (action (with-stdout-to %{target} + (run menhir --external-tokens Tokens %{tokens} + %{parser} --base %{parser} --update-errors %{msg}))) +) + +(rule + (target syntax.messages.updated) + (deps (:tokens tokens.mly) + (:parser parser.mly) + (:new new.messages) + (:updated updated.messages) + ) + (action (with-stdout-to %{target} + (run menhir --external-tokens Tokens %{tokens} + %{parser} --base %{parser} --merge-errors %{new} --merge-errors %{updated}))) +) + +(rule + (alias runtest) + (package dolmen) + (deps syntax.messages syntax.messages.updated) + (action (diff syntax.messages syntax.messages.updated)) +) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/ast.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/ast.ml new file mode 100644 index 0000000000000000000000000000000000000000..d4970bf4c8f266e5a4f072066cd876fdfc0c36e4 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/ast.ml @@ -0,0 +1,50 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information. *) + +(** AST requirements for the iCNF format. 
+ iCNF is a very simple format intended to express CNFs (conjunctive normal forms) + in the simplest format possible. Compared to dimacs, iCNF allows local + assumptions, and does not require to declare the number of clauses and + formulas. *) + +module type Term = sig + + type t + (** The type of terms. *) + + type location + (** The type of locations. *) + + val atom : ?loc:location -> int -> t + (** Make an atom from an non-zero integer. Positive integers denotes variables, + and negative integers denote the negation of the variable corresponding to + their absolute value. *) + +end +(** Requirements for implementations of Dimacs terms. *) + + +module type Statement = sig + + type t + (** The type of statements for iCNF. *) + + type term + (** The type of iCNF terms. *) + + type location + (** The type of locations. *) + + val p_inccnf : ?loc:location -> unit -> t + (** header of an iCNF file. *) + + val clause : ?loc:location -> term list -> t + (** Make a clause from a list of literals. *) + + val assumption : ?loc:location -> term list -> t + (** Generate a solve instruction with the given list of assumptions. *) + +end +(** Requirements for implementations of iCNF statements. *) + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/dolmen_icnf.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/dolmen_icnf.ml index 411b7ee1dca2b01fa37ed2e36eadd4ad58f7f73c..d96c43b619af326b40c606c23a9bb3b84b499be9 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/dolmen_icnf.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/dolmen_icnf.ml @@ -1,18 +1,18 @@ (* This file is free software, part of dolmen. 
See file "LICENSE" formore information *) -module type Term = Ast_iCNF.Term -module type Statement = Ast_iCNF.Statement +module type Term = Ast.Term +module type Statement = Ast.Statement module Make (L : Dolmen_intf.Location.S) (T : Term with type location := L.t) (S : Statement with type location := L.t and type term := T.t) = Dolmen_std.Transformer.Make(L)(struct - type token = Tokens_iCNF.token + type token = Tokens.token type statement = S.t let env = [] let incremental = true let error s = Syntax_messages.message s - end)(LexiCNF)(ParseiCNF.Make(L)(T)(S)) + end)(Lexer)(Parser.Make(L)(T)(S)) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/dolmen_icnf.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/dolmen_icnf.mli index 9292a6b3048ed9894dc3931f50bbfacfa01fd438..66c3054e80da0aa7b30371f95da03087f906fbe8 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/dolmen_icnf.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/dolmen_icnf.mli @@ -3,13 +3,13 @@ (** iCNF language input *) -module type Term = Ast_iCNF.Term -module type Statement = Ast_iCNF.Statement +module type Term = Ast.Term +module type Statement = Ast.Statement (** Implementation requirement for the iCNF format. *) module Make (L : Dolmen_intf.Location.S) (T : Term with type location := L.t) (S : Statement with type location := L.t and type term := T.t) : - Dolmen_intf.Language.S with type statement = S.t + Dolmen_intf.Language.S with type statement = S.t and type file := L.file (** Functor to generate a parser for the iCNF format. 
*) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/dune b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/dune index 38020e6ed37798d945cda8b4357b15bf561072f8..576d85eb691d83f1d690867b28d25ab266fa53c1 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/dune +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/dune @@ -1,64 +1,12 @@ -(ocamllex (modules lexiCNF)) - -(menhir - (flags (--only-tokens)) - (modules tokens_iCNF) -) - -(menhir - (flags (--explain --table --external-tokens Tokens_iCNF)) - (modules tokens_iCNF parseiCNF) - (merge_into parseiCNF) -) - -(rule - (target syntax_messages.ml) - (deps (:tokens tokens_iCNF.mly) - (:parser parseiCNF.mly) - (:msg syntax.messages)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_iCNF %{tokens} - %{parser} --base %{parser} --compile-errors %{msg}))) -) - +; Language library definition (library (name dolmen_icnf) (public_name dolmen.icnf) + (instrumentation (backend bisect_ppx)) (libraries dolmen_std dolmen_intf menhirLib) - (modules Tokens_iCNF LexiCNF ParseiCNF Ast_iCNF Syntax_messages Dolmen_icnf) -) - -; Convenience rule to generate a fresh messages file, -; and update an already existing one. 
-(rule - (target new.messages) - (mode promote-until-clean) - (deps (:tokens tokens_iCNF.mly) - (:parser parseiCNF.mly)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_iCNF %{tokens} - %{parser} --base %{parser} --list-errors))) + (modules Dolmen_icnf Tokens Lexer Parser Ast Syntax_messages) ) -(rule - (target updated.messages) - (mode promote-until-clean) - (deps (:tokens tokens_iCNF.mly) - (:parser parseiCNF.mly) - (:msg syntax.messages)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_iCNF %{tokens} - %{parser} --base %{parser} --update-errors %{msg}))) -) - -; Additional rule to add to runtest a check that the messages file is up-to-date -(rule - (alias runtest) - (deps (:tokens tokens_iCNF.mly) - (:parser parseiCNF.mly) - (:new new.messages) - (:msg syntax.messages)) - (action (run menhir --external-tokens Tokens_iCNF %{tokens} - %{parser} --base %{parser} --compare-errors %{new} --compare-errors %{msg})) -) +; Common include +(include ../dune.common) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/lexer.mll b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/lexer.mll new file mode 100644 index 0000000000000000000000000000000000000000..ed9c45bd85912c137d44b902cbec60f6476368f8 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/lexer.mll @@ -0,0 +1,49 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more details. 
*) + +{ + exception Error + + module T = Dolmen_std.Tok + + open Tokens + + let descr token : T.descr = + match (token : token) with + | EOF -> T.descr ~kind:"end of file token" "" + | NEWLINE -> T.descr ~kind:"newline character" "" + | A -> T.descr ~kind:"keyword" "a" + | P -> T.descr ~kind:"keyword" "p" + | INCCNF -> T.descr ~kind:"keyword" "inccnf" + | ZERO -> T.descr ~kind:"integer" "0" + | INT i -> T.descr ~kind:"integer" (string_of_int i) + +} + +let zero_numeric = '0' +let numeric = ['0' - '9'] +let non_zero_numeric = ['1' - '9'] + +let positive_number = non_zero_numeric numeric* +let negative_number = ['-'] positive_number +let number = positive_number | negative_number + +let printable_char = [^ '\n'] +let comment = ['c'] printable_char* ['\n'] + +rule token newline = parse + | "c" { comment newline lexbuf } + | "p" { P } + | "a" { A } + | "inccnf" { INCCNF } + | eof { EOF } + | zero_numeric { ZERO } + | [' ' '\t' '\r'] { token newline lexbuf } + | number { INT (int_of_string @@ Lexing.lexeme lexbuf) } + | '\n' { newline lexbuf; NEWLINE } + | _ { raise Error } + +and comment newline = parse + | '\n' { newline lexbuf; token newline lexbuf } + | printable_char { comment newline lexbuf } + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/new.messages b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/new.messages deleted file mode 100644 index 7e0eb6805cbb60c5ce62b32a1302587335c59c2a..0000000000000000000000000000000000000000 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/new.messages +++ /dev/null @@ -1,241 +0,0 @@ -input: A INT P -## -## Ends in an error in state: 15. -## -## list(atom) -> atom . list(atom) [ ZERO ] -## -## The known suffix of the stack is as follows: -## atom -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -input: A P -## -## Ends in an error in state: 35. -## -## assumption -> A . 
list(atom) ZERO NEWLINE [ # ] -## -## The known suffix of the stack is as follows: -## A -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -input: A ZERO ZERO -## -## Ends in an error in state: 37. -## -## assumption -> A list(atom) ZERO . NEWLINE [ # ] -## -## The known suffix of the stack is as follows: -## A list(atom) ZERO -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -input: INT P -## -## Ends in an error in state: 23. -## -## nonempty_list(atom) -> atom . [ ZERO ] -## nonempty_list(atom) -> atom . nonempty_list(atom) [ ZERO ] -## -## The known suffix of the stack is as follows: -## atom -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -input: INT ZERO ZERO -## -## Ends in an error in state: 41. -## -## clause -> nonempty_list(atom) ZERO . NEWLINE [ # ] -## -## The known suffix of the stack is as follows: -## nonempty_list(atom) ZERO -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -input: NEWLINE ZERO -## -## Ends in an error in state: 33. -## -## input -> NEWLINE . input [ # ] -## -## The known suffix of the stack is as follows: -## NEWLINE -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -input: P INCCNF ZERO -## -## Ends in an error in state: 31. -## -## start -> P INCCNF . NEWLINE [ # ] -## -## The known suffix of the stack is as follows: -## P INCCNF -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -input: P ZERO -## -## Ends in an error in state: 30. -## -## start -> P . INCCNF NEWLINE [ # ] -## -## The known suffix of the stack is as follows: -## P -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -input: ZERO -## -## Ends in an error in state: 29. -## -## input' -> . input [ # ] -## -## The known suffix of the stack is as follows: -## -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -file: NEWLINE ZERO -## -## Ends in an error in state: 1. -## -## list(NEWLINE) -> NEWLINE . list(NEWLINE) [ P ] -## -## The known suffix of the stack is as follows: -## NEWLINE -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -file: P INCCNF NEWLINE A P -## -## Ends in an error in state: 11. -## -## assumption -> A . 
list(atom) ZERO NEWLINE [ NEWLINE INT EOF A ] -## -## The known suffix of the stack is as follows: -## A -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -file: P INCCNF NEWLINE A ZERO NEWLINE ZERO -## -## Ends in an error in state: 25. -## -## problem -> assumption . problem [ # ] -## -## The known suffix of the stack is as follows: -## assumption -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -file: P INCCNF NEWLINE A ZERO ZERO -## -## Ends in an error in state: 13. -## -## assumption -> A list(atom) ZERO . NEWLINE [ NEWLINE INT EOF A ] -## -## The known suffix of the stack is as follows: -## A list(atom) ZERO -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -file: P INCCNF NEWLINE INT ZERO NEWLINE ZERO -## -## Ends in an error in state: 21. -## -## problem -> clause . problem [ # ] -## -## The known suffix of the stack is as follows: -## clause -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -file: P INCCNF NEWLINE INT ZERO ZERO -## -## Ends in an error in state: 19. -## -## clause -> nonempty_list(atom) ZERO . NEWLINE [ NEWLINE INT EOF A ] -## -## The known suffix of the stack is as follows: -## nonempty_list(atom) ZERO -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -file: P INCCNF NEWLINE NEWLINE ZERO -## -## Ends in an error in state: 8. -## -## problem -> NEWLINE . problem [ # ] -## -## The known suffix of the stack is as follows: -## NEWLINE -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -file: P INCCNF NEWLINE ZERO -## -## Ends in an error in state: 7. -## -## file -> list(NEWLINE) start . problem [ # ] -## -## The known suffix of the stack is as follows: -## list(NEWLINE) start -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -file: P INCCNF ZERO -## -## Ends in an error in state: 5. -## -## start -> P INCCNF . NEWLINE [ NEWLINE INT EOF A ] -## -## The known suffix of the stack is as follows: -## P INCCNF -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -file: P ZERO -## -## Ends in an error in state: 4. -## -## start -> P . 
INCCNF NEWLINE [ NEWLINE INT EOF A ] -## -## The known suffix of the stack is as follows: -## P -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - -file: ZERO -## -## Ends in an error in state: 0. -## -## file' -> . file [ # ] -## -## The known suffix of the stack is as follows: -## -## - -<YOUR SYNTAX ERROR MESSAGE HERE> - diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/parser.mly b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/parser.mly new file mode 100644 index 0000000000000000000000000000000000000000..2ed8eaa7b2db25e321bc7ce376a227c4b97419d9 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/parser.mly @@ -0,0 +1,58 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more details *) + +/* Functor parameters */ + +%parameter <L : Dolmen_intf.Location.S> +%parameter <T : Ast.Term with type location := L.t> +%parameter <S : Ast.Statement with type location := L.t and type term := T.t> + +/* Starting symbols */ + +%start <S.t list> file +%start <S.t option> input + +%% + +input: + | NEWLINE i=input + { i } + | s=start + { Some s } + | c=clause + { Some c } + | a=assumption + { Some a } + | EOF + { None } + +file: + | NEWLINE* start l=problem + { l } + +start: + | P INCCNF NEWLINE + { let loc = L.mk_pos $startpos $endpos in S.p_inccnf ~loc () } + +problem: + | EOF + { [] } + | NEWLINE l=problem + { l } + | c=clause l=problem + { c :: l } + | a=assumption l=problem + { a :: l } + +clause: + | l=atom+ ZERO NEWLINE + { let loc = L.mk_pos $startpos $endpos in S.clause ~loc l } + +assumption: + | A l=atom* ZERO NEWLINE + { let loc = L.mk_pos $startpos $endpos in S.assumption ~loc l } + +atom: + | i=INT + { let loc = L.mk_pos $startpos $endpos in T.atom ~loc i } + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/tokens.mly b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/tokens.mly new file mode 100644 index 0000000000000000000000000000000000000000..2ca588c384750864dc2533675a11204756d2fb38 
--- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/icnf/tokens.mly @@ -0,0 +1,14 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more details *) + +/* Tokens for iCNF parsing */ + +%token EOF +%token ZERO +%token A P +%token INCCNF +%token NEWLINE +%token <int> INT + +%% + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/line/dolmen_line.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/line/dolmen_line.ml index 6bc39c98d5009ac8790bbdd6f3eb3830fceb30a7..58cb2c2f023d0352143d01bfb7e16b6eb65bc77e 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/line/dolmen_line.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/line/dolmen_line.ml @@ -1,9 +1,9 @@ (* This file is free software, part of dolmen. See file "LICENSE" for more information *) -let rec consume lexbuf = +let rec consume ~newline ~sync lexbuf = match LexLine.token lexbuf with - | LexLine.EOF -> () - | LexLine.CHAR '\n' -> Lexing.new_line lexbuf - | _ -> consume lexbuf + | LexLine.EOF -> sync lexbuf; () + | LexLine.CHAR '\n' -> newline lexbuf; Lexing.new_line lexbuf + | _ -> consume ~newline ~sync lexbuf diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/line/dolmen_line.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/line/dolmen_line.mli index a351b56531769f689c3a6ad2ab50f264fd473610..b843e48a4af95c0231463a56f0a6eb4629429348 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/line/dolmen_line.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/line/dolmen_line.mli @@ -1,6 +1,9 @@ (* This file is free software, part of dolmen. See file "LICENSE" for more information *) -val consume : Lexing.lexbuf -> unit +val consume : + newline:(Lexing.lexbuf -> unit) -> + sync:(Lexing.lexbuf -> unit) -> + Lexing.lexbuf -> unit (** Consumes all characters on the current line. 
*) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/dolmen_smtlib2.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/dolmen_smtlib2.ml index 4927bf464217e701ebf8c3829f741290695e718f..69ca9f00e16fef005b3c50af51401290f5dcd7aa 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/dolmen_smtlib2.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/dolmen_smtlib2.ml @@ -4,10 +4,12 @@ type version = [ | `Latest | `V2_6 + | `Poly ] (* Alias the sub-libraries *) module V2_6 = Dolmen_smtlib2_v6 +module Poly = Dolmen_smtlib2_poly (* Alias for the latest module *) module Latest = V2_6 diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/dune b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/dune index c3e841a416d978e7a54b16abe8df03e45d4c9a53..edd906c61e162b949d6629c45df334211ad396e8 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/dune +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/dune @@ -2,6 +2,6 @@ (library (name dolmen_smtlib2) (public_name dolmen.smtlib2) - (libraries dolmen_smtlib2_v6) (modules Dolmen_smtlib2) + (libraries dolmen_smtlib2_v6 dolmen_smtlib2_poly) ) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/.gitignore b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..3e7642f21ea6d59434dd6685051bb7dcca603cc0 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/.gitignore @@ -0,0 +1,2 @@ +new.messages +updated.messages diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/ast.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/ast.ml new file mode 100644 index 0000000000000000000000000000000000000000..285cc72d4dbabfbf350bd9a4662f4fe5fa18bfe6 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/ast.ml @@ -0,0 +1,224 @@ + +(* This file is free software, part of 
dolmen. See file "LICENSE" for more information. *) + +(** AST requirement for the Smtlib format. + The smtlib format is widely used among SMT solvers, and is the language + of the smtlib benchmark library. Terms are expressed as s-expressions, + and top-level directives include everything needed to use a prover + in an interactive loop (so it includes directive for getting and setting options, + getting information about the solver's internal model, etc...) *) + +module type Id = sig + + type t + (** The type of identifiers *) + + type namespace + (** Namespace for identifiers *) + + val sort : namespace + val term : namespace + val attr : namespace + (** The namespace for sorts (also called typee), terms + and attributes, respectively. *) + + val mk : namespace -> string -> t + (** Make an identifier from a name and namespace. *) + + val indexed : namespace -> string -> string list -> t + (** Create an indexed identifier. *) + +end + +module type Term = sig + + type t + (** The type of terms. *) + + type id + (** The type of identifiers for constants. *) + + type location + (** The type of locations. *) + + val const : ?loc:location -> id -> t + (** Constants, i.e non predefined symbols. This includes both constants + defined by theories, defined locally in a problem, and also quantified variables. *) + + val str : ?loc:location -> string -> t + (** Quoted strings. According to the smtlib manual, these can be interpreted as + either string literals (when the String theory is used), or simply constants *) + + val int : ?loc:location -> string -> t + val real : ?loc:location -> string -> t + val hexa : ?loc:location -> string -> t + val binary : ?loc:location -> string -> t + (** Constants lexically recognised as numbers in different formats. According to the smtlib + manual, these should not always be interpreted as numbers since their interpretation + is actually dependent on the theory set by the problem. 
*) + + val colon : ?loc:location -> t -> t -> t + (** Juxtaposition of terms, used to annotate terms with their type. *) + + val apply : ?loc:location -> t -> t list -> t + (** Application. *) + + val letand : ?loc:location -> t list -> t -> t + (** Local parrallel bindings. The bindings are a list of terms built using + the [colon] function. *) + + val par : ?loc:location -> t list -> t -> t + (** universal quantification by type variables. *) + + val forall : ?loc:location -> t list -> t -> t + (** Universal quantification. *) + + val exists : ?loc:location -> t list -> t -> t + (** Existencial quantification. *) + + val match_ : ?loc:location -> t -> (t * t) list -> t + (** Pattern matching. The first term is the term to match, + and each tuple in the list is a match case, which is a pair + of a pattern and a match branch. *) + + val sexpr : ?loc:location -> t list -> t + (** S-expressions. Used in smtlib's annotations, *) + + val annot : ?loc:location -> t -> t list -> t + (** Attach a list of attributes (also called annotations) to a term. As written + in the smtlib manual, "Term attributes have no logical meaning -- + semantically, [attr t l] is equivalent to [t]" *) + +end +(** Implementation requirements for Smtlib terms. *) + +module type Statement = sig + + type t + (** The type of statements. *) + + type id + (** The type of identifiers. *) + + type term + (** The type of terms. *) + + type location + (** The type of locations. *) + + (** (Re)starting and terminating *) + + val reset : ?loc:location -> unit -> t + (** Full reset of the prover state. *) + + val set_logic : ?loc:location -> string -> t + (** Set the problem logic. *) + + val set_option : ?loc:location -> term -> t + (** Set the value of a prover option. *) + + val exit : ?loc:location -> unit -> t + (** Exit the interactive loop. *) + + + (** Modifying the assertion stack *) + + val push : ?loc:location -> int -> t + (** Push the given number of new level on the stack of assertions. 
*) + + val pop : ?loc:location -> int -> t + (** Pop the given number of level on the stack of assertions. *) + + val reset_assertions : ?loc:location -> unit -> t + (** Reset assumed assertions. *) + + + (** Introducing new symbols *) + + val type_decl : ?loc:location -> id -> int -> t + (** Declares a new type constructor with given arity. *) + + val type_def : ?loc:location -> id -> id list -> term -> t + (** Defines an alias for types. [type_def f args body] is such that + later occurences of [f] applied to a list of arguments [l] should + be replaced by [body] where the [args] have been substituted by + their value in [l]. *) + + val datatypes : ?loc:location -> (id * term list * (id * term list) list) list -> t + (** Inductive type definitions. *) + + val fun_decl : ?loc:location -> id -> term list -> term list -> term -> t + (** Declares a new term symbol, and its type. [fun_decl f ty_args args ret] + declares [f] as a new function symbol which takes arguments of types + described in [args], and with return type [ret]. *) + + val fun_def : ?loc:location -> id -> term list -> term list -> term -> term -> t + (** Defines a new function. [fun_def f ty_args args ret body] is such that + applications of [f] are equal to [body] (module substitution of the arguments), + which should be of type [ret]. *) + + val funs_def_rec : ?loc:location -> (id * term list * term list * term * term) list -> t + (** Declare a list of mutually recursive functions. *) + + + (** Asserting and inspecting formulas *) + + val assert_ : ?loc:location -> term -> t + (** Add a proposition to the current set of assertions. *) + + val get_assertions : ?loc:location -> unit -> t + (** Return the current set of assertions. *) + + + (** Checking for satisfiablity *) + + val check_sat : ?loc:location -> term list -> t + (** Solve the current set of assertions for satisfiability, + under the local assumptions specified. 
*) + + + (** Models *) + + val get_model : ?loc:location -> unit -> t + (** Return the model found. *) + + val get_value : ?loc:location -> term list -> t + (** Return the value of the given terms in the current model of the solver. *) + + val get_assignment : ?loc:location -> unit -> t + (** Return the values of asserted propositions which have been labelled using + the ":named" attribute. *) + + (** Proofs *) + + val get_proof : ?loc:location -> unit -> t + (** Return the proof of the lastest [check_sat] if it returned unsat, else + is undefined. *) + + val get_unsat_core : ?loc:location -> unit -> t + (** Return the unsat core of the latest [check_sat] if it returned unsat, + else is undefined. *) + + val get_unsat_assumptions : ?loc:location -> unit -> t + (** Return a list of local assumptions (as givne in {!check_sat}, + that is enough to deduce unsat. *) + + (** Inspecting settings *) + + val get_info : ?loc:location -> string -> t + (** Get information (see smtlib manual). *) + + val get_option : ?loc:location -> string -> t + (** Get the value of a prover option. *) + + (** Scripts commands *) + + val echo : ?loc:location -> string -> t + (** Print back as-is, including the double quotes. *) + + val set_info : ?loc:location -> term -> t + (** Set information (see smtlib manual). *) + +end +(** implementation requirement for smtlib statements. *) + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/dolmen_smtlib2_poly.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/dolmen_smtlib2_poly.ml new file mode 100644 index 0000000000000000000000000000000000000000..967995eca3031c1b077ce03462a0e5749ccf6625 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/dolmen_smtlib2_poly.ml @@ -0,0 +1,19 @@ + +(* This file is free software, part of dolmen. 
See file "LICENSE" formore information *) + +module type Id = Ast.Id +module type Term = Ast.Term +module type Statement = Ast.Statement + +module Make + (L : Dolmen_intf.Location.S) + (I : Id) + (T : Term with type location := L.t and type id := I.t) + (S : Statement with type location := L.t and type id := I.t and type term := T.t) = + Dolmen_std.Transformer.Make(L)(struct + type token = Tokens.token + type statement = S.t + let env = [] + let incremental = true + let error s = Syntax_messages.message s + end)(Lexer)(Parser.Make(L)(I)(T)(S)) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/dolmen_smtlib2_poly.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/dolmen_smtlib2_poly.mli new file mode 100644 index 0000000000000000000000000000000000000000..267db8e6df71208f059f429308867bc5a280b02c --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/dolmen_smtlib2_poly.mli @@ -0,0 +1,17 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" formore information *) + +(** Smtlib language input *) + +module type Id = Ast.Id +module type Term = Ast.Term +module type Statement = Ast.Statement +(** Implementation requirement for the Smtlib format. *) + +module Make + (L : Dolmen_intf.Location.S) + (I : Id) + (T : Term with type location := L.t and type id := I.t) + (S : Statement with type location := L.t and type id := I.t and type term := T.t) : + Dolmen_intf.Language.S with type statement = S.t and type file := L.file +(** Functor to generate a parser for the Smtlib format. 
*) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/dune b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/dune new file mode 100644 index 0000000000000000000000000000000000000000..a7e9c49eccf0ee95334b382b64bb4d69db2f30aa --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/dune @@ -0,0 +1,12 @@ + +; Language library definition +(library + (name dolmen_smtlib2_poly) + (public_name dolmen.smtlib2.poly) + (instrumentation (backend bisect_ppx)) + (libraries dolmen_std dolmen_intf menhirLib) + (modules Dolmen_smtlib2_poly Tokens Lexer Parser Ast Syntax_messages) +) + +; Common include +(include ../../dune.common) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/lexer.mll b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/lexer.mll new file mode 100644 index 0000000000000000000000000000000000000000..05d832c64cd60e3df85f61840b35a1f078589373 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/lexer.mll @@ -0,0 +1,198 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information. *) + +(** {1 Smtlib Lexer} *) + +{ + exception Error + + module T = Dolmen_std.Tok + module M = Map.Make(String) + + open Tokens + + (* Token printing *) + + let keyword_descr s = + T.descr s ~kind:"keyword" + + let reserved_descr s = + T.descr s ~kind:"reserved word" + + let descr token : T.descr = + match (token : token) with + | EOF -> T.descr ~kind:"end of file token" "" + | OPEN -> T.descr ~article:"an" ~kind:"opening partenthesis" "" + | CLOSE -> T.descr ~article:"a" ~kind:"closing parenthesise" "" + | NUM s -> T.descr ~kind:"integer" s + | DEC s -> T.descr ~kind:"decimal" s + | HEX s -> T.descr ~kind:"hexadecimal" s + | BIN s -> T.descr ~kind:"binary" s + | STR s -> T.descr ~kind:"string" s + | SYMBOL s -> T.descr ~kind:"symbol" s + | KEYWORD s -> keyword_descr s + | UNDERSCORE -> reserved_descr "_" + | ATTRIBUTE -> reserved_descr "!" 
+ | AS -> reserved_descr "as" + | LET -> reserved_descr "let" + | EXISTS -> reserved_descr "exists" + | FORALL -> reserved_descr "forall" + | MATCH -> reserved_descr "match" + | PAR -> reserved_descr "par" + | ASSERT -> reserved_descr "assert" + | CHECK_SAT -> reserved_descr "check-sat" + | CHECK_SAT_ASSUMING -> reserved_descr "check-sat-assuming" + | DECLARE_CONST -> reserved_descr "declare-const" + | DECLARE_DATATYPE -> reserved_descr "declare-datatype" + | DECLARE_DATATYPES -> reserved_descr "declare-datatypes" + | DECLARE_FUN -> reserved_descr "declare-fun" + | DECLARE_SORT -> reserved_descr "declare-sort" + | DEFINE_FUN -> reserved_descr "define-fun" + | DEFINE_FUN_REC -> reserved_descr "define-fun-rec" + | DEFINE_FUNS_REC -> reserved_descr "define-funs-rec" + | DEFINE_SORT -> reserved_descr "define-sort" + | ECHO -> reserved_descr "echo" + | EXIT -> reserved_descr "exit" + | GET_ASSERTIONS -> reserved_descr "get-assertions" + | GET_ASSIGNMENT -> reserved_descr "gert-assignment" + | GET_INFO -> reserved_descr "get-info" + | GET_MODEL -> reserved_descr "get-model" + | GET_OPTION -> reserved_descr "get-option" + | GET_PROOF -> reserved_descr "get-proof" + | GET_UNSAT_ASSUMPTIONS -> reserved_descr "get-unsat-assumptions" + | GET_UNSAT_CORE -> reserved_descr "get-unsat-core" + | GET_VALUE -> reserved_descr "get-value" + | POP -> reserved_descr "pop" + | PUSH -> reserved_descr "push" + | RESET -> reserved_descr "reset" + | RESET_ASSERTIONS -> reserved_descr "reset-assertions" + | SET_INFO -> reserved_descr "set-info" + | SET_LOGIC -> reserved_descr "set-logic" + | SET_OPTION -> reserved_descr "set-option" + + (* Token parsing *) + + let bind map (x, v) = M.add x v map + + let reserved_words = + List.fold_left bind M.empty [ + (* reserved words *) + (* These are currently unused in smtlib scripts commands + * (they are only used in logic definitions), hence they are currently + * ignored, given that only scripts are currently parsed. 
+ "BINARY", BINARY; + "DECIMAL", DECIMAL; + "HEXADECIMAL", HEXADECIMAL; + "NUMERAL", NUMERAL; + "STRING", STRING; + *) + "_", UNDERSCORE; + "!", ATTRIBUTE; + "as", AS; + "let", LET; + "exists", EXISTS; + "forall", FORALL; + "match", MATCH; + "par", PAR; + (* command names *) + "assert", ASSERT; + "check-sat", CHECK_SAT; + "check-sat-assuming", CHECK_SAT_ASSUMING; + "declare-const", DECLARE_CONST; + "declare-datatype", DECLARE_DATATYPE; + "declare-datatypes", DECLARE_DATATYPES; + "declare-fun", DECLARE_FUN; + "declare-sort", DECLARE_SORT; + "define-fun", DEFINE_FUN; + "define-fun-rec", DEFINE_FUN_REC; + "define-funs-rec", DEFINE_FUNS_REC; + "define-sort", DEFINE_SORT; + "echo", ECHO; + "exit", EXIT; + "get-assertions", GET_ASSERTIONS; + "get-assignment", GET_ASSIGNMENT; + "get-info", GET_INFO; + "get-model", GET_MODEL; + "get-option", GET_OPTION; + "get-proof", GET_PROOF; + "get-unsat-assumptions", GET_UNSAT_ASSUMPTIONS; + "get-unsat-core", GET_UNSAT_CORE; + "get-value", GET_VALUE; + "pop", POP; + "push", PUSH; + "reset", RESET; + "reset-assertions", RESET_ASSERTIONS; + "set-info", SET_INFO; + "set-logic", SET_LOGIC; + "set-option", SET_OPTION; + ] + + let symbol newline lexbuf s = + (* register the newlines in quoted symbols to maintain correct locations.*) + for i = 0 to (String.length s - 1) do + match s.[i] with + | '\n' -> newline lexbuf + | _ -> () + done; + (* Check whetehr the symbol is a reserved word. *) + try M.find s reserved_words + with Not_found -> SYMBOL s + +} + +let white_space_char = ['\t' '\n' '\r' ' '] +let printable_char = [' ' - '~' '\128' - '\255'] +let white_space_or_printable = ['\t' '\n' '\r' ' ' - '~' '\128' - '\255'] +let digit = ['0' - '9'] +let letter = ['A' - 'Z' 'a' - 'z'] + +let numeral = '0' | (['1' - '9'] digit*) +let decimal = numeral '.' 
'0'* numeral + +let hex = ['0' - '9'] | ['A' - 'F'] | ['a' - 'f'] +let hexadecimal = "#x" hex+ + +let bin = ['0' '1'] +let binary = "#b" bin+ + +let ss_first_char = + letter | ['+' '-' '/' '*' '=' '%' '?' '!' '.' '$' '_' '~' '&' '^' '<' '>' '@'] +let ss_char = ss_first_char | digit +let simple_symbol = ss_first_char ss_char* + +let quoted_symbol_char = (white_space_or_printable # ['|' '\\']) +let quoted_symbol = ['|'] quoted_symbol_char* ['|'] + +let symbol = simple_symbol | quoted_symbol + +let keyword = ':' simple_symbol + +let comment = ';' (white_space_or_printable # ['\r' '\n'])* + +rule token newline = parse + (* Whitespace, newlines and comments *) + | eof { EOF } + | [' ' '\t' '\r']+ { token newline lexbuf } + | '\n' { newline lexbuf; token newline lexbuf } + | comment { token newline lexbuf } + + (* SMTLIB tokens *) + | '(' { OPEN } + | ')' { CLOSE } + | numeral as s { NUM s } + | decimal as s { DEC s } + | hexadecimal as s { HEX s } + | binary as s { BIN s } + | '"' { string newline (Buffer.create 42) lexbuf } + | keyword as s { KEYWORD s } + | simple_symbol as s + | '|' (quoted_symbol_char* as s) '|' + { symbol newline lexbuf s } + +and string newline b = parse + | '"' '"' { Buffer.add_char b '"'; string newline b lexbuf } + | '"' { STR (Buffer.contents b) } + | (printable_char | white_space_char) as c + { if c = '\n' then newline lexbuf; + Buffer.add_char b c; string newline b lexbuf } + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/parser.mly b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/parser.mly new file mode 100644 index 0000000000000000000000000000000000000000..408bef3c52342e2bec9dfb345eba586091a77850 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/parser.mly @@ -0,0 +1,408 @@ + +(* This file is free software, part of dolmem. 
See file "LICENSE" for more information *) + +%parameter <L : Dolmen_intf.Location.S> +%parameter <I : Ast.Id> +%parameter <T : Ast.Term with type location := L.t and type id := I.t> +%parameter <S : Ast.Statement with type location := L.t and type id := I.t and type term := T.t> + +%start <T.t> term +%start <S.t list> file +%start <S.t option> input + +%% + +spec_constant: + | s=NUM + { let loc = L.mk_pos $startpos $endpos in T.int ~loc s } + | s=DEC + { let loc = L.mk_pos $startpos $endpos in T.real ~loc s } + | s=HEX + { let loc = L.mk_pos $startpos $endpos in T.hexa ~loc s } + | s=BIN + { let loc = L.mk_pos $startpos $endpos in T.binary ~loc s } + | s=STR + { let loc = L.mk_pos $startpos $endpos in T.str ~loc s } +; + +s_expr: + | c=spec_constant + { c } + | s=SYMBOL + { let loc = L.mk_pos $startpos $endpos in T.const ~loc I.(mk term s) } + | s=KEYWORD + { let loc = L.mk_pos $startpos $endpos in T.const ~loc I.(mk term s) } + | PAR + { let loc = L.mk_pos $startpos $endpos in T.const ~loc I.(mk term "par") } + | AS + { let loc = L.mk_pos $startpos $endpos in T.const ~loc I.(mk term "as") } + | OPEN l=s_expr* CLOSE + { let loc = L.mk_pos $startpos $endpos in T.sexpr ~loc l } +; + +index: + | s=NUM + | s=SYMBOL + { s } + /* Small language extension to support string char literals */ + | s=HEX + { s } +; + +identifier: + | s=SYMBOL + { fun ns -> I.mk ns s } + | OPEN UNDERSCORE s=SYMBOL l=index+ CLOSE + { fun ns -> I.indexed ns s l } +; + +sort: + | s=identifier + { let loc = L.mk_pos $startpos $endpos in T.const ~loc (s I.sort) } + | OPEN f=identifier args=sort+ CLOSE + { let c = + let loc = L.mk_pos $startpos(f) $endpos(f) in + T.const ~loc (f I.sort) + in + let loc = L.mk_pos $startpos $endpos in T.apply ~loc c args } +; + +attribute_value: + | v=spec_constant + { v } + | v=SYMBOL + { let loc = L.mk_pos $startpos $endpos in T.const ~loc I.(mk attr v) } + | OPEN l=s_expr* CLOSE + { let loc = L.mk_pos $startpos $endpos in T.sexpr ~loc l } +; + +attribute: + | 
s=KEYWORD a=attribute_value? + { + let t = + let loc = L.mk_pos $startpos(s) $endpos(s) in + T.const ~loc I.(mk attr s) + in + match a with + | None -> t + | Some t' -> + let loc = L.mk_pos $startpos $endpos in + T.apply ~loc t [t'] + } +; + +/* +The [(as id ty)] doesn't specify the type of the function [id] +but only its result type +*/ +qual_identifier: + | s=identifier + { let loc = L.mk_pos $startpos $endpos in `NoAs (T.const ~loc (s I.term)) } + | OPEN AS s=identifier ty=sort CLOSE + { let loc = L.mk_pos $startpos $endpos in + `As (T.const ~loc (s I.term),ty) } +; + +var_binding: + | OPEN s=SYMBOL t=term CLOSE + { let c = + let loc = L.mk_pos $startpos(s) $endpos(s) in + T.const ~loc I.(mk term s) + in + let loc = L.mk_pos $startpos $endpos in T.colon ~loc c t } +; + +sorted_var: + | OPEN s=SYMBOL ty=sort CLOSE + { let c = + let loc = L.mk_pos $startpos(s) $endpos(s) in + T.const ~loc I.(mk term s) + in + let loc = L.mk_pos $startpos $endpos in T.colon ~loc c ty } +; + +/* Additional rule for pattern symbols, useful for: + 1- locations in symbol lists in patterns, + 2- menhir '+' syntax doesn't support raw tokens afaik */ +pattern_symbol: + | s=SYMBOL + { let loc = L.mk_pos $startpos $endpos in T.const ~loc I.(mk term s) } +; + +pattern: + | c=pattern_symbol + { c } + | OPEN f=pattern_symbol args=pattern_symbol+ CLOSE + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc f args } +; + +match_case: + | OPEN p=pattern t=term CLOSE + { p, t } +; + +term: + | c=spec_constant + { c } + | s=qual_identifier + { let loc = L.mk_pos $startpos $endpos in + match s with + | `NoAs f -> f + | `As (f,ty) -> T.colon ~loc f ty } + | OPEN s=qual_identifier args=term+ CLOSE + { let loc = L.mk_pos $startpos $endpos in + match s with + | `NoAs f -> T.apply ~loc f args + | `As (f,ty) -> T.colon (T.apply ~loc f args) ty } + | OPEN LET OPEN l=var_binding+ CLOSE t=term CLOSE + { let loc = L.mk_pos $startpos $endpos in T.letand ~loc l t } + | OPEN FORALL OPEN l=sorted_var+ CLOSE 
t=term CLOSE + { let loc = L.mk_pos $startpos $endpos in T.forall ~loc l t } + | OPEN EXISTS OPEN l=sorted_var+ CLOSE t=term CLOSE + { let loc = L.mk_pos $startpos $endpos in T.exists ~loc l t } + | OPEN MATCH t=term OPEN l=match_case+ CLOSE CLOSE + { let loc = L.mk_pos $startpos $endpos in T.match_ ~loc t l } + | OPEN ATTRIBUTE f=term args=attribute+ CLOSE + { let loc = L.mk_pos $startpos $endpos in T.annot ~loc f args } +; + +info_flag: + /* The following cases are subsumed by the last case, and thus ignored, + most notably because they would force to introduce tokens for specific + keywords even though these rules are syntaxically useless. + | :all-statistics + | :assertion-stack-levels + | :authors + | :error-behavior + | :name + | :reason-unknown + | :version + */ + | s=KEYWORD + { s } +; + +/* This definition is useless (not used in the syntax), + and it would force to match on non-reserved symbols, + which is very, very, very ugly... +b_value: + | true + | false +; +*/ + +/* renamed from option to avoid a name_clash */ +command_option: + /* These cases are subsumed by the last case, and thus ignored, + most notably because they would force to introduce tokens for specific + keywords even though these rules are syntaxically useless. + Also, this allows to ignore the definition of <b_value>, which is problematic. 
+ | :diagnostic-output-channel <string> + | :global-declarations <b_value> + | :interactive-mode <b_value> + | :print-success <b_value> + | :produce-assertions <b_value> + | :produce-assignments <b_value> + | :produce-models <b_value> + | :produce-proofs <b_value> + | :produce-unsat-assumptions <b_value> + | :produce-unsat-cores <b_value> + | :random-seed <numeral> + | :regular-output-channel <string> + | :reproducible-resource-limit <numeral> + | :verbosity <numeral> + */ + | a=attribute + { a } +; + +sort_dec: + | OPEN s=SYMBOL n=NUM CLOSE + { I.(mk sort s), int_of_string n + (* shouldn't raise because of the definition of numeral in lexer *) } +; + +selector_dec: + | OPEN s=SYMBOL ty=sort CLOSE + { let f = + let loc = L.mk_pos $startpos $endpos in + T.const ~loc (I.mk I.term s) + in + let loc = L.mk_pos $startpos $endpos in + T.colon ~loc f ty } +; + +constructor_dec: + | OPEN s=SYMBOL l=selector_dec* CLOSE + { (I.mk I.term s), l } +; + +/* Additional rule for datatype symbols, useful because + menhir '+' syntax doesn't support raw tokens afaik */ +datatype_symbol: + | s=SYMBOL + { let loc = L.mk_pos $startpos $endpos in T.const ~loc I.(mk sort s) } + +datatype_dec: + | OPEN l=constructor_dec+ CLOSE + { [], l } + | OPEN PAR OPEN vars=datatype_symbol+ CLOSE OPEN l=constructor_dec+ CLOSE CLOSE + { vars, l } +; + +function_dec(args_var): + | s=SYMBOL OPEN args=args_var* CLOSE ret=sort + { I.(mk term s), [], args, ret } +/* polymorphism as in smtlib3 draft and Conchon et al 18 */ + | s=SYMBOL OPEN PAR OPEN vars=datatype_symbol+ CLOSE OPEN args=args_var* CLOSE ret=sort CLOSE +/* polymorphism as in Bonichon et al 08 */ + | OPEN PAR OPEN vars=datatype_symbol+ CLOSE s=SYMBOL OPEN args=args_var* CLOSE ret=sort CLOSE + { I.(mk term s), vars, args, ret } + +function_def: + | s=SYMBOL OPEN args=sorted_var* CLOSE ret=sort body=term + { I.(mk term s), [], args, ret, body } +/* polymorphism as in smtlib3 draft */ + | s=SYMBOL OPEN PAR OPEN vars=datatype_symbol+ CLOSE OPEN 
args=sorted_var* CLOSE ret=sort body=term CLOSE +/* polymorphism as in Bonichon et al 08 */ + | OPEN PAR OPEN vars=datatype_symbol+ CLOSE s=SYMBOL OPEN args=sorted_var* CLOSE ret=sort body=term CLOSE +/* polymorphism as in Conchon et al 18 */ + | s=SYMBOL OPEN PAR OPEN vars=datatype_symbol+ CLOSE OPEN args=sorted_var* CLOSE ret=sort CLOSE body=term + { I.(mk term s), vars, args, ret, body } + + +/* Additional rule for prop_literals symbols, to have lighter + semantic actions in prop_literal reductions. */ +prop_symbol: + | s=pattern_symbol { s } +; + +/* This is a ugly hack, but necessary because the syntax defines + this reduction using a `not` token which doesn't really exists, + since it is not a reserved word, thus forcing us to pattern + match on the string... */ +not_symbol: + | s=SYMBOL + { if not (s = "not") then assert false; + let loc = L.mk_pos $startpos $endpos in T.const ~loc I.(mk term s) } +; + +prop_literal: + | s=prop_symbol + { s } + | OPEN f=not_symbol s=prop_symbol CLOSE + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc f [s] } +; + +command: + | OPEN ASSERT t=term CLOSE + { let loc = L.mk_pos $startpos $endpos in S.assert_ ~loc t } + | OPEN ASSERT OPEN PAR OPEN vars=datatype_symbol+ CLOSE t=term CLOSE CLOSE + { let loc = L.mk_pos $startpos $endpos in + S.assert_ ~loc (T.par ~loc vars t) } + | OPEN CHECK_SAT CLOSE + { let loc = L.mk_pos $startpos $endpos in S.check_sat ~loc [] } + | OPEN CHECK_SAT_ASSUMING OPEN l=prop_literal* CLOSE CLOSE + { let loc = L.mk_pos $startpos $endpos in S.check_sat ~loc l } + | OPEN DECLARE_CONST s=SYMBOL ty=sort CLOSE + { let loc = L.mk_pos $startpos $endpos in S.fun_decl ~loc I.(mk term s) [] [] ty } + | OPEN DECLARE_CONST s=SYMBOL OPEN PAR OPEN vars=datatype_symbol+ CLOSE ty=sort CLOSE CLOSE + { let loc = L.mk_pos $startpos $endpos in S.fun_decl ~loc I.(mk term s) vars [] ty } + | OPEN DECLARE_DATATYPE s=SYMBOL d=datatype_dec CLOSE + { let vars, constructors = d in + let loc = L.mk_pos $startpos $endpos in 
+ S.datatypes ~loc [I.(mk sort s), vars, constructors] } + | OPEN DECLARE_DATATYPES OPEN l1=sort_dec+ CLOSE OPEN l2=datatype_dec+ CLOSE CLOSE + { let res = + try + List.map2 (fun (s, _) (vars, constructors) -> s, vars, constructors) l1 l2 + with Invalid_argument _ -> + assert false + in + let loc = L.mk_pos $startpos $endpos in + S.datatypes ~loc res } + | OPEN DECLARE_FUN dec=function_dec(sort) CLOSE + { let id, vars, args, ret = dec in + let loc = L.mk_pos $startpos $endpos in + S.fun_decl ~loc id vars args ret } + | OPEN DECLARE_SORT s=SYMBOL n=NUM CLOSE + { let id = I.(mk sort s) in + let loc = L.mk_pos $startpos $endpos in + S.type_decl ~loc id (int_of_string n) } + | OPEN DEFINE_FUN f=function_def CLOSE + { let id, vars, args, ret, body = f in + let loc = L.mk_pos $startpos $endpos in + S.fun_def ~loc id vars args ret body } + | OPEN DEFINE_FUN_REC f=function_def CLOSE + { let id, vars, args, ret, body = f in + let loc = L.mk_pos $startpos $endpos in + S.funs_def_rec ~loc [id, vars, args, ret, body] } + /* The syntax technically defines this reduction as having l and l' be the same length, + but that isn't easily expressible in menhir, so the check is delayed */ + | OPEN DEFINE_FUNS_REC OPEN l1=nonempty_list(OPEN dec=function_dec(sorted_var) CLOSE { dec }) CLOSE OPEN l2=term+ CLOSE CLOSE + { let res = + try List.map2 (fun (id, vars, args, ret) body -> id, vars, args, ret, body) l1 l2 + with Invalid_argument _ -> assert false + in + let loc = L.mk_pos $startpos $endpos in + S.funs_def_rec ~loc res } + | OPEN DEFINE_SORT s=SYMBOL OPEN args=SYMBOL* CLOSE ty=sort CLOSE + { let id = I.(mk sort s) in + let l = List.map I.(mk sort) args in + let loc = L.mk_pos $startpos $endpos in + S.type_def ~loc id l ty } + | OPEN ECHO s=STR CLOSE + { let loc = L.mk_pos $startpos $endpos in + S.echo ~loc s } + + | OPEN EXIT CLOSE + { let loc = L.mk_pos $startpos $endpos in S.exit ~loc () } + + | OPEN GET_ASSERTIONS CLOSE + { let loc = L.mk_pos $startpos $endpos in 
S.get_assertions ~loc () } + | OPEN GET_ASSIGNMENT CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_assignment ~loc () } + | OPEN GET_INFO i=info_flag CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_info ~loc i } + | OPEN GET_MODEL CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_model ~loc () } + | OPEN GET_OPTION k=KEYWORD CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_option ~loc k } + | OPEN GET_PROOF CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_proof ~loc () } + | OPEN GET_UNSAT_ASSUMPTIONS CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_unsat_assumptions ~loc () } + | OPEN GET_UNSAT_CORE CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_unsat_core ~loc () } + | OPEN GET_VALUE OPEN l=term+ CLOSE CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_value ~loc l } + + | OPEN POP n=NUM CLOSE + { let loc = L.mk_pos $startpos $endpos in S.pop ~loc (int_of_string n) } + | OPEN PUSH n=NUM CLOSE + { let loc = L.mk_pos $startpos $endpos in S.push ~loc (int_of_string n) } + | OPEN RESET CLOSE + { let loc = L.mk_pos $startpos $endpos in S.reset ~loc () } + | OPEN RESET_ASSERTIONS CLOSE + { let loc = L.mk_pos $startpos $endpos in S.reset_assertions ~loc () } + + | OPEN SET_INFO c=command_option CLOSE + { let loc = L.mk_pos $startpos $endpos in S.set_info ~loc c } + | OPEN SET_LOGIC s=SYMBOL CLOSE + { let loc = L.mk_pos $startpos $endpos in S.set_logic ~loc s } + | OPEN SET_OPTION c=command_option CLOSE + { let loc = L.mk_pos $startpos $endpos in S.set_option ~loc c } +; + +file: + | l=command* EOF + { l } +; + +input: + | EOF + { None } + | c=command + { Some c } + +%% diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/syntax.messages b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/syntax.messages new file mode 100644 index 0000000000000000000000000000000000000000..d8f7faf42388d95bc0180d6cc86eed74dabe2013 --- /dev/null +++ 
b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/syntax.messages @@ -0,0 +1,4738 @@ +# +# Syntax Error Messages for smtlib v2.6 +# +# Each error message *must* follow the following format: +# +# """ +# XXX +# production parsed (on a single line) +# what is expected at that point, +# possibly on multiple lines +# """ +# +# The error numbers can be pretty much arbitrary, but for each +# error number XXX, a corresponding test case file must exists as +# tests/parsing/smtlib/v2.6/errors/XXX_some_descr_of_the_error_Y.smt2 +# (Y is there to allow multiple example of the error test case to exist, +# for instance with various different error tokens) +# +# Notes: +# - some error messages are shared among more than one error case, +# in such cases, the error number is the same, so when modifying +# an error message, be sure to modify all occurrences of the same +# error number + +term: OPEN AS OPEN SYMBOL +## +## Ends in an error in state: 60. +## +## identifier -> OPEN . UNDERSCORE SYMBOL nonempty_list(index) CLOSE [ SYMBOL OPEN ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +001 +an identifier +an underscore: identifiers starting with an opening parenthesis must be +indexed identifiers, of the form "(_ symbol index+)" + +term: OPEN AS SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 530. +## +## qual_identifier -> OPEN AS identifier sort . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN AS identifier sort +## + +002 +a qualified identifier +a closing parenthesis + +term: OPEN AS SYMBOL UNDERSCORE +## +## Ends in an error in state: 529. +## +## qual_identifier -> OPEN AS identifier . sort CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN AS identifier +## + +003 +a qualified identifier +a sort, i.e. either an identifier, or a sort constructor application; +note that keywords and reserved words (such as '_', 'as', ...) 
are not +identifiers, and thus are not allowed here + +term: OPEN AS UNDERSCORE +## +## Ends in an error in state: 528. +## +## qual_identifier -> OPEN AS . identifier sort CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN AS +## + +004 +a qualified identifier +an identifier; +note that keywords and reserved words (such as '_', 'as', ...) are not +identifiers, and thus are not allowed here + +term: OPEN ATTRIBUTE SYMBOL KEYWORD BIN UNDERSCORE +## +## Ends in an error in state: 128. +## +## nonempty_list(attribute) -> attribute . [ CLOSE ] +## nonempty_list(attribute) -> attribute . nonempty_list(attribute) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## attribute +## + +005 +an attribute list +either a closing parenthesis, or another attribute of the form +"keyword value" + +term: OPEN ATTRIBUTE SYMBOL UNDERSCORE +## +## Ends in an error in state: 525. +## +## term -> OPEN ATTRIBUTE term . nonempty_list(attribute) CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN ATTRIBUTE term +## + +006 +a term +an attribute of the form "keyword value" + +term: OPEN ATTRIBUTE UNDERSCORE +## +## Ends in an error in state: 524. +## +## term -> OPEN ATTRIBUTE . term nonempty_list(attribute) CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN ATTRIBUTE +## + +007 +a term with attribute +a term + +term: OPEN EXISTS OPEN OPEN SYMBOL SYMBOL CLOSE CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 522. +## +## term -> OPEN EXISTS OPEN nonempty_list(sorted_var) CLOSE term . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN EXISTS OPEN nonempty_list(sorted_var) CLOSE term +## + +008 +a term +a closing parenthesis to end the existencially quantified formula + +term: OPEN EXISTS OPEN OPEN SYMBOL SYMBOL CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 521. +## +## term -> OPEN EXISTS OPEN nonempty_list(sorted_var) CLOSE . 
term CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN EXISTS OPEN nonempty_list(sorted_var) CLOSE +## + +009 +a term +a term (body for the existencial quantification) + +term: OPEN EXISTS OPEN UNDERSCORE +## +## Ends in an error in state: 519. +## +## term -> OPEN EXISTS OPEN . nonempty_list(sorted_var) CLOSE term CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN EXISTS OPEN +## + +010 +a term +a sorted variable of the form "(var sort)" + +term: OPEN EXISTS UNDERSCORE +## +## Ends in an error in state: 518. +## +## term -> OPEN EXISTS . OPEN nonempty_list(sorted_var) CLOSE term CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN EXISTS +## + +011 +a term +a list of sorted variables, starting with an opening parenthesis + +term: OPEN FORALL OPEN OPEN SYMBOL SYMBOL CLOSE CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 516. +## +## term -> OPEN FORALL OPEN nonempty_list(sorted_var) CLOSE term . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN FORALL OPEN nonempty_list(sorted_var) CLOSE term +## + +012 +a term +a closing parenthesis to end the universally quantified formula + +term: OPEN FORALL OPEN OPEN SYMBOL SYMBOL CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 515. +## +## term -> OPEN FORALL OPEN nonempty_list(sorted_var) CLOSE . term CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN FORALL OPEN nonempty_list(sorted_var) CLOSE +## + +013 +a term +a term (body for the universal quantification) + +term: OPEN FORALL OPEN OPEN SYMBOL SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 112. +## +## nonempty_list(sorted_var) -> sorted_var . [ CLOSE ] +## nonempty_list(sorted_var) -> sorted_var . 
nonempty_list(sorted_var) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## sorted_var +## + +014 +a list of sorted variables +either a closing parentheis, or a sorted var of the form "(var sort)" + +term: OPEN FORALL OPEN OPEN SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 110. +## +## sorted_var -> OPEN SYMBOL sort . CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN SYMBOL sort +## + +015 +a sorted variable +a closing parenthesis + +term: OPEN FORALL OPEN OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 109. +## +## sorted_var -> OPEN SYMBOL . sort CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN SYMBOL +## + +016 +a sorted variable +a sort, i.e. either an identifier, or a sort constructor application; +note that keywords and reserved words (such as '_', 'as', ...) are not +identifiers, and thus are not allowed here + +term: OPEN FORALL OPEN OPEN UNDERSCORE +## +## Ends in an error in state: 108. +## +## sorted_var -> OPEN . SYMBOL sort CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +017 +a sorted variable +a symbol, i.e. a variable name + +term: OPEN FORALL OPEN UNDERSCORE +## +## Ends in an error in state: 513. +## +## term -> OPEN FORALL OPEN . nonempty_list(sorted_var) CLOSE term CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN FORALL OPEN +## + +018 +a term +a sorted variable of the form "(var sort)" + +term: OPEN FORALL UNDERSCORE +## +## Ends in an error in state: 512. +## +## term -> OPEN FORALL . OPEN nonempty_list(sorted_var) CLOSE term CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN FORALL +## + +019 +a term +a list of sorted variables, starting with an opening parenthesis + +term: OPEN LET OPEN OPEN SYMBOL BIN CLOSE CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 510. +## +## term -> OPEN LET OPEN nonempty_list(var_binding) CLOSE term . 
CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN LET OPEN nonempty_list(var_binding) CLOSE term +## + +020 +a term +a closing parenthesis to end the let binding + +term: OPEN LET OPEN OPEN SYMBOL BIN CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 509. +## +## term -> OPEN LET OPEN nonempty_list(var_binding) CLOSE . term CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN LET OPEN nonempty_list(var_binding) CLOSE +## + +021 +a term +a term (body for the let binding) + +term: OPEN LET OPEN OPEN SYMBOL BIN CLOSE UNDERSCORE +## +## Ends in an error in state: 100. +## +## nonempty_list(var_binding) -> var_binding . [ CLOSE ] +## nonempty_list(var_binding) -> var_binding . nonempty_list(var_binding) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## var_binding +## + +022 +a list of variable binding +a closing parenthesis or a variable binding of the form "(var term)" + +term: OPEN LET OPEN OPEN SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 98. +## +## var_binding -> OPEN SYMBOL term . CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN SYMBOL term +## + +023 +a variable binding +a closing parenthesis + +term: OPEN LET OPEN OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 97. +## +## var_binding -> OPEN SYMBOL . term CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN SYMBOL +## + +024 +a variable binding +a term + +term: OPEN LET OPEN OPEN UNDERSCORE +## +## Ends in an error in state: 96. +## +## var_binding -> OPEN . SYMBOL term CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +025 +a variable binding +a symbol (i.e. variable name) + +term: OPEN LET OPEN UNDERSCORE +## +## Ends in an error in state: 507. +## +## term -> OPEN LET OPEN . 
nonempty_list(var_binding) CLOSE term CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN LET OPEN +## + +026 +a term +a variable binding of the form "(var term)" + +term: OPEN LET UNDERSCORE +## +## Ends in an error in state: 506. +## +## term -> OPEN LET . OPEN nonempty_list(var_binding) CLOSE term CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN LET +## + +027 +a term +a variable binding list, starting with an opening parenthesis + +term: OPEN MATCH SYMBOL OPEN OPEN OPEN SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 78. +## +## nonempty_list(pattern_symbol) -> pattern_symbol . [ CLOSE ] +## nonempty_list(pattern_symbol) -> pattern_symbol . nonempty_list(pattern_symbol) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## pattern_symbol +## + +028 +arguments of a constructor in a pattern +a closing parenthesis, or a symbol to bind the argument; +note that keywords and reserved words (such as '_', 'as', ...) are not +symbols, and thus are not allowed here + +term: OPEN MATCH SYMBOL OPEN OPEN OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 77. +## +## pattern -> OPEN pattern_symbol . nonempty_list(pattern_symbol) CLOSE [ SYMBOL STR OPEN NUM HEX DEC BIN ] +## +## The known suffix of the stack is as follows: +## OPEN pattern_symbol +## + +029 +arguments of a constructor in a pattern +a symbol to bind the first constructor argument; +note that keywords and reserved words (such as '_', 'as', ...) are not +symbols, and thus are not allowed here + +term: OPEN MATCH SYMBOL OPEN OPEN OPEN UNDERSCORE +## +## Ends in an error in state: 76. +## +## pattern -> OPEN . pattern_symbol nonempty_list(pattern_symbol) CLOSE [ SYMBOL STR OPEN NUM HEX DEC BIN ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +030 +a pattern +a symbol (either a variable or a datatype constructor); +note that keywords and reserved words (such as '_', 'as', ...) 
are not +symbols, and thus are not allowed here + +term: OPEN MATCH SYMBOL OPEN OPEN SYMBOL SYMBOL CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 504. +## +## term -> OPEN MATCH term OPEN nonempty_list(match_case) CLOSE . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN MATCH term OPEN nonempty_list(match_case) CLOSE +## + +031 +a match +a closing parenthesis to close the match + +term: OPEN MATCH SYMBOL OPEN OPEN SYMBOL SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 92. +## +## nonempty_list(match_case) -> match_case . [ CLOSE ] +## nonempty_list(match_case) -> match_case . nonempty_list(match_case) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## match_case +## + +032 +a list of match cases +a closing parenthesis or a match case of the form "(pattern body)" + +term: OPEN MATCH SYMBOL OPEN OPEN SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 84. +## +## match_case -> OPEN pattern term . CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN pattern term +## + +033 +a match case +a closing parenthesis to close the match case + +term: OPEN MATCH SYMBOL OPEN OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 83. +## +## match_case -> OPEN pattern . term CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN pattern +## + +034 +a match case +a term for the case body + +term: OPEN MATCH SYMBOL OPEN OPEN UNDERSCORE +## +## Ends in an error in state: 74. +## +## match_case -> OPEN . pattern term CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +035 +a match case +a pattern, i.e. either a symbol or a datatype pattern of +the form "(symbol symbol+)"; +note that keywords and reserved words (such as '_', 'as', ...) are not +symbols, and thus are not allowed here + +term: OPEN MATCH SYMBOL OPEN UNDERSCORE +## +## Ends in an error in state: 502. +## +## term -> OPEN MATCH term OPEN . 
nonempty_list(match_case) CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN MATCH term OPEN +## + +036 +a list of match cases +a match case of the form "(pattern term)" + +term: OPEN MATCH SYMBOL UNDERSCORE +## +## Ends in an error in state: 501. +## +## term -> OPEN MATCH term . OPEN nonempty_list(match_case) CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN MATCH term +## + +037 +a match +a match case list, starting with an opening parenthesis + +term: OPEN MATCH UNDERSCORE +## +## Ends in an error in state: 500. +## +## term -> OPEN MATCH . term OPEN nonempty_list(match_case) CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN MATCH +## + +038 +a match +a term to match (i.e. the scrutinee of the match) + +term: OPEN OPEN AS SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 69. +## +## qual_identifier -> OPEN AS identifier sort . CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN AS identifier sort +## + +002 +a qualified identifier +a closing parenthesis + +term: OPEN OPEN AS SYMBOL UNDERSCORE +## +## Ends in an error in state: 61. +## +## qual_identifier -> OPEN AS identifier . sort CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN AS identifier +## + +003 +a qualified identifier +a sort, i.e. either an identifier, or a sort constructor application; +note that keywords (such as '_', 'as', ...) are none of these, and thus +are not allowed here. + +term: OPEN OPEN AS UNDERSCORE +## +## Ends in an error in state: 59. +## +## qual_identifier -> OPEN AS . identifier sort CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN AS +## + +004 +a qualified identifier +an identifier. +Note that keywords (such as '_', 'as', ...) are not identifiers, +and thus are not allowed here. 
+ +term: OPEN OPEN SYMBOL +## +## Ends in an error in state: 58. +## +## identifier -> OPEN . UNDERSCORE SYMBOL nonempty_list(index) CLOSE [ SYMBOL STR OPEN NUM HEX DEC BIN ] +## qual_identifier -> OPEN . AS identifier sort CLOSE [ SYMBOL STR OPEN NUM HEX DEC BIN ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +039 +an identifier in parentheses +either an indexed identifier (starting with an underscore) +or an 'as' type ascription; +note that this is because of the preceding opening parenthesis + +term: OPEN OPEN UNDERSCORE SYMBOL UNDERSCORE +## +## Ends in an error in state: 50. +## +## identifier -> OPEN UNDERSCORE SYMBOL . nonempty_list(index) CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN UNDERSCORE SYMBOL +## + +040 +an indexed identifier +an index, i.e. either a numeral, a symbol, or a hexadecimal number; +note that keywords and reserved words (such as '_', 'as', ...) are not +symbols, and thus are not allowed here + +term: OPEN OPEN UNDERSCORE UNDERSCORE +## +## Ends in an error in state: 49. +## +## identifier -> OPEN UNDERSCORE . SYMBOL nonempty_list(index) CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN UNDERSCORE +## + +041 +an indexed identifier +a symbol; +note that keywords and reserved words (such as '_', 'as', ...) are not +symbols, and thus are not allowed here + +term: OPEN STR +## +## Ends in an error in state: 495. +## +## identifier -> OPEN . UNDERSCORE SYMBOL nonempty_list(index) CLOSE [ # ] +## qual_identifier -> OPEN . AS identifier sort CLOSE [ # ] +## term -> OPEN . qual_identifier nonempty_list(term) CLOSE [ # ] +## term -> OPEN . LET OPEN nonempty_list(var_binding) CLOSE term CLOSE [ # ] +## term -> OPEN . FORALL OPEN nonempty_list(sorted_var) CLOSE term CLOSE [ # ] +## term -> OPEN . EXISTS OPEN nonempty_list(sorted_var) CLOSE term CLOSE [ # ] +## term -> OPEN . 
MATCH term OPEN nonempty_list(match_case) CLOSE CLOSE [ # ] +## term -> OPEN . ATTRIBUTE term nonempty_list(attribute) CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +042 +a term +a term construction (identifier, let binding, quantification, ...); +note that this expectation is caused by the preceding opening parenthesis + +term: OPEN SYMBOL OPEN STR +## +## Ends in an error in state: 48. +## +## identifier -> OPEN . UNDERSCORE SYMBOL nonempty_list(index) CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## qual_identifier -> OPEN . AS identifier sort CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## term -> OPEN . qual_identifier nonempty_list(term) CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## term -> OPEN . LET OPEN nonempty_list(var_binding) CLOSE term CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## term -> OPEN . FORALL OPEN nonempty_list(sorted_var) CLOSE term CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## term -> OPEN . EXISTS OPEN nonempty_list(sorted_var) CLOSE term CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## term -> OPEN . MATCH term OPEN nonempty_list(match_case) CLOSE CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## term -> OPEN . ATTRIBUTE term nonempty_list(attribute) CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +042 +a term +a term construction (identifier, let binding, quantification, ...); +note that this expectation is caused by the preceding opening parenthesis + +term: OPEN SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 131. +## +## nonempty_list(term) -> term . [ CLOSE ] +## nonempty_list(term) -> term . nonempty_list(term) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## term +## + +043 +a list of terms +either a closing parenthesis, or another term; +note that keywords and reserved words (such as '_', 'as', ...)
are +not terms, and thus are not allowed here + +term: OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 532. +## +## term -> OPEN qual_identifier . nonempty_list(term) CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN qual_identifier +## + +044 +a function application +a term as argument; +note that keywords and reserved words (such as '_', 'as', ...) are +not terms, and thus are not allowed here + +term: OPEN UNDERSCORE SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 56. +## +## nonempty_list(index) -> index . [ CLOSE ] +## nonempty_list(index) -> index . nonempty_list(index) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## index +## + +045 +an index list +either a closing parenthesis, or an index (i.e. a numeral, a symbol, +or a hexadecimal); +note that keywords and reserved words (such as '_', 'as', ...) are +not symbols, and thus are not allowed here + +term: OPEN UNDERSCORE SYMBOL UNDERSCORE +## +## Ends in an error in state: 497. +## +## identifier -> OPEN UNDERSCORE SYMBOL . nonempty_list(index) CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN UNDERSCORE SYMBOL +## + +040 +an indexed identifier +an index, i.e. either a numeral, a symbol, or a hexadecimal number; +note that keywords and reserved words (such as '_', 'as', ...) are not +symbols, and thus are not allowed here + +term: OPEN UNDERSCORE UNDERSCORE +## +## Ends in an error in state: 496. +## +## identifier -> OPEN UNDERSCORE . SYMBOL nonempty_list(index) CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN UNDERSCORE +## + +041 +an indexed identifier +a symbol; +note that keywords and reserved words (such as '_', 'as', ...) are not +symbols, and thus are not allowed here + +term: UNDERSCORE +## +## Ends in an error in state: 492. +## +## term' -> . 
term [ # ] +## +## The known suffix of the stack is as follows: +## +## + +046 +a term +a term construction (symbol, function application, match, let binding, ...); +note that keywords and reserved words (such as '_', 'as', ...) are +not symbols, and thus are not allowed here + +input: OPEN ASSERT OPEN PAR OPEN SYMBOL CLOSE SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 485. +## +## command -> OPEN ASSERT OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE term CLOSE . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE term CLOSE +## + +173 +a command +a closing parenthesis + + +input: OPEN ASSERT OPEN PAR OPEN SYMBOL CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 484. +## +## command -> OPEN ASSERT OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE term . CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE term +## + +174 +a command +two closing parentheses + +input: OPEN ASSERT OPEN PAR OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 483. +## +## command -> OPEN ASSERT OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE . term CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE +## + +175 +a term +a term construction (symbol, function application, match, let binding, ...); +note that keywords and reserved words (such as '_', 'as', ...) are +not symbols, and thus are not allowed here + +input: OPEN ASSERT OPEN PAR OPEN UNDERSCORE +## +## Ends in an error in state: 481. +## +## command -> OPEN ASSERT OPEN PAR OPEN .
nonempty_list(datatype_symbol) CLOSE term CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT OPEN PAR OPEN +## + +176 +a non-empty list of type variables +a type variable + +input: OPEN ASSERT OPEN PAR UNDERSCORE +## +## Ends in an error in state: 480. +## +## command -> OPEN ASSERT OPEN PAR . OPEN nonempty_list(datatype_symbol) CLOSE term CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT OPEN PAR +## + +177 +a non-empty list of type variables +an opening parenthesis + +input: OPEN ASSERT SYMBOL UNDERSCORE +## +## Ends in an error in state: 487. +## +## command -> OPEN ASSERT term . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT term +## + +049 +an assertion +a closing parenthesis + +input: OPEN ASSERT UNDERSCORE +## +## Ends in an error in state: 478. +## +## command -> OPEN ASSERT . term CLOSE [ # ] +## command -> OPEN ASSERT . OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE term CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT +## + +048 +a term +a term construction (symbol, function application, match, let binding, ...); +note that keywords and reserved words (such as '_', 'as', ...) are +not symbols, and thus are not allowed here + +input: OPEN CHECK_SAT UNDERSCORE +## +## Ends in an error in state: 476. +## +## command -> OPEN CHECK_SAT . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN CHECK_SAT +## + +050 +a check-sat command +a closing parenthesis + +input: OPEN CHECK_SAT_ASSUMING OPEN CLOSE UNDERSCORE +## +## Ends in an error in state: 474. +## +## command -> OPEN CHECK_SAT_ASSUMING OPEN list(prop_literal) CLOSE . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN CHECK_SAT_ASSUMING OPEN list(prop_literal) CLOSE +## + +051 +a check-sat-assuming command +a closing parenthesis + +input: OPEN CHECK_SAT_ASSUMING OPEN UNDERSCORE +## +## Ends in an error in state: 472. 
+## +## command -> OPEN CHECK_SAT_ASSUMING OPEN . list(prop_literal) CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN CHECK_SAT_ASSUMING OPEN +## + +052 +a list of propositional literals +a propositional literal, i.e. either a symbol or the negation of a symbol + +input: OPEN CHECK_SAT_ASSUMING UNDERSCORE +## +## Ends in an error in state: 471. +## +## command -> OPEN CHECK_SAT_ASSUMING . OPEN list(prop_literal) CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN CHECK_SAT_ASSUMING +## + +053 +a check-sat-assuming command +a list of propositional literals, starting with an opening parenthesis + +input: OPEN DECLARE_CONST SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 469. +## +## command -> OPEN DECLARE_CONST SYMBOL sort . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL sort +## + +054 +a constant declaration +a closing parenthesis + +input: OPEN DECLARE_CONST SYMBOL UNDERSCORE +## +## Ends in an error in state: 460. +## +## command -> OPEN DECLARE_CONST SYMBOL . sort CLOSE [ # ] +## command -> OPEN DECLARE_CONST SYMBOL . OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE sort CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL +## + +055 +a constant declaration +a sort + +input: OPEN DECLARE_CONST UNDERSCORE +## +## Ends in an error in state: 459. +## +## command -> OPEN DECLARE_CONST . SYMBOL sort CLOSE [ # ] +## command -> OPEN DECLARE_CONST . SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE sort CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST +## + +056 +a constant declaration +a symbol; +note that keywords and reserved words (such as '_', 'as', ...) are not +symbols, and thus are not allowed here + +input: OPEN DECLARE_DATATYPE SYMBOL OPEN OPEN SYMBOL CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 457. 
+## +## command -> OPEN DECLARE_DATATYPE SYMBOL datatype_dec . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPE SYMBOL datatype_dec +## + +057 +a datatype declaration +a closing parenthesis + +input: OPEN DECLARE_DATATYPE SYMBOL UNDERSCORE +## +## Ends in an error in state: 456. +## +## command -> OPEN DECLARE_DATATYPE SYMBOL . datatype_dec CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPE SYMBOL +## + +058 +a datatype declaration +an opening parenthesis to start the datatype declaration + +input: OPEN DECLARE_DATATYPE UNDERSCORE +## +## Ends in an error in state: 455. +## +## command -> OPEN DECLARE_DATATYPE . SYMBOL datatype_dec CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPE +## + +059 +a datatype declaration +a symbol + +input: OPEN DECLARE_DATATYPES OPEN OPEN SYMBOL NUM CLOSE CLOSE OPEN OPEN OPEN SYMBOL CLOSE CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 453. +## +## command -> OPEN DECLARE_DATATYPES OPEN nonempty_list(sort_dec) CLOSE OPEN nonempty_list(datatype_dec) CLOSE . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPES OPEN nonempty_list(sort_dec) CLOSE OPEN nonempty_list(datatype_dec) CLOSE +## + +060 +a datatypes declaration +a closing parenthesis + +input: OPEN DECLARE_DATATYPES OPEN OPEN SYMBOL NUM CLOSE CLOSE OPEN UNDERSCORE +## +## Ends in an error in state: 451. +## +## command -> OPEN DECLARE_DATATYPES OPEN nonempty_list(sort_dec) CLOSE OPEN . nonempty_list(datatype_dec) CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPES OPEN nonempty_list(sort_dec) CLOSE OPEN +## + +061 +a datatypes definition +an opening parenthesis to start a list of constructors for the first defined datatype + +input: OPEN DECLARE_DATATYPES OPEN OPEN SYMBOL NUM CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 450. 
+## +## command -> OPEN DECLARE_DATATYPES OPEN nonempty_list(sort_dec) CLOSE . OPEN nonempty_list(datatype_dec) CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPES OPEN nonempty_list(sort_dec) CLOSE +## + +062 +a datatypes declaration +an opening parenthesis to start a list of datatype definitions, +one for each of the sorts being declared + +input: OPEN DECLARE_DATATYPES OPEN UNDERSCORE +## +## Ends in an error in state: 448. +## +## command -> OPEN DECLARE_DATATYPES OPEN . nonempty_list(sort_dec) CLOSE OPEN nonempty_list(datatype_dec) CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPES OPEN +## + +063 +a datatypes declaration +a parametric sort declaration of the form "(symbol num)" + +input: OPEN DECLARE_DATATYPES UNDERSCORE +## +## Ends in an error in state: 447. +## +## command -> OPEN DECLARE_DATATYPES . OPEN nonempty_list(sort_dec) CLOSE OPEN nonempty_list(datatype_dec) CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPES +## + +064 +a datatypes declaration +a list of sort declarations, starting with an opening parenthesis + +input: OPEN DECLARE_FUN SYMBOL OPEN CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 445. +## +## command -> OPEN DECLARE_FUN function_dec(sort) . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_FUN function_dec(sort) +## + +065 +a function declaration +a closing parenthesis + +input: OPEN DECLARE_FUN SYMBOL OPEN CLOSE UNDERSCORE +## +## Ends in an error in state: 268. +## +## function_dec(sort) -> SYMBOL OPEN list(sort) CLOSE . sort [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN list(sort) CLOSE +## + +066 +a function declaration +a sort for the return type of the function + +input: OPEN DECLARE_FUN SYMBOL OPEN PAR OPEN SYMBOL CLOSE OPEN CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 265.
+## +## function_dec(sort) -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sort) CLOSE sort . CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sort) CLOSE sort +## + +179 +a function declaration +two closing parentheses + +input: OPEN DECLARE_FUN SYMBOL OPEN PAR OPEN SYMBOL CLOSE OPEN CLOSE UNDERSCORE +## +## Ends in an error in state: 264. +## +## function_dec(sort) -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sort) CLOSE . sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sort) CLOSE +## + +180 +a function declaration +the return sort + + +input: OPEN DECLARE_FUN SYMBOL OPEN PAR OPEN SYMBOL CLOSE OPEN UNDERSCORE +## +## Ends in an error in state: 260. +## +## function_dec(sort) -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN . list(sort) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN +## + +181 +a function declaration +a sort or a closing parenthesis + +input: OPEN DECLARE_FUN SYMBOL OPEN PAR OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 259. +## +## function_dec(sort) -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE . OPEN list(sort) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE +## + +182 +a function declaration +an opening parenthesis for starting a possibly empty list of sorts + +input: OPEN DECLARE_FUN SYMBOL OPEN PAR OPEN UNDERSCORE +## +## Ends in an error in state: 257. +## +## function_dec(sort) -> SYMBOL OPEN PAR OPEN .
nonempty_list(datatype_symbol) CLOSE OPEN list(sort) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN +## + +183 +the type variable of a function declaration +a type variable + +input: OPEN DECLARE_FUN SYMBOL OPEN PAR UNDERSCORE +## +## Ends in an error in state: 256. +## +## function_dec(sort) -> SYMBOL OPEN PAR . OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sort) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR +## + +184 +the list of type variable of a function declaration +an opening parenthesis + +input: OPEN DECLARE_FUN SYMBOL OPEN UNDERSCORE +## +## Ends in an error in state: 255. +## +## function_dec(sort) -> SYMBOL OPEN . list(sort) CLOSE sort [ CLOSE ] +## function_dec(sort) -> SYMBOL OPEN . PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sort) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN +## + +067 +a function declaration +a closing parenthesis, or a list of sorts for the arguments of the function + +input: OPEN DECLARE_FUN SYMBOL UNDERSCORE +## +## Ends in an error in state: 254. +## +## function_dec(sort) -> SYMBOL . OPEN list(sort) CLOSE sort [ CLOSE ] +## function_dec(sort) -> SYMBOL . OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sort) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL +## + +068 +a function declaration +an opening parenthesis to start the list of sorts for the function's arguments + +input: OPEN DECLARE_FUN UNDERSCORE +## +## Ends in an error in state: 444. +## +## command -> OPEN DECLARE_FUN . function_dec(sort) CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_FUN +## + +069 +a function declaration +a symbol for the function's name + +input: OPEN DECLARE_SORT SYMBOL NUM UNDERSCORE +## +## Ends in an error in state: 442. +## +## command -> OPEN DECLARE_SORT SYMBOL NUM . 
CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_SORT SYMBOL NUM +## + +070 +a sort declaration +a closing parenthesis + +input: OPEN DECLARE_SORT SYMBOL UNDERSCORE +## +## Ends in an error in state: 441. +## +## command -> OPEN DECLARE_SORT SYMBOL . NUM CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_SORT SYMBOL +## + +071 +a sort declaration +a numeral for the arity of the sort being declared + +input: OPEN DECLARE_SORT UNDERSCORE +## +## Ends in an error in state: 440. +## +## command -> OPEN DECLARE_SORT . SYMBOL NUM CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_SORT +## + +072 +a sort declaration +a symbol for the sort name + +input: OPEN DEFINE_FUN SYMBOL OPEN CLOSE SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 438. +## +## command -> OPEN DEFINE_FUN function_def . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUN function_def +## + +073 +a function definition +a closing parenthesis + +input: OPEN DEFINE_FUN UNDERSCORE +## +## Ends in an error in state: 437. +## +## command -> OPEN DEFINE_FUN . function_def CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUN +## + +074 +a function definition +a symbol for the function's name + +input: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN CLOSE SYMBOL CLOSE CLOSE OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 435. +## +## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(__anonymous_0) CLOSE OPEN nonempty_list(term) CLOSE . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUNS_REC OPEN nonempty_list(__anonymous_0) CLOSE OPEN nonempty_list(term) CLOSE +## + +075 +a recursive functions definition +a closing parenthesis + +input: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN CLOSE SYMBOL CLOSE CLOSE OPEN UNDERSCORE +## +## Ends in an error in state: 433. 
+## +## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(__anonymous_0) CLOSE OPEN . nonempty_list(term) CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUNS_REC OPEN nonempty_list(__anonymous_0) CLOSE OPEN +## + +076 +a recursive functions definition +a term for the first function's body + +input: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN CLOSE SYMBOL CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 432. +## +## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(__anonymous_0) CLOSE . OPEN nonempty_list(term) CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUNS_REC OPEN nonempty_list(__anonymous_0) CLOSE +## + +077 +a recursive functions definition +an opening parenthesis to start a list of the function's bodies + +input: OPEN DEFINE_FUNS_REC OPEN UNDERSCORE +## +## Ends in an error in state: 430. +## +## command -> OPEN DEFINE_FUNS_REC OPEN . nonempty_list(__anonymous_0) CLOSE OPEN nonempty_list(term) CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUNS_REC OPEN +## + +078 +a recursive functions definition +a function declaration of the form "(name (sort*) sort)", +or "(name (par (var+) (sort*) sort))" + +input: OPEN DEFINE_FUNS_REC UNDERSCORE +## +## Ends in an error in state: 429. +## +## command -> OPEN DEFINE_FUNS_REC . OPEN nonempty_list(__anonymous_0) CLOSE OPEN nonempty_list(term) CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUNS_REC +## + +079 +a recursive functions declaration +an opening parenthesis to start a list of function declarations + +input: OPEN DEFINE_FUN_REC SYMBOL OPEN CLOSE SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 427. +## +## command -> OPEN DEFINE_FUN_REC function_def .
CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUN_REC function_def +## + +080 +a recursive function definition +a closing parenthesis + +input: OPEN DEFINE_FUN_REC UNDERSCORE +## +## Ends in an error in state: 426. +## +## command -> OPEN DEFINE_FUN_REC . function_def CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUN_REC +## + +081 +a recursive function definition +a symbol for the function's name + +input: OPEN DEFINE_SORT SYMBOL OPEN CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 424. +## +## command -> OPEN DEFINE_SORT SYMBOL OPEN list(SYMBOL) CLOSE sort . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_SORT SYMBOL OPEN list(SYMBOL) CLOSE sort +## + +082 +a sort definition +a closing parenthesis + +input: OPEN DEFINE_SORT SYMBOL OPEN CLOSE UNDERSCORE +## +## Ends in an error in state: 423. +## +## command -> OPEN DEFINE_SORT SYMBOL OPEN list(SYMBOL) CLOSE . sort CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_SORT SYMBOL OPEN list(SYMBOL) CLOSE +## + +083 +a sort definition +a sort for the definition body + +input: OPEN DEFINE_SORT SYMBOL OPEN UNDERSCORE +## +## Ends in an error in state: 421. +## +## command -> OPEN DEFINE_SORT SYMBOL OPEN . list(SYMBOL) CLOSE sort CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_SORT SYMBOL OPEN +## + +084 +a sort definition +a closing parenthesis, or a list of symbols for the definition arguments + +input: OPEN DEFINE_SORT SYMBOL UNDERSCORE +## +## Ends in an error in state: 420. +## +## command -> OPEN DEFINE_SORT SYMBOL . OPEN list(SYMBOL) CLOSE sort CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_SORT SYMBOL +## + +085 +a sort definition +an opening parenthesis to start a list of arguments + +input: OPEN DEFINE_SORT UNDERSCORE +## +## Ends in an error in state: 419. +## +## command -> OPEN DEFINE_SORT . 
SYMBOL OPEN list(SYMBOL) CLOSE sort CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_SORT +## + +086 +a sort definition +a symbol for the defined sort's name + +input: OPEN ECHO STR UNDERSCORE +## +## Ends in an error in state: 417. +## +## command -> OPEN ECHO STR . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN ECHO STR +## + +087 +an echo command +a closing parenthesis + +input: OPEN ECHO UNDERSCORE +## +## Ends in an error in state: 416. +## +## command -> OPEN ECHO . STR CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN ECHO +## + +088 +an echo command +a string literal + +input: OPEN EXIT UNDERSCORE +## +## Ends in an error in state: 414. +## +## command -> OPEN EXIT . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN EXIT +## + +089 +an exit command +a closing parenthesis + +input: OPEN GET_ASSERTIONS UNDERSCORE +## +## Ends in an error in state: 412. +## +## command -> OPEN GET_ASSERTIONS . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN GET_ASSERTIONS +## + +090 +a get-assertions command +a closing parenthesis + +input: OPEN GET_ASSIGNMENT UNDERSCORE +## +## Ends in an error in state: 410. +## +## command -> OPEN GET_ASSIGNMENT . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN GET_ASSIGNMENT +## + +091 +a get-assignment command +a closing parenthesis + +input: OPEN GET_INFO KEYWORD UNDERSCORE +## +## Ends in an error in state: 408. +## +## command -> OPEN GET_INFO info_flag . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN GET_INFO info_flag +## + +092 +a get-info command +a closing parenthesis + +input: OPEN GET_INFO UNDERSCORE +## +## Ends in an error in state: 407. +## +## command -> OPEN GET_INFO . 
info_flag CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN GET_INFO +## + +093 +a get-info command +a keyword of the form ":symbol" + +input: OPEN GET_MODEL UNDERSCORE +## +## Ends in an error in state: 405. +## +## command -> OPEN GET_MODEL . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN GET_MODEL +## + +094 +a get-model command +a closing parenthesis + +input: OPEN GET_OPTION KEYWORD UNDERSCORE +## +## Ends in an error in state: 403. +## +## command -> OPEN GET_OPTION KEYWORD . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN GET_OPTION KEYWORD +## + +095 +a get-option command +a closing parenthesis + +input: OPEN GET_OPTION UNDERSCORE +## +## Ends in an error in state: 402. +## +## command -> OPEN GET_OPTION . KEYWORD CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN GET_OPTION +## + +096 +a get-option command +a keyword of the form ":symbol" + +input: OPEN GET_PROOF UNDERSCORE +## +## Ends in an error in state: 400. +## +## command -> OPEN GET_PROOF . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN GET_PROOF +## + +097 +a get-proof command +a closing parenthesis + +input: OPEN GET_UNSAT_ASSUMPTIONS UNDERSCORE +## +## Ends in an error in state: 398. +## +## command -> OPEN GET_UNSAT_ASSUMPTIONS . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN GET_UNSAT_ASSUMPTIONS +## + +098 +a get-unsat-assumptions command +a closing parenthesis + +input: OPEN GET_UNSAT_CORE UNDERSCORE +## +## Ends in an error in state: 396. +## +## command -> OPEN GET_UNSAT_CORE . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN GET_UNSAT_CORE +## + +099 +a get-unsat-core command +a closing parenthesis + +input: OPEN GET_VALUE OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 394. +## +## command -> OPEN GET_VALUE OPEN nonempty_list(term) CLOSE . 
CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN GET_VALUE OPEN nonempty_list(term) CLOSE +## + +100 +a get-value command +a closing parenthesis + +input: OPEN GET_VALUE OPEN UNDERSCORE +## +## Ends in an error in state: 392. +## +## command -> OPEN GET_VALUE OPEN . nonempty_list(term) CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN GET_VALUE OPEN +## + +101 +a get-value command +a term + +input: OPEN GET_VALUE UNDERSCORE +## +## Ends in an error in state: 391. +## +## command -> OPEN GET_VALUE . OPEN nonempty_list(term) CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN GET_VALUE +## + +102 +a get-value command +an opening parenthesis to start a list of terms + +input: OPEN POP NUM UNDERSCORE +## +## Ends in an error in state: 389. +## +## command -> OPEN POP NUM . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN POP NUM +## + +103 +a pop command +a closing parenthesis + +input: OPEN POP UNDERSCORE +## +## Ends in an error in state: 388. +## +## command -> OPEN POP . NUM CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN POP +## + +104 +a pop command +a numeral + +input: OPEN PUSH NUM UNDERSCORE +## +## Ends in an error in state: 386. +## +## command -> OPEN PUSH NUM . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN PUSH NUM +## + +105 +a push command +a closing parenthesis + +input: OPEN PUSH UNDERSCORE +## +## Ends in an error in state: 385. +## +## command -> OPEN PUSH . NUM CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN PUSH +## + +106 +a push command +a numeral + +input: OPEN RESET UNDERSCORE +## +## Ends in an error in state: 383. +## +## command -> OPEN RESET . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN RESET +## + +107 +a reset command +a closing parenthesis + +input: OPEN RESET_ASSERTIONS UNDERSCORE +## +## Ends in an error in state: 381. 
+## +## command -> OPEN RESET_ASSERTIONS . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN RESET_ASSERTIONS +## + +108 +a reset-assertions command +a closing parenthesis + +input: OPEN SET_INFO KEYWORD KEYWORD +## +## Ends in an error in state: 379. +## +## command -> OPEN SET_INFO command_option . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN SET_INFO command_option +## +## WARNING: This example involves spurious reductions. +## This implies that, although the LR(1) items shown above provide an +## accurate view of the past (what has been recognized so far), they +## may provide an INCOMPLETE view of the future (what was expected next). +## In state 3, spurious reduction of production option(attribute_value) -> +## In state 24, spurious reduction of production attribute -> KEYWORD option(attribute_value) +## In state 28, spurious reduction of production command_option -> attribute +## + +109 +a set-info command +a closing parenthesis, or an attribute value; +note that keywords and reserved words (such as '_', 'as', ...) are not +valid attribute values, and thus are not allowed here + +input: OPEN SET_INFO UNDERSCORE +## +## Ends in an error in state: 378. +## +## command -> OPEN SET_INFO . command_option CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN SET_INFO +## + +110 +a set-info command +an attribute of the form "keyword value?" + +input: OPEN SET_LOGIC SYMBOL UNDERSCORE +## +## Ends in an error in state: 376. +## +## command -> OPEN SET_LOGIC SYMBOL . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN SET_LOGIC SYMBOL +## + +111 +a set-logic command +a closing parenthesis + +input: OPEN SET_LOGIC UNDERSCORE +## +## Ends in an error in state: 375. +## +## command -> OPEN SET_LOGIC . 
SYMBOL CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN SET_LOGIC +## + +112 +a set-logic command +a symbol for the logic name + +input: OPEN SET_OPTION KEYWORD KEYWORD +## +## Ends in an error in state: 373. +## +## command -> OPEN SET_OPTION command_option . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN SET_OPTION command_option +## +## WARNING: This example involves spurious reductions. +## This implies that, although the LR(1) items shown above provide an +## accurate view of the past (what has been recognized so far), they +## may provide an INCOMPLETE view of the future (what was expected next). +## In state 3, spurious reduction of production option(attribute_value) -> +## In state 24, spurious reduction of production attribute -> KEYWORD option(attribute_value) +## In state 28, spurious reduction of production command_option -> attribute +## + +113 +a set-option command +a closing parenthesis, or an attribute value; +note that keywords and reserved words (such as '_', 'as', ...) are not +valid attribute values, and thus are not allowed here + +input: OPEN SET_OPTION UNDERSCORE +## +## Ends in an error in state: 372. +## +## command -> OPEN SET_OPTION . command_option CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN SET_OPTION +## + +114 +a set-option command +an attribute of the form "keyword value?" + +input: OPEN UNDERSCORE +## +## Ends in an error in state: 371. +## +## command -> OPEN . ASSERT term CLOSE [ # ] +## command -> OPEN . ASSERT OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE term CLOSE CLOSE [ # ] +## command -> OPEN . CHECK_SAT CLOSE [ # ] +## command -> OPEN . CHECK_SAT_ASSUMING OPEN list(prop_literal) CLOSE CLOSE [ # ] +## command -> OPEN . DECLARE_CONST SYMBOL sort CLOSE [ # ] +## command -> OPEN . DECLARE_CONST SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE sort CLOSE CLOSE [ # ] +## command -> OPEN . 
DECLARE_DATATYPE SYMBOL datatype_dec CLOSE [ # ] +## command -> OPEN . DECLARE_DATATYPES OPEN nonempty_list(sort_dec) CLOSE OPEN nonempty_list(datatype_dec) CLOSE CLOSE [ # ] +## command -> OPEN . DECLARE_FUN function_dec(sort) CLOSE [ # ] +## command -> OPEN . DECLARE_SORT SYMBOL NUM CLOSE [ # ] +## command -> OPEN . DEFINE_FUN function_def CLOSE [ # ] +## command -> OPEN . DEFINE_FUN_REC function_def CLOSE [ # ] +## command -> OPEN . DEFINE_FUNS_REC OPEN nonempty_list(__anonymous_0) CLOSE OPEN nonempty_list(term) CLOSE CLOSE [ # ] +## command -> OPEN . DEFINE_SORT SYMBOL OPEN list(SYMBOL) CLOSE sort CLOSE [ # ] +## command -> OPEN . ECHO STR CLOSE [ # ] +## command -> OPEN . EXIT CLOSE [ # ] +## command -> OPEN . GET_ASSERTIONS CLOSE [ # ] +## command -> OPEN . GET_ASSIGNMENT CLOSE [ # ] +## command -> OPEN . GET_INFO info_flag CLOSE [ # ] +## command -> OPEN . GET_MODEL CLOSE [ # ] +## command -> OPEN . GET_OPTION KEYWORD CLOSE [ # ] +## command -> OPEN . GET_PROOF CLOSE [ # ] +## command -> OPEN . GET_UNSAT_ASSUMPTIONS CLOSE [ # ] +## command -> OPEN . GET_UNSAT_CORE CLOSE [ # ] +## command -> OPEN . GET_VALUE OPEN nonempty_list(term) CLOSE CLOSE [ # ] +## command -> OPEN . POP NUM CLOSE [ # ] +## command -> OPEN . PUSH NUM CLOSE [ # ] +## command -> OPEN . RESET CLOSE [ # ] +## command -> OPEN . RESET_ASSERTIONS CLOSE [ # ] +## command -> OPEN . SET_INFO command_option CLOSE [ # ] +## command -> OPEN . SET_LOGIC SYMBOL CLOSE [ # ] +## command -> OPEN . SET_OPTION command_option CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +115 +a command +a command name + +input: UNDERSCORE +## +## Ends in an error in state: 370. +## +## input' -> . input [ # ] +## +## The known suffix of the stack is as follows: +## +## + +116 +an input statement +an opening parenthesis to start a command + +file: OPEN ASSERT OPEN ATTRIBUTE SYMBOL UNDERSCORE +## +## Ends in an error in state: 125. +## +## term -> OPEN ATTRIBUTE term . 
nonempty_list(attribute) CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN ATTRIBUTE term +## + +006 +attributes for a term +an attribute of the form "keyword value" + +file: OPEN ASSERT OPEN ATTRIBUTE UNDERSCORE +## +## Ends in an error in state: 124. +## +## term -> OPEN ATTRIBUTE . term nonempty_list(attribute) CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN ATTRIBUTE +## + +007 +a term with attribute +a term. + +file: OPEN ASSERT OPEN EXISTS OPEN OPEN SYMBOL SYMBOL CLOSE CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 122. +## +## term -> OPEN EXISTS OPEN nonempty_list(sorted_var) CLOSE term . CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN EXISTS OPEN nonempty_list(sorted_var) CLOSE term +## + +008 +a term +a closing parenthesis to end the existentially quantified formula + +file: OPEN ASSERT OPEN EXISTS OPEN OPEN SYMBOL SYMBOL CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 121. +## +## term -> OPEN EXISTS OPEN nonempty_list(sorted_var) CLOSE . term CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN EXISTS OPEN nonempty_list(sorted_var) CLOSE +## + +009 +a term +a term (body for the existential quantification) + +file: OPEN ASSERT OPEN EXISTS OPEN UNDERSCORE +## +## Ends in an error in state: 119. +## +## term -> OPEN EXISTS OPEN . nonempty_list(sorted_var) CLOSE term CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN EXISTS OPEN +## + +010 +a term +a sorted variable of the form "(var sort)" + +file: OPEN ASSERT OPEN EXISTS UNDERSCORE +## +## Ends in an error in state: 118. +## +## term -> OPEN EXISTS . 
OPEN nonempty_list(sorted_var) CLOSE term CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN EXISTS +## + +011 +a term +a list of sorted variables, starting with an opening parenthesis + +file: OPEN ASSERT OPEN FORALL OPEN OPEN SYMBOL SYMBOL CLOSE CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 116. +## +## term -> OPEN FORALL OPEN nonempty_list(sorted_var) CLOSE term . CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN FORALL OPEN nonempty_list(sorted_var) CLOSE term +## + +012 +a term +a closing parenthesis to end the universally quantified formula + +file: OPEN ASSERT OPEN FORALL OPEN OPEN SYMBOL SYMBOL CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 115. +## +## term -> OPEN FORALL OPEN nonempty_list(sorted_var) CLOSE . term CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN FORALL OPEN nonempty_list(sorted_var) CLOSE +## + +013 +a term +a term (body for the universal quantification) + +file: OPEN ASSERT OPEN FORALL OPEN UNDERSCORE +## +## Ends in an error in state: 107. +## +## term -> OPEN FORALL OPEN . nonempty_list(sorted_var) CLOSE term CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN FORALL OPEN +## + +018 +a term +a sorted variable of the form "(var sort)" + +file: OPEN ASSERT OPEN FORALL UNDERSCORE +## +## Ends in an error in state: 106. +## +## term -> OPEN FORALL . OPEN nonempty_list(sorted_var) CLOSE term CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN FORALL +## + +019 +a term +a list of sorted variables, starting with an opening parenthesis + +file: OPEN ASSERT OPEN LET OPEN OPEN SYMBOL BIN CLOSE CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 104. 
+## +## term -> OPEN LET OPEN nonempty_list(var_binding) CLOSE term . CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN LET OPEN nonempty_list(var_binding) CLOSE term +## + +020 +a term +a closing parenthesis to end the let binding + +file: OPEN ASSERT OPEN LET OPEN OPEN SYMBOL BIN CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 103. +## +## term -> OPEN LET OPEN nonempty_list(var_binding) CLOSE . term CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN LET OPEN nonempty_list(var_binding) CLOSE +## + +021 +a term +a term (body for the let binding) + +file: OPEN ASSERT OPEN LET OPEN UNDERSCORE +## +## Ends in an error in state: 95. +## +## term -> OPEN LET OPEN . nonempty_list(var_binding) CLOSE term CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN LET OPEN +## + +026 +a term +a variable binding of the form "(var term)" + +file: OPEN ASSERT OPEN LET UNDERSCORE +## +## Ends in an error in state: 94. +## +## term -> OPEN LET . OPEN nonempty_list(var_binding) CLOSE term CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN LET +## + +027 +a term +a variable binding list, starting with an opening parenthesis + +file: OPEN ASSERT OPEN MATCH SYMBOL OPEN OPEN SYMBOL BIN CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 90. +## +## term -> OPEN MATCH term OPEN nonempty_list(match_case) CLOSE . CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN MATCH term OPEN nonempty_list(match_case) CLOSE +## + +031 +a match +a closing parenthesis to close the match + +file: OPEN ASSERT OPEN MATCH SYMBOL OPEN UNDERSCORE +## +## Ends in an error in state: 73. +## +## term -> OPEN MATCH term OPEN . 
nonempty_list(match_case) CLOSE CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN MATCH term OPEN +## + +036 +a list of match cases +a match case of the form "(pattern term)" + +file: OPEN ASSERT OPEN MATCH SYMBOL UNDERSCORE +## +## Ends in an error in state: 72. +## +## term -> OPEN MATCH term . OPEN nonempty_list(match_case) CLOSE CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN MATCH term +## + +037 +a match +a match case list, starting with an opening parenthesis + +file: OPEN ASSERT OPEN MATCH UNDERSCORE +## +## Ends in an error in state: 71. +## +## term -> OPEN MATCH . term OPEN nonempty_list(match_case) CLOSE CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN MATCH +## + +038 +a match +a term to match (i.e. the scrutinee of the match) + +file: OPEN ASSERT OPEN STR +## +## Ends in an error in state: 355. +## +## command -> OPEN ASSERT OPEN . PAR OPEN nonempty_list(datatype_symbol) CLOSE term CLOSE CLOSE [ OPEN EOF ] +## identifier -> OPEN . UNDERSCORE SYMBOL nonempty_list(index) CLOSE [ CLOSE ] +## qual_identifier -> OPEN . AS identifier sort CLOSE [ CLOSE ] +## term -> OPEN . qual_identifier nonempty_list(term) CLOSE [ CLOSE ] +## term -> OPEN . LET OPEN nonempty_list(var_binding) CLOSE term CLOSE [ CLOSE ] +## term -> OPEN . FORALL OPEN nonempty_list(sorted_var) CLOSE term CLOSE [ CLOSE ] +## term -> OPEN . EXISTS OPEN nonempty_list(sorted_var) CLOSE term CLOSE [ CLOSE ] +## term -> OPEN . MATCH term OPEN nonempty_list(match_case) CLOSE CLOSE [ CLOSE ] +## term -> OPEN . 
ATTRIBUTE term nonempty_list(attribute) CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT OPEN +## + +042 +a term +a term construction (identifier, let binding, quantification, ...); +note that this expectation is caused by the preceding opening parenthesis + +input: OPEN ASSERT OPEN STR +## +## Ends in an error in state: 479. +## +## command -> OPEN ASSERT OPEN . PAR OPEN nonempty_list(datatype_symbol) CLOSE term CLOSE CLOSE [ # ] +## identifier -> OPEN . UNDERSCORE SYMBOL nonempty_list(index) CLOSE [ CLOSE ] +## qual_identifier -> OPEN . AS identifier sort CLOSE [ CLOSE ] +## term -> OPEN . qual_identifier nonempty_list(term) CLOSE [ CLOSE ] +## term -> OPEN . LET OPEN nonempty_list(var_binding) CLOSE term CLOSE [ CLOSE ] +## term -> OPEN . FORALL OPEN nonempty_list(sorted_var) CLOSE term CLOSE [ CLOSE ] +## term -> OPEN . EXISTS OPEN nonempty_list(sorted_var) CLOSE term CLOSE [ CLOSE ] +## term -> OPEN . MATCH term OPEN nonempty_list(match_case) CLOSE CLOSE [ CLOSE ] +## term -> OPEN . ATTRIBUTE term nonempty_list(attribute) CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT OPEN +## + +042 +a term +a term construction (identifier, let binding, quantification, ...); +note that this expectation is caused by the preceding opening parenthesis + +file: OPEN ASSERT OPEN PAR OPEN SYMBOL CLOSE SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 361. +## +## command -> OPEN ASSERT OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE term CLOSE . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE term CLOSE +## + +173 +a command +a closing parenthesis + +file: OPEN ASSERT OPEN PAR OPEN SYMBOL CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 360. +## +## command -> OPEN ASSERT OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE term . 
CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE term +## + +174 +a command +two closing parentheses + +file: OPEN ASSERT OPEN PAR OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 359. +## +## command -> OPEN ASSERT OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE . term CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE +## + +175 +a term +a term construction (symbol, function application, match, let binding, ...); +note that keywords and reserved words (such as '_', 'as', ...) are +not symbols, and thus are not allowed here + +file: OPEN ASSERT OPEN PAR OPEN UNDERSCORE +## +## Ends in an error in state: 357. +## +## command -> OPEN ASSERT OPEN PAR OPEN . nonempty_list(datatype_symbol) CLOSE term CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT OPEN PAR OPEN +## + +176 +a non-empty list of type variables +a type variable + +file: OPEN ASSERT OPEN PAR UNDERSCORE +## +## Ends in an error in state: 356. +## +## command -> OPEN ASSERT OPEN PAR . OPEN nonempty_list(datatype_symbol) CLOSE term CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT OPEN PAR +## + +177 +a non-empty list of type variables +an opening parenthesis + +file: OPEN ASSERT OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 130. +## +## term -> OPEN qual_identifier . nonempty_list(term) CLOSE [ SYMBOL STR OPEN NUM KEYWORD HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN qual_identifier +## + +044 +a function application +a term as argument; +note that keywords and reserved words (such as '_', 'as', ...) are +not terms, and thus are not allowed here + +file: OPEN ASSERT SYMBOL UNDERSCORE +## +## Ends in an error in state: 363. +## +## command -> OPEN ASSERT term . 
CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT term +## + +049 +an assertion +a closing parenthesis + +file: OPEN ASSERT UNDERSCORE +## +## Ends in an error in state: 354. +## +## command -> OPEN ASSERT . term CLOSE [ OPEN EOF ] +## command -> OPEN ASSERT . OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE term CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN ASSERT +## + +048 +a term +a term construction (symbol, function application, match, let binding, ...); +note that keywords and reserved words (such as '_', 'as', ...) are +not symbols, and thus are not allowed here + +file: OPEN CHECK_SAT UNDERSCORE +## +## Ends in an error in state: 352. +## +## command -> OPEN CHECK_SAT . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN CHECK_SAT +## + +050 +a check-sat command +a closing parenthesis + +file: OPEN CHECK_SAT_ASSUMING OPEN CLOSE UNDERSCORE +## +## Ends in an error in state: 350. +## +## command -> OPEN CHECK_SAT_ASSUMING OPEN list(prop_literal) CLOSE . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN CHECK_SAT_ASSUMING OPEN list(prop_literal) CLOSE +## + +051 +a check-sat-assuming command +a closing parenthesis + +file: OPEN CHECK_SAT_ASSUMING OPEN OPEN SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 343. +## +## prop_literal -> OPEN not_symbol prop_symbol . CLOSE [ SYMBOL OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN not_symbol prop_symbol +## + +046 +a propositional literal +a closing parenthesis + +file: OPEN CHECK_SAT_ASSUMING OPEN OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 342. +## +## prop_literal -> OPEN not_symbol . 
prop_symbol CLOSE [ SYMBOL OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN not_symbol +## + +047 +a propositional literal +a symbol + +file: OPEN CHECK_SAT_ASSUMING OPEN OPEN UNDERSCORE +## +## Ends in an error in state: 340. +## +## prop_literal -> OPEN . not_symbol prop_symbol CLOSE [ SYMBOL OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +117 +a propositional literal +the "not" symbol + +file: OPEN CHECK_SAT_ASSUMING OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 347. +## +## list(prop_literal) -> prop_literal . list(prop_literal) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## prop_literal +## + +119 +a list of propositional literals +a propositional literal of the form "symbol" or "(not symbol)" + +file: OPEN CHECK_SAT_ASSUMING OPEN UNDERSCORE +## +## Ends in an error in state: 339. +## +## command -> OPEN CHECK_SAT_ASSUMING OPEN . list(prop_literal) CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN CHECK_SAT_ASSUMING OPEN +## + +052 +a list of propositional literals +a propositional literal, i.e. either a symbol or the negation of a symbol + +file: OPEN CHECK_SAT_ASSUMING UNDERSCORE +## +## Ends in an error in state: 338. +## +## command -> OPEN CHECK_SAT_ASSUMING . OPEN list(prop_literal) CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN CHECK_SAT_ASSUMING +## + +053 +a check-sat-assuming command +a list of propositional literals, starting with an opening parenthesis + +file: OPEN DECLARE_CONST SYMBOL OPEN STR +## +## Ends in an error in state: 328. +## +## command -> OPEN DECLARE_CONST SYMBOL OPEN . PAR OPEN nonempty_list(datatype_symbol) CLOSE sort CLOSE CLOSE [ OPEN EOF ] +## identifier -> OPEN . UNDERSCORE SYMBOL nonempty_list(index) CLOSE [ CLOSE ] +## sort -> OPEN . 
identifier nonempty_list(sort) CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL OPEN +## + +120 +a parametric or monomorphic constant +the keyword par or a sort + +file: OPEN DECLARE_CONST SYMBOL OPEN SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 64. +## +## nonempty_list(sort) -> sort . [ CLOSE ] +## nonempty_list(sort) -> sort . nonempty_list(sort) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## sort +## + +121 +a list of sorts +a sort, or a closing parenthesis + +file: OPEN DECLARE_CONST SYMBOL OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 63. +## +## sort -> OPEN identifier . nonempty_list(sort) CLOSE [ SYMBOL STR OPEN NUM HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN identifier +## + +122 +a sort +a sort to start a non-empty list of arguments + +file: OPEN DECLARE_CONST SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 336. +## +## command -> OPEN DECLARE_CONST SYMBOL sort . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL sort +## + +054 +a constant declaration +a closing parenthesis + +file: OPEN DECLARE_CONST SYMBOL UNDERSCORE +## +## Ends in an error in state: 327. +## +## command -> OPEN DECLARE_CONST SYMBOL . sort CLOSE [ OPEN EOF ] +## command -> OPEN DECLARE_CONST SYMBOL . OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE sort CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL +## + +055 +a constant declaration +a sort + +file: OPEN DECLARE_CONST UNDERSCORE +## +## Ends in an error in state: 326. +## +## command -> OPEN DECLARE_CONST . SYMBOL sort CLOSE [ OPEN EOF ] +## command -> OPEN DECLARE_CONST . 
SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE sort CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST +## + +056 +a constant declaration +a symbol; +note that keywords and reserved words (such as '_', 'as', ...) are not +symbols, and thus are not allowed here + +file: OPEN DECLARE_DATATYPE SYMBOL OPEN OPEN SYMBOL CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 324. +## +## command -> OPEN DECLARE_DATATYPE SYMBOL datatype_dec . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPE SYMBOL datatype_dec +## + +057 +a datatype declaration +a closing parenthesis + +file: OPEN DECLARE_DATATYPE SYMBOL OPEN OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 313. +## +## nonempty_list(constructor_dec) -> constructor_dec . [ CLOSE ] +## nonempty_list(constructor_dec) -> constructor_dec . nonempty_list(constructor_dec) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## constructor_dec +## + +123 +a list of constructor declarations +another constructor declaration of the form "(constructor selectors*)", +or a closing parenthesis + +file: OPEN DECLARE_DATATYPE SYMBOL OPEN OPEN SYMBOL OPEN SYMBOL SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 306. +## +## list(selector_dec) -> selector_dec . list(selector_dec) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## selector_dec +## + +124 +a list of selectors +another selector of the form "(selector sort)", or a closing parenthesis + +file: OPEN DECLARE_DATATYPE SYMBOL OPEN OPEN SYMBOL OPEN SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 304. +## +## selector_dec -> OPEN SYMBOL sort . CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN SYMBOL sort +## + +125 +a selector declaration +a closing parenthesis + +file: OPEN DECLARE_DATATYPE SYMBOL OPEN OPEN SYMBOL OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 303. 
+## +## selector_dec -> OPEN SYMBOL . sort CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN SYMBOL +## + +126 +a selector declaration +a sort for the return type of the selector + +file: OPEN DECLARE_DATATYPE SYMBOL OPEN OPEN SYMBOL OPEN UNDERSCORE +## +## Ends in an error in state: 302. +## +## selector_dec -> OPEN . SYMBOL sort CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +127 +a selector declaration +a symbol for the selector name + +file: OPEN DECLARE_DATATYPE SYMBOL OPEN OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 301. +## +## constructor_dec -> OPEN SYMBOL . list(selector_dec) CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN SYMBOL +## + +128 +a constructor declaration +a selector declaration, of the form "(selector sort)", or a closing parenthesis + +file: OPEN DECLARE_DATATYPE SYMBOL OPEN OPEN UNDERSCORE +## +## Ends in an error in state: 300. +## +## constructor_dec -> OPEN . SYMBOL list(selector_dec) CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +129 +a constructor declaration +a symbol for the constructor name + +file: OPEN DECLARE_DATATYPE SYMBOL OPEN PAR OPEN SYMBOL CLOSE OPEN OPEN SYMBOL CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 311. +## +## datatype_dec -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN nonempty_list(constructor_dec) CLOSE . CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN nonempty_list(constructor_dec) CLOSE +## + +130 +a datatype declaration +a closing parenthesis + +file: OPEN DECLARE_DATATYPE SYMBOL OPEN PAR OPEN SYMBOL CLOSE OPEN UNDERSCORE +## +## Ends in an error in state: 299. +## +## datatype_dec -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN . 
nonempty_list(constructor_dec) CLOSE CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN +## + +131 +a datatype declaration +a constructor declaration of the form "(symbol selector*)" + +file: OPEN DECLARE_DATATYPE SYMBOL OPEN PAR OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 298. +## +## datatype_dec -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE . OPEN nonempty_list(constructor_dec) CLOSE CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE +## + +132 +a datatype declaration +an opening parenthesis to start the list of constructors + +file: OPEN DECLARE_DATATYPE SYMBOL OPEN PAR OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 189. +## +## nonempty_list(datatype_symbol) -> datatype_symbol . [ CLOSE ] +## nonempty_list(datatype_symbol) -> datatype_symbol . nonempty_list(datatype_symbol) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## datatype_symbol +## + +133 +a list of sort variables to parameterize a datatype +another symbol, or a closing parenthesis + +file: OPEN DECLARE_DATATYPE SYMBOL OPEN PAR OPEN UNDERSCORE +## +## Ends in an error in state: 296. +## +## datatype_dec -> OPEN PAR OPEN . nonempty_list(datatype_symbol) CLOSE OPEN nonempty_list(constructor_dec) CLOSE CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN +## + +134 +a list of sort variables to parameterize a datatype +a symbol + +file: OPEN DECLARE_DATATYPE SYMBOL OPEN PAR UNDERSCORE +## +## Ends in an error in state: 295. +## +## datatype_dec -> OPEN PAR . 
OPEN nonempty_list(datatype_symbol) CLOSE OPEN nonempty_list(constructor_dec) CLOSE CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR +## + +135 +a datatype declaration +an opening parenthesis to start a list of sort parameters for the datatype + +file: OPEN DECLARE_DATATYPE SYMBOL OPEN UNDERSCORE +## +## Ends in an error in state: 294. +## +## datatype_dec -> OPEN . nonempty_list(constructor_dec) CLOSE [ OPEN CLOSE ] +## datatype_dec -> OPEN . PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN nonempty_list(constructor_dec) CLOSE CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +136 +a datatype declaration +a list of constructor declarations of the form "(symbol selector*)", +or a parameterization of the datatype of the form "par (sort+)" + +file: OPEN DECLARE_DATATYPE SYMBOL UNDERSCORE +## +## Ends in an error in state: 323. +## +## command -> OPEN DECLARE_DATATYPE SYMBOL . datatype_dec CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPE SYMBOL +## + +058 +a datatype declaration +an opening parenthesis to start the datatype declaration + +file: OPEN DECLARE_DATATYPE UNDERSCORE +## +## Ends in an error in state: 322. +## +## command -> OPEN DECLARE_DATATYPE . SYMBOL datatype_dec CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPE +## + +059 +a datatype declaration +a symbol + +file: OPEN DECLARE_DATATYPES OPEN OPEN SYMBOL NUM CLOSE CLOSE OPEN OPEN OPEN SYMBOL CLOSE CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 318. +## +## command -> OPEN DECLARE_DATATYPES OPEN nonempty_list(sort_dec) CLOSE OPEN nonempty_list(datatype_dec) CLOSE . 
CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPES OPEN nonempty_list(sort_dec) CLOSE OPEN nonempty_list(datatype_dec) CLOSE +## + +060 +a datatypes declaration +a closing parenthesis + +file: OPEN DECLARE_DATATYPES OPEN OPEN SYMBOL NUM CLOSE CLOSE OPEN OPEN OPEN SYMBOL CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 320. +## +## nonempty_list(datatype_dec) -> datatype_dec . [ CLOSE ] +## nonempty_list(datatype_dec) -> datatype_dec . nonempty_list(datatype_dec) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## datatype_dec +## + +137 +a list of datatype declarations +another datatype declaration, or a closing parenthesis + +file: OPEN DECLARE_DATATYPES OPEN OPEN SYMBOL NUM CLOSE CLOSE OPEN UNDERSCORE +## +## Ends in an error in state: 293. +## +## command -> OPEN DECLARE_DATATYPES OPEN nonempty_list(sort_dec) CLOSE OPEN . nonempty_list(datatype_dec) CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPES OPEN nonempty_list(sort_dec) CLOSE OPEN +## + +061 +a datatypes definition +an opening parenthesis to start a list of constructors for the first defined datatype + +file: OPEN DECLARE_DATATYPES OPEN OPEN SYMBOL NUM CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 292. +## +## command -> OPEN DECLARE_DATATYPES OPEN nonempty_list(sort_dec) CLOSE . OPEN nonempty_list(datatype_dec) CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPES OPEN nonempty_list(sort_dec) CLOSE +## + +062 +a datatypes declaration +an opening parenthesis to start a list of datatype definitions, +one for each of the sorts being declared + +file: OPEN DECLARE_DATATYPES OPEN OPEN SYMBOL NUM CLOSE UNDERSCORE +## +## Ends in an error in state: 289. +## +## nonempty_list(sort_dec) -> sort_dec . [ CLOSE ] +## nonempty_list(sort_dec) -> sort_dec . 
nonempty_list(sort_dec) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## sort_dec +## + +138 +a list of datatype arity declarations +another datatype arity declaration, or a closing parenthesis + +file: OPEN DECLARE_DATATYPES OPEN OPEN SYMBOL NUM UNDERSCORE +## +## Ends in an error in state: 287. +## +## sort_dec -> OPEN SYMBOL NUM . CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN SYMBOL NUM +## + +139 +a datatype arity declaration +a closing parenthesis + +file: OPEN DECLARE_DATATYPES OPEN OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 286. +## +## sort_dec -> OPEN SYMBOL . NUM CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN SYMBOL +## + +140 +a datatype arity declaration +a numeral for the datatype arity + +file: OPEN DECLARE_DATATYPES OPEN OPEN UNDERSCORE +## +## Ends in an error in state: 285. +## +## sort_dec -> OPEN . SYMBOL NUM CLOSE [ OPEN CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +141 +a datatype arity declaration +a symbol for the datatype name + +file: OPEN DECLARE_DATATYPES OPEN UNDERSCORE +## +## Ends in an error in state: 284. +## +## command -> OPEN DECLARE_DATATYPES OPEN . nonempty_list(sort_dec) CLOSE OPEN nonempty_list(datatype_dec) CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPES OPEN +## + +063 +a datatypes declaration +a parametric sort declaration of the form "(symbol num)" + +file: OPEN DECLARE_DATATYPES UNDERSCORE +## +## Ends in an error in state: 283. +## +## command -> OPEN DECLARE_DATATYPES . 
OPEN nonempty_list(sort_dec) CLOSE OPEN nonempty_list(datatype_dec) CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_DATATYPES +## + +064 +a datatypes declaration +a list of sort declarations, starting with an opening parenthesis + +file: OPEN DECLARE_FUN SYMBOL OPEN CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 281. +## +## command -> OPEN DECLARE_FUN function_dec(sort) . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_FUN function_dec(sort) +## + +065 +a function declaration +a closing parenthesis + +file: OPEN DECLARE_FUN SYMBOL OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 261. +## +## list(sort) -> sort . list(sort) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## sort +## + +142 +a list of sorts +another sort or a closing parenthesis + +file: OPEN DECLARE_FUN UNDERSCORE +## +## Ends in an error in state: 253. +## +## command -> OPEN DECLARE_FUN . function_dec(sort) CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_FUN +## + +069 +a function declaration +a symbol for the function's name + +file: OPEN DECLARE_SORT SYMBOL NUM UNDERSCORE +## +## Ends in an error in state: 251. +## +## command -> OPEN DECLARE_SORT SYMBOL NUM . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_SORT SYMBOL NUM +## + +070 +a sort declaration +a closing parenthesis + +file: OPEN DECLARE_SORT SYMBOL UNDERSCORE +## +## Ends in an error in state: 250. +## +## command -> OPEN DECLARE_SORT SYMBOL . NUM CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_SORT SYMBOL +## + +071 +a sort declaration +a numeral for the arity of the sort being declared + +file: OPEN DECLARE_SORT UNDERSCORE +## +## Ends in an error in state: 249. +## +## command -> OPEN DECLARE_SORT . 
SYMBOL NUM CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_SORT +## + +072 +a sort declaration +a symbol for the sort name + +file: OPEN DEFINE_FUN SYMBOL OPEN CLOSE SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 247. +## +## command -> OPEN DEFINE_FUN function_def . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUN function_def +## + +073 +a function definition +a closing parenthesis + +file: OPEN DEFINE_FUN UNDERSCORE +## +## Ends in an error in state: 246. +## +## command -> OPEN DEFINE_FUN . function_def CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUN +## + +074 +a function definition +a symbol for the function's name + +file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN CLOSE SYMBOL CLOSE CLOSE OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 244. +## +## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(__anonymous_0) CLOSE OPEN nonempty_list(term) CLOSE . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUNS_REC OPEN nonempty_list(__anonymous_0) CLOSE OPEN nonempty_list(term) CLOSE +## + +075 +a recursive functions definition +a closing parenthesis + +file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN CLOSE SYMBOL CLOSE CLOSE OPEN UNDERSCORE +## +## Ends in an error in state: 242. +## +## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(__anonymous_0) CLOSE OPEN . nonempty_list(term) CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUNS_REC OPEN nonempty_list(__anonymous_0) CLOSE OPEN +## + +076 +a recursive functions definition +a term for the first function's body + +file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN CLOSE SYMBOL CLOSE CLOSE UNDERSCORE +## +## Ends in an error in state: 241. +## +## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(__anonymous_0) CLOSE . 
OPEN nonempty_list(term) CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUNS_REC OPEN nonempty_list(__anonymous_0) CLOSE +## + +077 +a recursive functions definition +an opening parenthesis to start a list the function's bodies + +file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN CLOSE SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 238. +## +## nonempty_list(__anonymous_0) -> OPEN function_dec(sorted_var) CLOSE . [ CLOSE ] +## nonempty_list(__anonymous_0) -> OPEN function_dec(sorted_var) CLOSE . nonempty_list(__anonymous_0) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN function_dec(sorted_var) CLOSE +## + +143 +a list of function declarations +another function declaration, or a closing parenthesis + +file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 237. +## +## nonempty_list(__anonymous_0) -> OPEN function_dec(sorted_var) . CLOSE [ CLOSE ] +## nonempty_list(__anonymous_0) -> OPEN function_dec(sorted_var) . CLOSE nonempty_list(__anonymous_0) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN function_dec(sorted_var) +## + +144 +a function declaration +a closing parenthesis + +file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN CLOSE UNDERSCORE +## +## Ends in an error in state: 224. +## +## function_dec(sorted_var) -> SYMBOL OPEN list(sorted_var) CLOSE . sort [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN list(sorted_var) CLOSE +## + +145 +a function declaration +a sort for the return type of the function + +file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN UNDERSCORE +## +## Ends in an error in state: 213. +## +## function_dec(sorted_var) -> SYMBOL OPEN . list(sorted_var) CLOSE sort [ CLOSE ] +## function_dec(sorted_var) -> SYMBOL OPEN . 
PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN +## + +146 +a function declaration +either a sort for the first argument type, or a closing parenthesis + +file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 212. +## +## function_dec(sorted_var) -> SYMBOL . OPEN list(sorted_var) CLOSE sort [ CLOSE ] +## function_dec(sorted_var) -> SYMBOL . OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL +## + +147 +a function declaration +an opening parenthesis to start the list of arguments sorts + +file: OPEN DEFINE_FUNS_REC OPEN OPEN UNDERSCORE +## +## Ends in an error in state: 211. +## +## nonempty_list(__anonymous_0) -> OPEN . function_dec(sorted_var) CLOSE [ CLOSE ] +## nonempty_list(__anonymous_0) -> OPEN . function_dec(sorted_var) CLOSE nonempty_list(__anonymous_0) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +148 +a function declaration +a symbol for the function name + +file: OPEN DEFINE_FUNS_REC OPEN UNDERSCORE +## +## Ends in an error in state: 210. +## +## command -> OPEN DEFINE_FUNS_REC OPEN . nonempty_list(__anonymous_0) CLOSE OPEN nonempty_list(term) CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUNS_REC OPEN +## + +078 +a recursive functions definition +a function declaration of the form "(name (sort*) sort)", +or "(name (par (var+) (sort*) sort))" + +file: OPEN DEFINE_FUNS_REC UNDERSCORE +## +## Ends in an error in state: 209. +## +## command -> OPEN DEFINE_FUNS_REC . 
OPEN nonempty_list(__anonymous_0) CLOSE OPEN nonempty_list(term) CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUNS_REC +## + +079 +a recursive functions declaration +an opening parenthesis to start a list of function declaration + +file: OPEN DEFINE_FUN_REC SYMBOL OPEN CLOSE SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 207. +## +## command -> OPEN DEFINE_FUN_REC function_def . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUN_REC function_def +## + +080 +a recursive function definition +a closing parenthesis + +file: OPEN DEFINE_FUN_REC SYMBOL OPEN CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 193. +## +## function_def -> SYMBOL OPEN list(sorted_var) CLOSE sort . term [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN list(sorted_var) CLOSE sort +## + +149 +a function definition +a term for the body of the function + +file: OPEN DEFINE_FUN_REC SYMBOL OPEN CLOSE UNDERSCORE +## +## Ends in an error in state: 192. +## +## function_def -> SYMBOL OPEN list(sorted_var) CLOSE . sort term [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN list(sorted_var) CLOSE +## + +150 +a function definition +a sort for the return type of the function + +file: OPEN DEFINE_FUN_REC SYMBOL OPEN OPEN SYMBOL SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 180. +## +## list(sorted_var) -> sorted_var . list(sorted_var) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## sorted_var +## + +151 +a list of sorted variables +another sorted variable of the form "(var sort)", or a closing parenthesis + +file: OPEN DEFINE_FUN_REC SYMBOL OPEN UNDERSCORE +## +## Ends in an error in state: 173. +## +## function_def -> SYMBOL OPEN . list(sorted_var) CLOSE sort term [ CLOSE ] +## function_def -> SYMBOL OPEN . 
PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort term CLOSE [ CLOSE ] +## function_def -> SYMBOL OPEN . PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort CLOSE term [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN +## + +152 +a function definition +a sorted variable of the form "(var sort)", a closing parenthesis, +or a parameterization of the form "par (var+)" + +file: OPEN DEFINE_FUN_REC SYMBOL UNDERSCORE +## +## Ends in an error in state: 172. +## +## function_def -> SYMBOL . OPEN list(sorted_var) CLOSE sort term [ CLOSE ] +## function_def -> SYMBOL . OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort term CLOSE [ CLOSE ] +## function_def -> SYMBOL . OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort CLOSE term [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL +## + +153 +a function definition +an opening parenthesis to start the list of arguments + +file: OPEN DEFINE_FUN_REC UNDERSCORE +## +## Ends in an error in state: 171. +## +## command -> OPEN DEFINE_FUN_REC . function_def CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_FUN_REC +## + +081 +a recursive function definition +a symbol for the function's name + +file: OPEN DEFINE_SORT SYMBOL OPEN CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 169. +## +## command -> OPEN DEFINE_SORT SYMBOL OPEN list(SYMBOL) CLOSE sort . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_SORT SYMBOL OPEN list(SYMBOL) CLOSE sort +## + +082 +a sort definition +a closing parenthesis + +file: OPEN DEFINE_SORT SYMBOL OPEN CLOSE UNDERSCORE +## +## Ends in an error in state: 168. +## +## command -> OPEN DEFINE_SORT SYMBOL OPEN list(SYMBOL) CLOSE . 
sort CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_SORT SYMBOL OPEN list(SYMBOL) CLOSE +## + +083 +a sort definition +a sort for the definition body + +file: OPEN DEFINE_SORT SYMBOL OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 165. +## +## list(SYMBOL) -> SYMBOL . list(SYMBOL) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL +## + +154 +a list of symbols +another symbol or a closing parenthesis + +file: OPEN DEFINE_SORT SYMBOL OPEN UNDERSCORE +## +## Ends in an error in state: 164. +## +## command -> OPEN DEFINE_SORT SYMBOL OPEN . list(SYMBOL) CLOSE sort CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_SORT SYMBOL OPEN +## + +084 +a sort definition +a closing parenthesis, or a list of symbols for the definition arguments + +file: OPEN DEFINE_SORT SYMBOL UNDERSCORE +## +## Ends in an error in state: 163. +## +## command -> OPEN DEFINE_SORT SYMBOL . OPEN list(SYMBOL) CLOSE sort CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_SORT SYMBOL +## + +085 +a sort definition +an opening parenthesis to start a list of arguments + +file: OPEN DEFINE_SORT UNDERSCORE +## +## Ends in an error in state: 162. +## +## command -> OPEN DEFINE_SORT . SYMBOL OPEN list(SYMBOL) CLOSE sort CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DEFINE_SORT +## + +086 +a sort definition +a symbol for the defined sort's name + +file: OPEN ECHO STR UNDERSCORE +## +## Ends in an error in state: 160. +## +## command -> OPEN ECHO STR . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN ECHO STR +## + +087 +an echo command +a closing parenthesis + +file: OPEN ECHO UNDERSCORE +## +## Ends in an error in state: 159. +## +## command -> OPEN ECHO . 
STR CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN ECHO +## + +088 +an echo command +a string literal + +file: OPEN EXIT UNDERSCORE +## +## Ends in an error in state: 157. +## +## command -> OPEN EXIT . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN EXIT +## + +089 +an exit command +a closing parenthesis + +file: OPEN GET_ASSERTIONS UNDERSCORE +## +## Ends in an error in state: 155. +## +## command -> OPEN GET_ASSERTIONS . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN GET_ASSERTIONS +## + +090 +a get-assertions command +a closing parenthesis + +file: OPEN GET_ASSIGNMENT UNDERSCORE +## +## Ends in an error in state: 153. +## +## command -> OPEN GET_ASSIGNMENT . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN GET_ASSIGNMENT +## + +091 +a get-assignment command +a closing parenthesis + +file: OPEN GET_INFO KEYWORD UNDERSCORE +## +## Ends in an error in state: 151. +## +## command -> OPEN GET_INFO info_flag . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN GET_INFO info_flag +## + +092 +a get-info command +a closing parenthesis + +file: OPEN GET_INFO UNDERSCORE +## +## Ends in an error in state: 149. +## +## command -> OPEN GET_INFO . info_flag CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN GET_INFO +## + +093 +a get-info command +a keyword of the form ":symbol" + +file: OPEN GET_MODEL UNDERSCORE +## +## Ends in an error in state: 147. +## +## command -> OPEN GET_MODEL . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN GET_MODEL +## + +094 +a get-model command +a closing parenthesis + +file: OPEN GET_OPTION KEYWORD UNDERSCORE +## +## Ends in an error in state: 145. +## +## command -> OPEN GET_OPTION KEYWORD . 
CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN GET_OPTION KEYWORD +## + +095 +a get-option command +a closing parenthesis + +file: OPEN GET_OPTION UNDERSCORE +## +## Ends in an error in state: 144. +## +## command -> OPEN GET_OPTION . KEYWORD CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN GET_OPTION +## + +096 +a get-option command +a keyword of the form ":symbol" + +file: OPEN GET_PROOF UNDERSCORE +## +## Ends in an error in state: 142. +## +## command -> OPEN GET_PROOF . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN GET_PROOF +## + +097 +a get-proof command +a closing parenthesis + +file: OPEN GET_UNSAT_ASSUMPTIONS UNDERSCORE +## +## Ends in an error in state: 140. +## +## command -> OPEN GET_UNSAT_ASSUMPTIONS . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN GET_UNSAT_ASSUMPTIONS +## + +098 +a get-unsat-assumptions command +a closing parenthesis + +file: OPEN GET_UNSAT_CORE UNDERSCORE +## +## Ends in an error in state: 138. +## +## command -> OPEN GET_UNSAT_CORE . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN GET_UNSAT_CORE +## + +099 +a get-unsat-core command +a closing parenthesis + +file: OPEN GET_VALUE OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 136. +## +## command -> OPEN GET_VALUE OPEN nonempty_list(term) CLOSE . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN GET_VALUE OPEN nonempty_list(term) CLOSE +## + +100 +a get-value command +a closing parenthesis + +file: OPEN GET_VALUE OPEN UNDERSCORE +## +## Ends in an error in state: 46. +## +## command -> OPEN GET_VALUE OPEN . nonempty_list(term) CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN GET_VALUE OPEN +## + +101 +a get-value command +a term + +file: OPEN GET_VALUE UNDERSCORE +## +## Ends in an error in state: 45. 
+## +## command -> OPEN GET_VALUE . OPEN nonempty_list(term) CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN GET_VALUE +## + +102 +a get-value command +an opening parenthesis to start a list of terms + +file: OPEN POP NUM UNDERSCORE +## +## Ends in an error in state: 43. +## +## command -> OPEN POP NUM . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN POP NUM +## + +103 +a pop command +a closing parenthesis + +file: OPEN POP UNDERSCORE +## +## Ends in an error in state: 42. +## +## command -> OPEN POP . NUM CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN POP +## + +104 +a pop command +a numeral + +file: OPEN PUSH NUM UNDERSCORE +## +## Ends in an error in state: 40. +## +## command -> OPEN PUSH NUM . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN PUSH NUM +## + +105 +a push command +a closing parenthesis + +file: OPEN PUSH UNDERSCORE +## +## Ends in an error in state: 39. +## +## command -> OPEN PUSH . NUM CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN PUSH +## + +106 +a push command +a numeral + +file: OPEN RESET UNDERSCORE +## +## Ends in an error in state: 37. +## +## command -> OPEN RESET . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN RESET +## + +107 +a reset command +a closing parenthesis + +file: OPEN RESET_ASSERTIONS CLOSE UNDERSCORE +## +## Ends in an error in state: 368. +## +## list(command) -> command . list(command) [ EOF ] +## +## The known suffix of the stack is as follows: +## command +## + +116 +an input statement +an opening parenthesis to start a command + +file: OPEN RESET_ASSERTIONS UNDERSCORE +## +## Ends in an error in state: 35. +## +## command -> OPEN RESET_ASSERTIONS . 
CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN RESET_ASSERTIONS +## + +108 +a reset-assertions command +a closing parenthesis + +file: OPEN SET_INFO KEYWORD KEYWORD +## +## Ends in an error in state: 33. +## +## command -> OPEN SET_INFO command_option . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN SET_INFO command_option +## +## WARNING: This example involves spurious reductions. +## This implies that, although the LR(1) items shown above provide an +## accurate view of the past (what has been recognized so far), they +## may provide an INCOMPLETE view of the future (what was expected next). +## In state 3, spurious reduction of production option(attribute_value) -> +## In state 24, spurious reduction of production attribute -> KEYWORD option(attribute_value) +## In state 28, spurious reduction of production command_option -> attribute +## + +109 +a set-info command +a closing parenthesis, or an attribute value; +note that keywords and reserved words (such as '_', 'as', ...) are not +valid attribute values, and thus are not allowed here + +file: OPEN SET_INFO UNDERSCORE +## +## Ends in an error in state: 32. +## +## command -> OPEN SET_INFO . command_option CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN SET_INFO +## + +110 +a set-info command +an attribute of the form "keyword value?" + +file: OPEN SET_LOGIC SYMBOL UNDERSCORE +## +## Ends in an error in state: 30. +## +## command -> OPEN SET_LOGIC SYMBOL . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN SET_LOGIC SYMBOL +## + +111 +a set-logic command +a closing parenthesis + +file: OPEN SET_LOGIC UNDERSCORE +## +## Ends in an error in state: 29. +## +## command -> OPEN SET_LOGIC . 
SYMBOL CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN SET_LOGIC +## + +112 +a set-logic command +a symbol for the logic name + +file: OPEN SET_OPTION KEYWORD KEYWORD +## +## Ends in an error in state: 26. +## +## command -> OPEN SET_OPTION command_option . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN SET_OPTION command_option +## +## WARNING: This example involves spurious reductions. +## This implies that, although the LR(1) items shown above provide an +## accurate view of the past (what has been recognized so far), they +## may provide an INCOMPLETE view of the future (what was expected next). +## In state 3, spurious reduction of production option(attribute_value) -> +## In state 24, spurious reduction of production attribute -> KEYWORD option(attribute_value) +## In state 28, spurious reduction of production command_option -> attribute +## + +113 +a set-option command +a closing parenthesis, or an attribute value; +note that keywords and reserved words (such as '_', 'as', ...) are not +valid attribute values, and thus are not allowed here + +file: OPEN SET_OPTION KEYWORD OPEN OPEN UNDERSCORE +## +## Ends in an error in state: 9. +## +## s_expr -> OPEN . list(s_expr) CLOSE [ SYMBOL STR PAR OPEN NUM KEYWORD HEX DEC CLOSE BIN AS ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +155 +an s-expression +a literal, symbol, keyword, or another s-expression in parentheses + + +file: OPEN SET_OPTION KEYWORD OPEN SYMBOL UNDERSCORE +## +## Ends in an error in state: 17. +## +## list(s_expr) -> s_expr . list(s_expr) [ CLOSE ] +## +## The known suffix of the stack is as follows: +## s_expr +## + +156 +a list of s-expressions +a closing parenthesis, or another s-expression, i.e. +a literal, symbol, keyword, or an s-expression in parentheses + +file: OPEN SET_OPTION KEYWORD OPEN UNDERSCORE +## +## Ends in an error in state: 6. +## +## attribute_value -> OPEN . 
list(s_expr) CLOSE [ KEYWORD CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +157 +an attribute value +a closing parenthesis, or an s-expression, i.e. +a literal, symbol, keyword, or an s-expression in parentheses + +file: OPEN SET_OPTION KEYWORD UNDERSCORE +## +## Ends in an error in state: 3. +## +## attribute -> KEYWORD . option(attribute_value) [ KEYWORD CLOSE ] +## +## The known suffix of the stack is as follows: +## KEYWORD +## + +158 +an attribute +either an attribute value as s-expression, +another attribute, or a closing parenthesis + +file: OPEN DEFINE_FUN_REC SYMBOL OPEN PAR UNDERSCORE +## +## Ends in an error in state: 174. +## +## function_def -> SYMBOL OPEN PAR . OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort term CLOSE [ CLOSE ] +## function_def -> SYMBOL OPEN PAR . OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort CLOSE term [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR +## + +159 +a function definition +an opening parenthesis to start a list of sort variables to +parameterize the function definition over + +file: OPEN DEFINE_FUN_REC SYMBOL OPEN PAR OPEN UNDERSCORE +## +## Ends in an error in state: 175. +## +## function_def -> SYMBOL OPEN PAR OPEN . nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort term CLOSE [ CLOSE ] +## function_def -> SYMBOL OPEN PAR OPEN . nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort CLOSE term [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN +## + +160 +a function definition +a sort variable (symbol) + +file: OPEN DEFINE_FUN_REC SYMBOL OPEN PAR OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 178. +## +## function_def -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE . OPEN list(sorted_var) CLOSE sort term CLOSE [ CLOSE ] +## function_def -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE . 
OPEN list(sorted_var) CLOSE sort CLOSE term [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE +## + +161 +a function definition +an opening parenthesis to start the list of arguments +of the function + +file: OPEN DEFINE_FUN_REC SYMBOL OPEN PAR OPEN SYMBOL CLOSE OPEN UNDERSCORE +## +## Ends in an error in state: 179. +## +## function_def -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN . list(sorted_var) CLOSE sort term CLOSE [ CLOSE ] +## function_def -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN . list(sorted_var) CLOSE sort CLOSE term [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN +## + +162 +a function definition +a sorted variable of the form "(var sort)" + +file: OPEN DEFINE_FUN_REC SYMBOL OPEN PAR OPEN SYMBOL CLOSE OPEN CLOSE UNDERSCORE +## +## Ends in an error in state: 183. +## +## function_def -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE . sort term CLOSE [ CLOSE ] +## function_def -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE . sort CLOSE term [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE +## + +163 +a function definition +a sort for the return type of the function + +file: OPEN DEFINE_FUN_REC SYMBOL OPEN PAR OPEN SYMBOL CLOSE OPEN CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 184. +## +## function_def -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort . term CLOSE [ CLOSE ] +## function_def -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort . 
CLOSE term [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort +## + +164 +a function definition +a term for the body of the function + +file: OPEN DEFINE_FUN_REC SYMBOL OPEN PAR OPEN SYMBOL CLOSE OPEN CLOSE SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 187. +## +## function_def -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort term . CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort term +## + +165 +a function definition +a closing parenthesis to close the parameterized definition + +file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN PAR UNDERSCORE +## +## Ends in an error in state: 214. +## +## function_dec(sorted_var) -> SYMBOL OPEN PAR . OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR +## + +166 +a function declaration +an opening parenthesis to start a list of sort variables to +parameterize the function sort + +file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN PAR OPEN UNDERSCORE +## +## Ends in an error in state: 215. +## +## function_dec(sorted_var) -> SYMBOL OPEN PAR OPEN . nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN +## + +167 +a function declaration +a sort variable + +file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN PAR OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 217. +## +## function_dec(sorted_var) -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE . 
OPEN list(sorted_var) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE +## + +168 +a function declaration +an opening parenthesis to start the list of arguments + +file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN PAR OPEN SYMBOL CLOSE OPEN UNDERSCORE +## +## Ends in an error in state: 218. +## +## function_dec(sorted_var) -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN . list(sorted_var) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN +## + +169 +a function declaration +a sorted variable of the form "(var sort)" + +file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN PAR OPEN SYMBOL CLOSE OPEN CLOSE UNDERSCORE +## +## Ends in an error in state: 220. +## +## function_dec(sorted_var) -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE . sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE +## + +170 +a function declaration +a sort for the return type of the function + +file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN PAR OPEN SYMBOL CLOSE OPEN CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 221. +## +## function_dec(sorted_var) -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort . CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort +## + +171 +a list of function declaration +a closing parenthesis to close the function declaration + +file: OPEN SET_OPTION UNDERSCORE +## +## Ends in an error in state: 2. +## +## command -> OPEN SET_OPTION . 
command_option CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN SET_OPTION +## + +114 +a set-option command +an attribute of the form "keyword value?" + +file: OPEN UNDERSCORE +## +## Ends in an error in state: 1. +## +## command -> OPEN . ASSERT term CLOSE [ OPEN EOF ] +## command -> OPEN . ASSERT OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE term CLOSE CLOSE [ OPEN EOF ] +## command -> OPEN . CHECK_SAT CLOSE [ OPEN EOF ] +## command -> OPEN . CHECK_SAT_ASSUMING OPEN list(prop_literal) CLOSE CLOSE [ OPEN EOF ] +## command -> OPEN . DECLARE_CONST SYMBOL sort CLOSE [ OPEN EOF ] +## command -> OPEN . DECLARE_CONST SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE sort CLOSE CLOSE [ OPEN EOF ] +## command -> OPEN . DECLARE_DATATYPE SYMBOL datatype_dec CLOSE [ OPEN EOF ] +## command -> OPEN . DECLARE_DATATYPES OPEN nonempty_list(sort_dec) CLOSE OPEN nonempty_list(datatype_dec) CLOSE CLOSE [ OPEN EOF ] +## command -> OPEN . DECLARE_FUN function_dec(sort) CLOSE [ OPEN EOF ] +## command -> OPEN . DECLARE_SORT SYMBOL NUM CLOSE [ OPEN EOF ] +## command -> OPEN . DEFINE_FUN function_def CLOSE [ OPEN EOF ] +## command -> OPEN . DEFINE_FUN_REC function_def CLOSE [ OPEN EOF ] +## command -> OPEN . DEFINE_FUNS_REC OPEN nonempty_list(__anonymous_0) CLOSE OPEN nonempty_list(term) CLOSE CLOSE [ OPEN EOF ] +## command -> OPEN . DEFINE_SORT SYMBOL OPEN list(SYMBOL) CLOSE sort CLOSE [ OPEN EOF ] +## command -> OPEN . ECHO STR CLOSE [ OPEN EOF ] +## command -> OPEN . EXIT CLOSE [ OPEN EOF ] +## command -> OPEN . GET_ASSERTIONS CLOSE [ OPEN EOF ] +## command -> OPEN . GET_ASSIGNMENT CLOSE [ OPEN EOF ] +## command -> OPEN . GET_INFO info_flag CLOSE [ OPEN EOF ] +## command -> OPEN . GET_MODEL CLOSE [ OPEN EOF ] +## command -> OPEN . GET_OPTION KEYWORD CLOSE [ OPEN EOF ] +## command -> OPEN . GET_PROOF CLOSE [ OPEN EOF ] +## command -> OPEN . GET_UNSAT_ASSUMPTIONS CLOSE [ OPEN EOF ] +## command -> OPEN . 
GET_UNSAT_CORE CLOSE [ OPEN EOF ] +## command -> OPEN . GET_VALUE OPEN nonempty_list(term) CLOSE CLOSE [ OPEN EOF ] +## command -> OPEN . POP NUM CLOSE [ OPEN EOF ] +## command -> OPEN . PUSH NUM CLOSE [ OPEN EOF ] +## command -> OPEN . RESET CLOSE [ OPEN EOF ] +## command -> OPEN . RESET_ASSERTIONS CLOSE [ OPEN EOF ] +## command -> OPEN . SET_INFO command_option CLOSE [ OPEN EOF ] +## command -> OPEN . SET_LOGIC SYMBOL CLOSE [ OPEN EOF ] +## command -> OPEN . SET_OPTION command_option CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +115 +a command +a command name + +file: UNDERSCORE +## +## Ends in an error in state: 0. +## +## file' -> . file [ # ] +## +## The known suffix of the stack is as follows: +## +## + +116 +an input statement +an opening parenthesis to start a command + +term: OPEN AS SYMBOL OPEN STR +## +## Ends in an error in state: 62. +## +## identifier -> OPEN . UNDERSCORE SYMBOL nonempty_list(index) CLOSE [ SYMBOL STR OPEN NUM HEX DEC CLOSE BIN ] +## sort -> OPEN . identifier nonempty_list(sort) CLOSE [ SYMBOL STR OPEN NUM HEX DEC CLOSE BIN ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +117 +a sort +an identifier or an underscore + +input: OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN SYMBOL CLOSE SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 467. +## +## command -> OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE sort CLOSE . CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE sort CLOSE +## + +118 +a constant declaration +a closing parenthesis + +input: OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN SYMBOL CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 466. +## +## command -> OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE sort . 
CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE sort +## + +119 +a constant declaration +a closing parenthesis + +input: OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 465. +## +## command -> OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE . sort CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE +## + +120 +a sort +an identifier or an opening parenthesis + +input: OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN UNDERSCORE +## +## Ends in an error in state: 463. +## +## command -> OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN . nonempty_list(datatype_symbol) CLOSE sort CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN +## + +121 +a list of type variable +an identifier + +input: OPEN DECLARE_CONST SYMBOL OPEN PAR UNDERSCORE +## +## Ends in an error in state: 462. +## +## command -> OPEN DECLARE_CONST SYMBOL OPEN PAR . OPEN nonempty_list(datatype_symbol) CLOSE sort CLOSE CLOSE [ # ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL OPEN PAR +## + +122 +a non empty list of type variable +an opening parenthesis + +input: OPEN DECLARE_CONST SYMBOL OPEN STR +## +## Ends in an error in state: 461. +## +## command -> OPEN DECLARE_CONST SYMBOL OPEN . PAR OPEN nonempty_list(datatype_symbol) CLOSE sort CLOSE CLOSE [ # ] +## identifier -> OPEN . UNDERSCORE SYMBOL nonempty_list(index) CLOSE [ CLOSE ] +## sort -> OPEN . 
identifier nonempty_list(sort) CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL OPEN +## + +120 +a parametric or monormorphic constant +the keyword par or a sort + + +file: OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN SYMBOL CLOSE SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 334. +## +## command -> OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE sort CLOSE . CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE sort CLOSE +## + +118 +a constant declaration +a closing parenthesis + +file: OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN SYMBOL CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 333. +## +## command -> OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE sort . CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE sort +## + +119 +a constant declaration +a closing parenthesis + +file: OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 332. +## +## command -> OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE . sort CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE +## + +120 +a sort +an identifier or an opening parenthesis + +file: OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN UNDERSCORE +## +## Ends in an error in state: 330. +## +## command -> OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN . 
nonempty_list(datatype_symbol) CLOSE sort CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL OPEN PAR OPEN +## + +121 +a list of type variable +an identifier + +file: OPEN DECLARE_CONST SYMBOL OPEN PAR UNDERSCORE +## +## Ends in an error in state: 329. +## +## command -> OPEN DECLARE_CONST SYMBOL OPEN PAR . OPEN nonempty_list(datatype_symbol) CLOSE sort CLOSE CLOSE [ OPEN EOF ] +## +## The known suffix of the stack is as follows: +## OPEN DECLARE_CONST SYMBOL OPEN PAR +## + +122 +a non empty list of type variable +an opening parenthesis + +file: OPEN DECLARE_FUN OPEN PAR OPEN SYMBOL CLOSE SYMBOL OPEN CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 279. +## +## function_dec(sort) -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sort) CLOSE sort . CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sort) CLOSE sort +## + +123 +a function declaration +a closing parenthesis + +file: OPEN DECLARE_FUN OPEN PAR OPEN SYMBOL CLOSE SYMBOL OPEN CLOSE UNDERSCORE +## +## Ends in an error in state: 278. +## +## function_dec(sort) -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sort) CLOSE . sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sort) CLOSE +## + +124 +the return type of a function +a sort (identifier or an opening parenthesis) + +file: OPEN DECLARE_FUN OPEN PAR OPEN SYMBOL CLOSE SYMBOL OPEN UNDERSCORE +## +## Ends in an error in state: 276. +## +## function_dec(sort) -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN . 
list(sort) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN +## + +125 +the type parameters of a function +a sort (identifier or an opening parenthesis) + +file: OPEN DECLARE_FUN OPEN PAR OPEN SYMBOL CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 275. +## +## function_dec(sort) -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL . OPEN list(sort) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL +## + +126 +the type parameters of a function +an opening parenthesis + +file: OPEN DECLARE_FUN OPEN PAR OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 274. +## +## function_dec(sort) -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE . SYMBOL OPEN list(sort) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE +## + +127 +a declaration of a parametric function +the name of the function + +file: OPEN DECLARE_FUN OPEN PAR OPEN UNDERSCORE +## +## Ends in an error in state: 272. +## +## function_dec(sort) -> OPEN PAR OPEN . nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sort) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN +## + +128 +the type variable of a parametric function +an identifier (type variable) + +file: OPEN DECLARE_FUN OPEN PAR UNDERSCORE +## +## Ends in an error in state: 271. +## +## function_dec(sort) -> OPEN PAR . OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sort) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR +## + +129 +a list of type variable +an opening parenthesis + +file: OPEN DECLARE_FUN OPEN UNDERSCORE +## +## Ends in an error in state: 270. +## +## function_dec(sort) -> OPEN . 
PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sort) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +130 +the declaration of a parametric function +the keyword par + +file: OPEN DEFINE_FUNS_REC OPEN OPEN OPEN PAR OPEN SYMBOL CLOSE SYMBOL OPEN CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 235. +## +## function_dec(sorted_var) -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE sort . CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE sort +## + +131 +definition of recursive function +a closing parenthesis + +file: OPEN DEFINE_FUNS_REC OPEN OPEN OPEN PAR OPEN SYMBOL CLOSE SYMBOL OPEN CLOSE UNDERSCORE +## +## Ends in an error in state: 234. +## +## function_dec(sorted_var) -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE . sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE +## + +132 +a return sort +a sort + +file: OPEN DEFINE_FUNS_REC OPEN OPEN OPEN PAR OPEN SYMBOL CLOSE SYMBOL OPEN UNDERSCORE +## +## Ends in an error in state: 232. +## +## function_dec(sorted_var) -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN . list(sorted_var) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN +## + +133 +a list of formal parameters +a formal parameters: (id sort) + +file: OPEN DEFINE_FUNS_REC OPEN OPEN OPEN PAR OPEN SYMBOL CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 231. +## +## function_dec(sorted_var) -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL . 
OPEN list(sorted_var) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL +## + +134 +a list of formal parameters +an opening parenthesis + +file: OPEN DEFINE_FUNS_REC OPEN OPEN OPEN PAR OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 230. +## +## function_dec(sorted_var) -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE . SYMBOL OPEN list(sorted_var) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE +## + +135 +a function declaration +the name of the function + +file: OPEN DEFINE_FUNS_REC OPEN OPEN OPEN PAR OPEN UNDERSCORE +## +## Ends in an error in state: 228. +## +## function_dec(sorted_var) -> OPEN PAR OPEN . nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN +## + +136 +a list of type variable +an identifier + +file: OPEN DEFINE_FUNS_REC OPEN OPEN OPEN PAR UNDERSCORE +## +## Ends in an error in state: 227. +## +## function_dec(sorted_var) -> OPEN PAR . OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR +## + +137 +a list of type variable +an open parenthesis + +file: OPEN DEFINE_FUNS_REC OPEN OPEN OPEN UNDERSCORE +## +## Ends in an error in state: 226. +## +## function_dec(sorted_var) -> OPEN . PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE sort CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +138 +a parametric function declaration +the keyword par + +file: OPEN DEFINE_FUN_REC OPEN PAR OPEN SYMBOL CLOSE SYMBOL OPEN CLOSE SYMBOL SYMBOL UNDERSCORE +## +## Ends in an error in state: 205. 
+## +## function_def -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE sort term . CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE sort term +## + +139 +a function definition +a closing parenthesis + +file: OPEN DEFINE_FUN_REC OPEN PAR OPEN SYMBOL CLOSE SYMBOL OPEN CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 204. +## +## function_def -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE sort . term CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE sort +## + +139 +a function definition +a term for the body of the function + +file: OPEN DEFINE_FUN_REC OPEN PAR OPEN SYMBOL CLOSE SYMBOL OPEN CLOSE UNDERSCORE +## +## Ends in an error in state: 203. +## +## function_def -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE . sort term CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE +## + +140 +a function definition +a sort for the result type + +file: OPEN DEFINE_FUN_REC OPEN PAR OPEN SYMBOL CLOSE SYMBOL OPEN UNDERSCORE +## +## Ends in an error in state: 201. +## +## function_def -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN . list(sorted_var) CLOSE sort term CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN +## + +141 +a list of parameters +a formal parameter: (id sort) + +file: OPEN DEFINE_FUN_REC OPEN PAR OPEN SYMBOL CLOSE SYMBOL UNDERSCORE +## +## Ends in an error in state: 200. +## +## function_def -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL . 
OPEN list(sorted_var) CLOSE sort term CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL +## + +142 +a list of parameters +an opening parenthesis + +file: OPEN DEFINE_FUN_REC OPEN PAR OPEN SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 199. +## +## function_def -> OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE . SYMBOL OPEN list(sorted_var) CLOSE sort term CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE +## + +143 +a function definition +the name of the function + +file: OPEN DEFINE_FUN_REC OPEN PAR OPEN UNDERSCORE +## +## Ends in an error in state: 197. +## +## function_def -> OPEN PAR OPEN . nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE sort term CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR OPEN +## + +144 +a list of type variable +an identifier + +file: OPEN DEFINE_FUN_REC OPEN PAR UNDERSCORE +## +## Ends in an error in state: 196. +## +## function_def -> OPEN PAR . OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE sort term CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN PAR +## + +145 +a list of type variable +an opening parenthesis + +file: OPEN DEFINE_FUN_REC OPEN UNDERSCORE +## +## Ends in an error in state: 195. +## +## function_def -> OPEN . PAR OPEN nonempty_list(datatype_symbol) CLOSE SYMBOL OPEN list(sorted_var) CLOSE sort term CLOSE [ CLOSE ] +## +## The known suffix of the stack is as follows: +## OPEN +## + +146 +a parametric function definition +the keyword par + +file: OPEN DEFINE_FUN_REC SYMBOL OPEN PAR OPEN SYMBOL CLOSE OPEN CLOSE SYMBOL CLOSE UNDERSCORE +## +## Ends in an error in state: 185. +## +## function_def -> SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort CLOSE . 
term [ CLOSE ] +## +## The known suffix of the stack is as follows: +## SYMBOL OPEN PAR OPEN nonempty_list(datatype_symbol) CLOSE OPEN list(sorted_var) CLOSE sort CLOSE +## + +147 +a function definition +a term for the function body diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/tokens.mly b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/tokens.mly new file mode 100644 index 0000000000000000000000000000000000000000..976030224d3d21dd7e286ef20be365c9ff1031cd --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/poly/tokens.mly @@ -0,0 +1,47 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + +/* Token declarations for Smtlib parser */ + +%token EOF + +%token OPEN CLOSE +%token <string> NUM DEC HEX BIN STR SYMBOL KEYWORD + +/* Currently unused, see lexer. +%token BINARY DECIMAL HEXADECIMAL NUMERAL STRING +*/ +%token UNDERSCORE ATTRIBUTE AS LET EXISTS FORALL MATCH PAR + +%token ASSERT + CHECK_SAT + CHECK_SAT_ASSUMING + DECLARE_CONST + DECLARE_DATATYPE + DECLARE_DATATYPES + DECLARE_FUN + DECLARE_SORT + DEFINE_FUN + DEFINE_FUN_REC + DEFINE_FUNS_REC + DEFINE_SORT + ECHO EXIT + GET_ASSERTIONS + GET_ASSIGNMENT + GET_INFO + GET_MODEL + GET_OPTION + GET_PROOF + GET_UNSAT_ASSUMPTIONS + GET_UNSAT_CORE + GET_VALUE + POP + PUSH + RESET + RESET_ASSERTIONS + SET_INFO + SET_LOGIC + SET_OPTION + +%% + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/ast.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/ast.ml new file mode 100644 index 0000000000000000000000000000000000000000..83332a370b1a22ba827cd88bd803fdacce1ff90d --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/ast.ml @@ -0,0 +1,250 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information. *) + +(** AST requirement for the Smtlib format. 
+ The smtlib format is widely used among SMT solvers, and is the language + of the smtlib benchmark library. Terms are expressed as s-expressions, + and top-level directives include everything needed to use a prover + in an interactive loop (so it includes directive for getting and setting options, + getting information about the solver's internal model, etc...) *) + +module type Id = sig + + type t + (** The type of identifiers *) + + type namespace + (** Namespace for identifiers *) + + val sort : namespace + val term : namespace + val attr : namespace + (** The namespace for sorts (also called typee), terms + and attributes, respectively. *) + + val mk : namespace -> string -> t + (** Make an identifier from a name and namespace. *) + + val indexed : namespace -> string -> string list -> t + (** Create an indexed identifier. *) + +end + +module type Term = sig + + type t + (** The type of terms. *) + + type id + (** The type of identifiers for constants. *) + + type location + (** The type of locations. *) + + val const : ?loc:location -> id -> t + (** Constants, i.e non predefined symbols. This includes both constants + defined by theories, defined locally in a problem, and also quantified variables. *) + + val str : ?loc:location -> string -> t + (** Quoted strings. According to the smtlib manual, these can be interpreted as + either string literals (when the String theory is used), or simply constants *) + + val int : ?loc:location -> string -> t + val real : ?loc:location -> string -> t + val hexa : ?loc:location -> string -> t + val binary : ?loc:location -> string -> t + (** Constants lexically recognised as numbers in different formats. According to the smtlib + manual, these should not always be interpreted as numbers since their interpretation + is actually dependent on the theory set by the problem. *) + + val colon : ?loc:location -> t -> t -> t + (** Juxtaposition of terms, used to annotate terms with their type. 
*) + + val apply : ?loc:location -> t -> t list -> t + (** Application. *) + + val letand : ?loc:location -> t list -> t -> t + (** Local parrallel bindings. The bindings are a list of terms built using + the [colon] function. *) + + val forall : ?loc:location -> t list -> t -> t + (** Universal quantification. *) + + val exists : ?loc:location -> t list -> t -> t + (** Existencial quantification. *) + + val match_ : ?loc:location -> t -> (t * t) list -> t + (** Pattern matching. The first term is the term to match, + and each tuple in the list is a match case, which is a pair + of a pattern and a match branch. *) + + val sexpr : ?loc:location -> t list -> t + (** S-expressions. Used in smtlib's annotations, *) + + val annot : ?loc:location -> t -> t list -> t + (** Attach a list of attributes (also called annotations) to a term. As written + in the smtlib manual, "Term attributes have no logical meaning -- + semantically, [attr t l] is equivalent to [t]" *) + +end +(** Implementation requirements for Smtlib terms. *) + +module type Statement = sig + + type t + (** The type of statements. *) + + type id + (** The type of identifiers. *) + + type term + (** The type of terms. *) + + type location + (** The type of locations. *) + + (** (Re)starting and terminating *) + + val reset : ?loc:location -> unit -> t + (** Full reset of the prover state. *) + + val set_logic : ?loc:location -> string -> t + (** Set the problem logic. *) + + val set_option : ?loc:location -> term -> t + (** Set the value of a prover option. *) + + val exit : ?loc:location -> unit -> t + (** Exit the interactive loop. *) + + + (** Modifying the assertion stack *) + + val push : ?loc:location -> int -> t + (** Push the given number of new level on the stack of assertions. *) + + val pop : ?loc:location -> int -> t + (** Pop the given number of level on the stack of assertions. *) + + val reset_assertions : ?loc:location -> unit -> t + (** Reset assumed assertions. 
*) + + + (** Introducing new symbols *) + + val type_decl : ?loc:location -> id -> int -> t + (** Declares a new type constructor with given arity. *) + + val type_def : ?loc:location -> id -> id list -> term -> t + (** Defines an alias for types. [type_def f args body] is such that + later occurences of [f] applied to a list of arguments [l] should + be replaced by [body] where the [args] have been substituted by + their value in [l]. *) + + val datatypes : ?loc:location -> (id * term list * (id * term list) list) list -> t + (** Inductive type definitions. *) + + val fun_decl : ?loc:location -> id -> term list -> term list -> term -> t + (** Declares a new term symbol, and its type. [fun_decl f args ret] + declares [f] as a new function symbol which takes arguments of types + described in [args], and with return type [ret]. *) + + val fun_def : ?loc:location -> id -> term list -> term list -> term -> term -> t + (** Defines a new function. [fun_def f args ret body] is such that + applications of [f] are equal to [body] (module substitution of the arguments), + which should be of type [ret]. *) + + val funs_def_rec : ?loc:location -> (id * term list * term list * term * term) list -> t + (** Declare a list of mutually recursive functions. *) + + + (** Asserting and inspecting formulas *) + + val assert_ : ?loc:location -> term -> t + (** Add a proposition to the current set of assertions. *) + + val get_assertions : ?loc:location -> unit -> t + (** Return the current set of assertions. *) + + + (** Checking for satisfiablity *) + + val check_sat : ?loc:location -> term list -> t + (** Solve the current set of assertions for satisfiability, + under the local assumptions specified. *) + + + (** Models *) + + val get_model : ?loc:location -> unit -> t + (** Return the model found. *) + + val get_value : ?loc:location -> term list -> t + (** Return the value of the given terms in the current model of the solver. 
*) + + val get_assignment : ?loc:location -> unit -> t + (** Return the values of asserted propositions which have been labelled using + the ":named" attribute. *) + + (** Proofs *) + + val get_proof : ?loc:location -> unit -> t + (** Return the proof of the lastest [check_sat] if it returned unsat, else + is undefined. *) + + val get_unsat_core : ?loc:location -> unit -> t + (** Return the unsat core of the latest [check_sat] if it returned unsat, + else is undefined. *) + + val get_unsat_assumptions : ?loc:location -> unit -> t + (** Return a list of local assumptions (as givne in {!check_sat}, + that is enough to deduce unsat. *) + + (** Inspecting settings *) + + val get_info : ?loc:location -> string -> t + (** Get information (see smtlib manual). *) + + val get_option : ?loc:location -> string -> t + (** Get the value of a prover option. *) + + (** Scripts commands *) + + val echo : ?loc:location -> string -> t + (** Print back as-is, including the double quotes. *) + + val set_info : ?loc:location -> term -> t + (** Set information (see smtlib manual). *) + +end +(** implementation requirement for smtlib statements. *) + + +module View = struct + + module type Ty = sig + + type t + + type view + + val view : t -> view + + end + + module type Term = sig + + type t + + type ty + + type view + + val ty : t -> ty + + val view : t -> view + + end + +end + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/dolmen_smtlib2_v6.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/dolmen_smtlib2_v6.ml index 8cb6c8d06d4d15231a8df43560cde190fca5da3a..967995eca3031c1b077ce03462a0e5749ccf6625 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/dolmen_smtlib2_v6.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/dolmen_smtlib2_v6.ml @@ -1,9 +1,9 @@ (* This file is free software, part of dolmen. 
See file "LICENSE" formore information *) -module type Id = Ast_smtlib.Id -module type Term = Ast_smtlib.Term -module type Statement = Ast_smtlib.Statement +module type Id = Ast.Id +module type Term = Ast.Term +module type Statement = Ast.Statement module Make (L : Dolmen_intf.Location.S) @@ -11,9 +11,9 @@ module Make (T : Term with type location := L.t and type id := I.t) (S : Statement with type location := L.t and type id := I.t and type term := T.t) = Dolmen_std.Transformer.Make(L)(struct - type token = Tokens_smtlib.token + type token = Tokens.token type statement = S.t let env = [] let incremental = true let error s = Syntax_messages.message s - end)(LexSmtlib)(ParseSmtlib.Make(L)(I)(T)(S)) + end)(Lexer)(Parser.Make(L)(I)(T)(S)) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/dolmen_smtlib2_v6.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/dolmen_smtlib2_v6.mli index 7265fc45293b0b806a5c67ca49c52e0cdb771889..267db8e6df71208f059f429308867bc5a280b02c 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/dolmen_smtlib2_v6.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/dolmen_smtlib2_v6.mli @@ -3,9 +3,9 @@ (** Smtlib language input *) -module type Id = Ast_smtlib.Id -module type Term = Ast_smtlib.Term -module type Statement = Ast_smtlib.Statement +module type Id = Ast.Id +module type Term = Ast.Term +module type Statement = Ast.Statement (** Implementation requirement for the Smtlib format. *) module Make @@ -13,5 +13,5 @@ module Make (I : Id) (T : Term with type location := L.t and type id := I.t) (S : Statement with type location := L.t and type id := I.t and type term := T.t) : - Dolmen_intf.Language.S with type statement = S.t + Dolmen_intf.Language.S with type statement = S.t and type file := L.file (** Functor to generate a parser for the Smtlib format. 
*) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/dune b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/dune index cda13e0b43ffb2d852f9e3ca0598d95b40b0afe8..ccc1a5c77a93c3c92651cc0962cb7f8062b5fc7f 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/dune +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/dune @@ -1,66 +1,12 @@ -(ocamllex (modules lexSmtlib)) - -(menhir - (flags (--only-tokens)) - (modules tokens_smtlib) -) - -(menhir - (flags (--explain --table --external-tokens Tokens_smtlib)) - (modules tokens_smtlib parseSmtlib) - (merge_into parseSmtlib) -) - -(rule - (target syntax_messages.ml) - (deps (:tokens tokens_smtlib.mly) - (:parser parseSmtlib.mly) - (:msg syntax.messages)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_smtlib %{tokens} - %{parser} --base %{parser} --compile-errors %{msg}))) -) - - +; Language library definition (library (name dolmen_smtlib2_v6) (public_name dolmen.smtlib2.v6) + (instrumentation (backend bisect_ppx)) (libraries dolmen_std dolmen_intf menhirLib) - (modules Tokens_smtlib LexSmtlib ParseSmtlib Ast_smtlib Syntax_messages Dolmen_smtlib2_v6) -) - - -; Convenience rule to generate a fresh messages file, -; and update an already existing one. 
-(rule - (target new.messages) - (mode promote-until-clean) - (deps (:tokens tokens_smtlib.mly) - (:parser parseSmtlib.mly)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_smtlib %{tokens} - %{parser} --base %{parser} --list-errors))) + (modules Dolmen_smtlib2_v6 Tokens Lexer Parser Ast Syntax_messages) ) -(rule - (target updated.messages) - (mode promote-until-clean) - (deps (:tokens tokens_smtlib.mly) - (:parser parseSmtlib.mly) - (:msg syntax.messages)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_smtlib %{tokens} - %{parser} --base %{parser} --update-errors %{msg}))) -) - -; Additional rule to add to runtest a check that the messages file is up-to-date -(rule - (alias runtest) - (deps (:tokens tokens_smtlib.mly) - (:parser parseSmtlib.mly) - (:new new.messages) - (:msg syntax.messages)) - (action (run menhir --external-tokens Tokens_smtlib %{tokens} - %{parser} --base %{parser} --compare-errors %{new} --compare-errors %{msg})) -) +; Common include +(include ../../dune.common) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/lexer.mll b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/lexer.mll new file mode 100644 index 0000000000000000000000000000000000000000..a48717717fb800be9fdbfbe50c552a28f2a7688d --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/lexer.mll @@ -0,0 +1,195 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information. 
*) + +(** {1 Smtlib Lexer} *) + +{ + exception Error + + module T = Dolmen_std.Tok + module M = Map.Make(String) + + open Tokens + + (* Token printing *) + + let keyword_descr s = + T.descr s ~kind:"keyword" + + let reserved_descr s = + T.descr s ~kind:"reserved word" + + let descr token : T.descr = + match (token : token) with + | EOF -> T.descr ~kind:"end of file token" "" + | OPEN -> T.descr ~article:"an" ~kind:"opening partenthesis" "" + | CLOSE -> T.descr ~article:"a" ~kind:"closing parenthesise" "" + | NUM s -> T.descr ~kind:"integer" s + | DEC s -> T.descr ~kind:"decimal" s + | HEX s -> T.descr ~kind:"hexadecimal" s + | BIN s -> T.descr ~kind:"binary" s + | STR s -> T.descr ~kind:"string" s + | SYMBOL s -> T.descr ~kind:"symbol" s + | KEYWORD s -> keyword_descr s + | UNDERSCORE -> reserved_descr "_" + | ATTRIBUTE -> reserved_descr "!" + | AS -> reserved_descr "as" + | LET -> reserved_descr "let" + | EXISTS -> reserved_descr "exists" + | FORALL -> reserved_descr "forall" + | MATCH -> reserved_descr "match" + | PAR -> reserved_descr "par" + | ASSERT -> reserved_descr "assert" + | CHECK_SAT -> reserved_descr "check-sat" + | CHECK_SAT_ASSUMING -> reserved_descr "check-sat-assuming" + | DECLARE_CONST -> reserved_descr "declare-const" + | DECLARE_DATATYPE -> reserved_descr "declare-datatype" + | DECLARE_DATATYPES -> reserved_descr "declare-datatypes" + | DECLARE_FUN -> reserved_descr "declare-fun" + | DECLARE_SORT -> reserved_descr "declare-sort" + | DEFINE_FUN -> reserved_descr "define-fun" + | DEFINE_FUN_REC -> reserved_descr "define-fun-rec" + | DEFINE_FUNS_REC -> reserved_descr "define-funs-rec" + | DEFINE_SORT -> reserved_descr "define-sort" + | ECHO -> reserved_descr "echo" + | EXIT -> reserved_descr "exit" + | GET_ASSERTIONS -> reserved_descr "get-assertions" + | GET_ASSIGNMENT -> reserved_descr "gert-assignment" + | GET_INFO -> reserved_descr "get-info" + | GET_MODEL -> reserved_descr "get-model" + | GET_OPTION -> reserved_descr "get-option" + | GET_PROOF 
-> reserved_descr "get-proof" + | GET_UNSAT_ASSUMPTIONS -> reserved_descr "get-unsat-assumptions" + | GET_UNSAT_CORE -> reserved_descr "get-unsat-core" + | GET_VALUE -> reserved_descr "get-value" + | POP -> reserved_descr "pop" + | PUSH -> reserved_descr "push" + | RESET -> reserved_descr "reset" + | RESET_ASSERTIONS -> reserved_descr "reset-assertions" + | SET_INFO -> reserved_descr "set-info" + | SET_LOGIC -> reserved_descr "set-logic" + | SET_OPTION -> reserved_descr "set-option" + + (* Token parsing *) + + let bind map (x, v) = M.add x v map + + let reserved_words = + List.fold_left bind M.empty [ + (* reserved words *) + (* These are currently unused in smtlib scripts commands + * (they are only used in logic definitions), hence they are currently + * ignored, given that only scripts are currently parsed. + "BINARY", BINARY; + "DECIMAL", DECIMAL; + "HEXADECIMAL", HEXADECIMAL; + "NUMERAL", NUMERAL; + "STRING", STRING; + *) + "_", UNDERSCORE; + "!", ATTRIBUTE; + "as", AS; + "let", LET; + "exists", EXISTS; + "forall", FORALL; + "match", MATCH; + "par", PAR; + (* command names *) + "assert", ASSERT; + "check-sat", CHECK_SAT; + "check-sat-assuming", CHECK_SAT_ASSUMING; + "declare-const", DECLARE_CONST; + "declare-datatype", DECLARE_DATATYPE; + "declare-datatypes", DECLARE_DATATYPES; + "declare-fun", DECLARE_FUN; + "declare-sort", DECLARE_SORT; + "define-fun", DEFINE_FUN; + "define-fun-rec", DEFINE_FUN_REC; + "define-funs-rec", DEFINE_FUNS_REC; + "define-sort", DEFINE_SORT; + "echo", ECHO; + "exit", EXIT; + "get-assertions", GET_ASSERTIONS; + "get-assignment", GET_ASSIGNMENT; + "get-info", GET_INFO; + "get-model", GET_MODEL; + "get-option", GET_OPTION; + "get-proof", GET_PROOF; + "get-unsat-assumptions", GET_UNSAT_ASSUMPTIONS; + "get-unsat-core", GET_UNSAT_CORE; + "get-value", GET_VALUE; + "pop", POP; + "push", PUSH; + "reset", RESET; + "reset-assertions", RESET_ASSERTIONS; + "set-info", SET_INFO; + "set-logic", SET_LOGIC; + "set-option", SET_OPTION; + ] + + let 
symbol newline lexbuf s = + (* register the newlines in quoted symbols to maintain correct locations.*) + for i = 0 to (String.length s - 1) do + match s.[i] with + | '\n' -> newline lexbuf + | _ -> () + done; + (* Check whether the symbol is a reserved word. *) + try M.find s reserved_words + with Not_found -> SYMBOL s + +} + +let white_space_char = ['\t' '\n' '\r' ' '] +let printable_char = [' ' - '~' '\128' - '\255'] +let white_space_or_printable = ['\t' '\n' '\r' ' ' - '~' '\128' - '\255'] +let digit = ['0' - '9'] +let letter = ['A' - 'Z' 'a' - 'z'] + +let numeral = '0' | (['1' - '9'] digit*) +let decimal = numeral '.' '0'* numeral + +let hex = ['0' - '9'] | ['A' - 'F'] | ['a' - 'f'] +let hexadecimal = "#x" hex+ + +let bin = ['0' '1'] +let binary = "#b" bin+ + +let ss_first_char = + letter | ['+' '-' '/' '*' '=' '%' '?' '!' '.' '$' '_' '~' '&' '^' '<' '>' '@'] +let ss_char = ss_first_char | digit +let simple_symbol = ss_first_char ss_char* + +let quoted_symbol_char = (white_space_or_printable # ['|' '\\']) + +let keyword = ':' simple_symbol + +let comment = ';' (white_space_or_printable # ['\r' '\n'])* + +rule token newline = parse + (* Whitespace, newlines and comments *) + | eof { EOF } + | [' ' '\t' '\r']+ { token newline lexbuf } + | '\n' { newline lexbuf; token newline lexbuf } + | comment { token newline lexbuf } + + (* SMTLIB tokens *) + | '(' { OPEN } + | ')' { CLOSE } + | numeral as s { NUM s } + | decimal as s { DEC s } + | hexadecimal as s { HEX s } + | binary as s { BIN s } + | '"' { string newline (Buffer.create 42) lexbuf } + | keyword as s { KEYWORD s } + | simple_symbol as s + | '|' (quoted_symbol_char* as s) '|' + { symbol newline lexbuf s } + +and string newline b = parse + | '"' '"' { Buffer.add_char b '"'; string newline b lexbuf } + | '"' { STR (Buffer.contents b) } + | (printable_char | white_space_char) as c + { if c = '\n' then newline lexbuf; + Buffer.add_char b c; string newline b lexbuf } + diff --git 
a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/parser.mly b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/parser.mly new file mode 100644 index 0000000000000000000000000000000000000000..d00c23be371dc83c2d650f0b8ef6fb6d1d611591 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/parser.mly @@ -0,0 +1,386 @@ + +(* This file is free software, part of dolmem. See file "LICENSE" for more information *) + +%parameter <L : Dolmen_intf.Location.S> +%parameter <I : Ast.Id> +%parameter <T : Ast.Term with type location := L.t and type id := I.t> +%parameter <S : Ast.Statement with type location := L.t and type id := I.t and type term := T.t> + +%start <T.t> term +%start <S.t list> file +%start <S.t option> input + +%% + +spec_constant: + | s=NUM + { let loc = L.mk_pos $startpos $endpos in T.int ~loc s } + | s=DEC + { let loc = L.mk_pos $startpos $endpos in T.real ~loc s } + | s=HEX + { let loc = L.mk_pos $startpos $endpos in T.hexa ~loc s } + | s=BIN + { let loc = L.mk_pos $startpos $endpos in T.binary ~loc s } + | s=STR + { let loc = L.mk_pos $startpos $endpos in T.str ~loc s } +; + +s_expr: + | c=spec_constant + { c } + | s=SYMBOL + { let loc = L.mk_pos $startpos $endpos in T.const ~loc I.(mk term s) } + | s=KEYWORD + { let loc = L.mk_pos $startpos $endpos in T.const ~loc I.(mk term s) } + | OPEN l=s_expr* CLOSE + { let loc = L.mk_pos $startpos $endpos in T.sexpr ~loc l } +; + +index: + | s=NUM + | s=SYMBOL + { s } + /* Small language extension to support string char literals */ + | s=HEX + { s } +; + +identifier: + | s=SYMBOL + { fun ns -> I.mk ns s } + | OPEN UNDERSCORE s=SYMBOL l=index+ CLOSE + { fun ns -> I.indexed ns s l } +; + +sort: + | s=identifier + { let loc = L.mk_pos $startpos $endpos in T.const ~loc (s I.sort) } + | OPEN f=identifier args=sort+ CLOSE + { let c = + let loc = L.mk_pos $startpos(f) $endpos(f) in + T.const ~loc (f I.sort) + in + let loc = L.mk_pos $startpos $endpos in T.apply ~loc c args } +; 
+ +attribute_value: + | v=spec_constant + { v } + | v=SYMBOL + { let loc = L.mk_pos $startpos $endpos in T.const ~loc I.(mk attr v) } + | OPEN l=s_expr* CLOSE + { let loc = L.mk_pos $startpos $endpos in T.sexpr ~loc l } +; + +attribute: + | s=KEYWORD a=attribute_value? + { + let t = + let loc = L.mk_pos $startpos(s) $endpos(s) in + T.const ~loc I.(mk attr s) + in + match a with + | None -> t + | Some t' -> + let loc = L.mk_pos $startpos $endpos in + T.apply ~loc t [t'] + } +; + +/* +The [(as id ty)] doesn't specify the type of the function [id] +but only its result type +*/ +qual_identifier: + | s=identifier + { let loc = L.mk_pos $startpos $endpos in `NoAs (T.const ~loc (s I.term)) } + | OPEN AS s=identifier ty=sort CLOSE + { let loc = L.mk_pos $startpos $endpos in + `As (T.const ~loc (s I.term),ty) } +; + +var_binding: + | OPEN s=SYMBOL t=term CLOSE + { let c = + let loc = L.mk_pos $startpos(s) $endpos(s) in + T.const ~loc I.(mk term s) + in + let loc = L.mk_pos $startpos $endpos in T.colon ~loc c t } +; + +sorted_var: + | OPEN s=SYMBOL ty=sort CLOSE + { let c = + let loc = L.mk_pos $startpos(s) $endpos(s) in + T.const ~loc I.(mk term s) + in + let loc = L.mk_pos $startpos $endpos in T.colon ~loc c ty } +; + +/* Additional rule for pattern symbols, useful for: + 1- locations in symbol lists in patterns, + 2- menhir '+' syntax doesn't support raw tokens afaik */ +pattern_symbol: + | s=SYMBOL + { let loc = L.mk_pos $startpos $endpos in T.const ~loc I.(mk term s) } +; + +pattern: + | c=pattern_symbol + { c } + | OPEN f=pattern_symbol args=pattern_symbol+ CLOSE + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc f args } +; + +match_case: + | OPEN p=pattern t=term CLOSE + { p, t } +; + +term: + | c=spec_constant + { c } + | s=qual_identifier + { let loc = L.mk_pos $startpos $endpos in + match s with + | `NoAs f -> f + | `As (f,ty) -> T.colon ~loc f ty } + | OPEN s=qual_identifier args=term+ CLOSE + { let loc = L.mk_pos $startpos $endpos in + match s with + | 
`NoAs f -> T.apply ~loc f args + | `As (f,ty) -> T.colon (T.apply ~loc f args) ty } + | OPEN LET OPEN l=var_binding+ CLOSE t=term CLOSE + { let loc = L.mk_pos $startpos $endpos in T.letand ~loc l t } + | OPEN FORALL OPEN l=sorted_var+ CLOSE t=term CLOSE + { let loc = L.mk_pos $startpos $endpos in T.forall ~loc l t } + | OPEN EXISTS OPEN l=sorted_var+ CLOSE t=term CLOSE + { let loc = L.mk_pos $startpos $endpos in T.exists ~loc l t } + | OPEN MATCH t=term OPEN l=match_case+ CLOSE CLOSE + { let loc = L.mk_pos $startpos $endpos in T.match_ ~loc t l } + | OPEN ATTRIBUTE f=term args=attribute+ CLOSE + { let loc = L.mk_pos $startpos $endpos in T.annot ~loc f args } +; + +info_flag: + /* The following cases are subsumed by the last case, and thus ignored, + most notably because they would force to introduce tokens for specific + keywords even though these rules are syntaxically useless. + | :all-statistics + | :assertion-stack-levels + | :authors + | :error-behavior + | :name + | :reason-unknown + | :version + */ + | s=KEYWORD + { s } +; + +/* This definition is useless (not used in the syntax), + and it would force to match on non-reserved symbols, + which is very, very, very ugly... +b_value: + | true + | false +; +*/ + +/* renamed from option to avoid a name_clash */ +command_option: + /* These cases are subsumed by the last case, and thus ignored, + most notably because they would force to introduce tokens for specific + keywords even though these rules are syntaxically useless. + Also, this allows to ignore the definition of <b_value>, which is problematic. 
+ | :diagnostic-output-channel <string> + | :global-declarations <b_value> + | :interactive-mode <b_value> + | :print-success <b_value> + | :produce-assertions <b_value> + | :produce-assignments <b_value> + | :produce-models <b_value> + | :produce-proofs <b_value> + | :produce-unsat-assumptions <b_value> + | :produce-unsat-cores <b_value> + | :random-seed <numeral> + | :regular-output-channel <string> + | :reproducible-resource-limit <numeral> + | :verbosity <numeral> + */ + | a=attribute + { a } +; + +sort_dec: + | OPEN s=SYMBOL n=NUM CLOSE + { I.(mk sort s), int_of_string n + (* shouldn't raise because of the definition of numeral in lexer *) } +; + +selector_dec: + | OPEN s=SYMBOL ty=sort CLOSE + { let f = + let loc = L.mk_pos $startpos $endpos in + T.const ~loc (I.mk I.term s) + in + let loc = L.mk_pos $startpos $endpos in + T.colon ~loc f ty } +; + +constructor_dec: + | OPEN s=SYMBOL l=selector_dec* CLOSE + { (I.mk I.term s), l } +; + +/* Additional rule for datatype symbols, useful because + menhir '+' syntax does'nt support raw tokens afaik */ +datatype_symbol: + | s=SYMBOL + { let loc = L.mk_pos $startpos $endpos in T.const ~loc I.(mk sort s) } + +datatype_dec: + | OPEN l=constructor_dec+ CLOSE + { [], l } + | OPEN PAR OPEN vars=datatype_symbol+ CLOSE OPEN l=constructor_dec+ CLOSE CLOSE + { vars, l } +; + +function_dec: + | OPEN s=SYMBOL OPEN args=sorted_var* CLOSE ret=sort CLOSE + { I.(mk term s), [], args, ret } + +function_def: + | s=SYMBOL OPEN args=sorted_var* CLOSE ret=sort body=term + { I.(mk term s), [], args, ret, body } + +/* Additional rule for prop_literals symbols, to have lighter + semantic actions in prop_literal reductions. */ +prop_symbol: + | s=pattern_symbol { s } +; + +/* This is a ugly hack, but necessary because the syntax defines + this reduction using a `not` token which doesn't really exists, + since it is not a reserved word, thus forcing us to pattern + match on the string... 
*/ +not_symbol: + | s=SYMBOL + { if not (s = "not") then assert false; + let loc = L.mk_pos $startpos $endpos in T.const ~loc I.(mk term s) } +; + +prop_literal: + | s=prop_symbol + { s } + | OPEN f=not_symbol s=prop_symbol CLOSE + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc f [s] } +; + +command: + | OPEN ASSERT t=term CLOSE + { let loc = L.mk_pos $startpos $endpos in S.assert_ ~loc t } + | OPEN CHECK_SAT CLOSE + { let loc = L.mk_pos $startpos $endpos in S.check_sat ~loc [] } + | OPEN CHECK_SAT_ASSUMING OPEN l=prop_literal* CLOSE CLOSE + { let loc = L.mk_pos $startpos $endpos in S.check_sat ~loc l } + | OPEN DECLARE_CONST s=SYMBOL ty=sort CLOSE + { let loc = L.mk_pos $startpos $endpos in S.fun_decl ~loc I.(mk term s) [] [] ty } + | OPEN DECLARE_DATATYPE s=SYMBOL d=datatype_dec CLOSE + { let vars, constructors = d in + let loc = L.mk_pos $startpos $endpos in + S.datatypes ~loc [I.(mk sort s), vars, constructors] } + | OPEN DECLARE_DATATYPES OPEN l1=sort_dec+ CLOSE OPEN l2=datatype_dec+ CLOSE CLOSE + { let res = + try + List.map2 (fun (s, _) (vars, constructors) -> s, vars, constructors) l1 l2 + with Invalid_argument _ -> + assert false + in + let loc = L.mk_pos $startpos $endpos in + S.datatypes ~loc res } + | OPEN DECLARE_FUN s=SYMBOL OPEN args=sort* CLOSE ty=sort CLOSE + { let id = I.(mk term s) in + let loc = L.mk_pos $startpos $endpos in + S.fun_decl ~loc id [] args ty } + | OPEN DECLARE_SORT s=SYMBOL n=NUM CLOSE + { let id = I.(mk sort s) in + let loc = L.mk_pos $startpos $endpos in + S.type_decl ~loc id (int_of_string n) } + | OPEN DEFINE_FUN f=function_def CLOSE + { let id, vars, args, ret, body = f in + let loc = L.mk_pos $startpos $endpos in + S.fun_def ~loc id vars args ret body } + | OPEN DEFINE_FUN_REC f=function_def CLOSE + { let id, vars, args, ret, body = f in + let loc = L.mk_pos $startpos $endpos in + S.funs_def_rec ~loc [id, vars, args, ret, body] } + /* The syntax technically defines this reduction as having l and l' be the same 
length, + but that isn't easily expressible in menhir, so the check is delayed */ + | OPEN DEFINE_FUNS_REC OPEN l1=function_dec+ CLOSE OPEN l2=term+ CLOSE CLOSE + { let res = + try List.map2 (fun (id, vars, args, ret) body -> id, vars, args, ret, body) l1 l2 + with Invalid_argument _ -> assert false + in + let loc = L.mk_pos $startpos $endpos in + S.funs_def_rec ~loc res } + | OPEN DEFINE_SORT s=SYMBOL OPEN args=SYMBOL* CLOSE ty=sort CLOSE + { let id = I.(mk sort s) in + let l = List.map I.(mk sort) args in + let loc = L.mk_pos $startpos $endpos in + S.type_def ~loc id l ty } + | OPEN ECHO s=STR CLOSE + { let loc = L.mk_pos $startpos $endpos in + S.echo ~loc s } + + | OPEN EXIT CLOSE + { let loc = L.mk_pos $startpos $endpos in S.exit ~loc () } + + | OPEN GET_ASSERTIONS CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_assertions ~loc () } + | OPEN GET_ASSIGNMENT CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_assignment ~loc () } + | OPEN GET_INFO i=info_flag CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_info ~loc i } + | OPEN GET_MODEL CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_model ~loc () } + | OPEN GET_OPTION k=KEYWORD CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_option ~loc k } + | OPEN GET_PROOF CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_proof ~loc () } + | OPEN GET_UNSAT_ASSUMPTIONS CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_unsat_assumptions ~loc () } + | OPEN GET_UNSAT_CORE CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_unsat_core ~loc () } + | OPEN GET_VALUE OPEN l=term+ CLOSE CLOSE + { let loc = L.mk_pos $startpos $endpos in S.get_value ~loc l } + + | OPEN POP n=NUM CLOSE + { let loc = L.mk_pos $startpos $endpos in S.pop ~loc (int_of_string n) } + | OPEN PUSH n=NUM CLOSE + { let loc = L.mk_pos $startpos $endpos in S.push ~loc (int_of_string n) } + | OPEN RESET CLOSE + { let loc = L.mk_pos $startpos $endpos in S.reset ~loc () } + | OPEN RESET_ASSERTIONS CLOSE + { let loc = 
L.mk_pos $startpos $endpos in S.reset_assertions ~loc () } + + | OPEN SET_INFO c=command_option CLOSE + { let loc = L.mk_pos $startpos $endpos in S.set_info ~loc c } + | OPEN SET_LOGIC s=SYMBOL CLOSE + { let loc = L.mk_pos $startpos $endpos in S.set_logic ~loc s } + | OPEN SET_OPTION c=command_option CLOSE + { let loc = L.mk_pos $startpos $endpos in S.set_option ~loc c } +; + +file: + | l=command* EOF + { l } +; + +input: + | EOF + { None } + | c=command + { Some c } + +%% diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/syntax.messages b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/syntax.messages index a8dcd49e7756f7f1e21cad97606ba0be9829512f..f91cf2691d76ff700a637837118f7758b90941e0 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/syntax.messages +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/syntax.messages @@ -1180,7 +1180,7 @@ input: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN CLOSE SYMBOL CLOSE CLOSE OPEN ## ## Ends in an error in state: 352. ## -## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE . OPEN [ # ] +## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE . CLOSE [ # ] ## ## The known suffix of the stack is as follows: ## OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE @@ -1194,7 +1194,7 @@ input: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN CLOSE SYMBOL CLOSE CLOSE OPEN ## ## Ends in an error in state: 350. ## -## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN . nonempty_list(term) CLOSE OPEN [ # ] +## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN . 
nonempty_list(term) CLOSE CLOSE [ # ] ## ## The known suffix of the stack is as follows: ## OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN @@ -1208,7 +1208,7 @@ input: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN CLOSE SYMBOL CLOSE CLOSE UNDER ## ## Ends in an error in state: 349. ## -## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE . OPEN nonempty_list(term) CLOSE OPEN [ # ] +## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE . OPEN nonempty_list(term) CLOSE CLOSE [ # ] ## ## The known suffix of the stack is as follows: ## OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE @@ -1222,7 +1222,7 @@ input: OPEN DEFINE_FUNS_REC OPEN UNDERSCORE ## ## Ends in an error in state: 347. ## -## command -> OPEN DEFINE_FUNS_REC OPEN . nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE OPEN [ # ] +## command -> OPEN DEFINE_FUNS_REC OPEN . nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE CLOSE [ # ] ## ## The known suffix of the stack is as follows: ## OPEN DEFINE_FUNS_REC OPEN @@ -1236,7 +1236,7 @@ input: OPEN DEFINE_FUNS_REC UNDERSCORE ## ## Ends in an error in state: 346. ## -## command -> OPEN DEFINE_FUNS_REC . OPEN nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE OPEN [ # ] +## command -> OPEN DEFINE_FUNS_REC . OPEN nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE CLOSE [ # ] ## ## The known suffix of the stack is as follows: ## OPEN DEFINE_FUNS_REC @@ -1665,9 +1665,9 @@ input: OPEN SET_INFO KEYWORD KEYWORD ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 3, spurious reduction of production option(attribute_value) -> -## In state 22, spurious reduction of production attribute -> KEYWORD option(attribute_value) -## In state 26, spurious reduction of production command_option -> attribute +## In state 3, spurious reduction of production option(attribute_value) -> +## In state 22, spurious reduction of production attribute -> KEYWORD option(attribute_value) +## In state 26, spurious reduction of production command_option -> attribute ## 109 @@ -1731,9 +1731,9 @@ input: OPEN SET_OPTION KEYWORD KEYWORD ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 3, spurious reduction of production option(attribute_value) -> -## In state 22, spurious reduction of production attribute -> KEYWORD option(attribute_value) -## In state 26, spurious reduction of production command_option -> attribute +## In state 3, spurious reduction of production option(attribute_value) -> +## In state 22, spurious reduction of production attribute -> KEYWORD option(attribute_value) +## In state 26, spurious reduction of production command_option -> attribute ## 113 @@ -1770,7 +1770,7 @@ input: OPEN UNDERSCORE ## command -> OPEN . DECLARE_SORT SYMBOL NUM CLOSE [ # ] ## command -> OPEN . DEFINE_FUN function_def CLOSE [ # ] ## command -> OPEN . DEFINE_FUN_REC function_def CLOSE [ # ] -## command -> OPEN . DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE OPEN [ # ] +## command -> OPEN . DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE CLOSE [ # ] ## command -> OPEN . DEFINE_SORT SYMBOL OPEN list(SYMBOL) CLOSE sort CLOSE [ # ] ## command -> OPEN . ECHO STR CLOSE [ # ] ## command -> OPEN . 
EXIT CLOSE [ # ] @@ -2878,7 +2878,7 @@ file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN CLOSE SYMBOL CLOSE CLOSE OPEN S ## ## Ends in an error in state: 193. ## -## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE . OPEN [ OPEN EOF ] +## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE . CLOSE [ OPEN EOF ] ## ## The known suffix of the stack is as follows: ## OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE @@ -2892,7 +2892,7 @@ file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN CLOSE SYMBOL CLOSE CLOSE OPEN U ## ## Ends in an error in state: 191. ## -## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN . nonempty_list(term) CLOSE OPEN [ OPEN EOF ] +## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN . nonempty_list(term) CLOSE CLOSE [ OPEN EOF ] ## ## The known suffix of the stack is as follows: ## OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN @@ -2906,7 +2906,7 @@ file: OPEN DEFINE_FUNS_REC OPEN OPEN SYMBOL OPEN CLOSE SYMBOL CLOSE CLOSE UNDERS ## ## Ends in an error in state: 190. ## -## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE . OPEN nonempty_list(term) CLOSE OPEN [ OPEN EOF ] +## command -> OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE . OPEN nonempty_list(term) CLOSE CLOSE [ OPEN EOF ] ## ## The known suffix of the stack is as follows: ## OPEN DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE @@ -3005,7 +3005,7 @@ file: OPEN DEFINE_FUNS_REC OPEN UNDERSCORE ## ## Ends in an error in state: 181. ## -## command -> OPEN DEFINE_FUNS_REC OPEN . nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE OPEN [ OPEN EOF ] +## command -> OPEN DEFINE_FUNS_REC OPEN . 
nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE CLOSE [ OPEN EOF ] ## ## The known suffix of the stack is as follows: ## OPEN DEFINE_FUNS_REC OPEN @@ -3019,7 +3019,7 @@ file: OPEN DEFINE_FUNS_REC UNDERSCORE ## ## Ends in an error in state: 180. ## -## command -> OPEN DEFINE_FUNS_REC . OPEN nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE OPEN [ OPEN EOF ] +## command -> OPEN DEFINE_FUNS_REC . OPEN nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE CLOSE [ OPEN EOF ] ## ## The known suffix of the stack is as follows: ## OPEN DEFINE_FUNS_REC @@ -3546,9 +3546,9 @@ file: OPEN SET_INFO KEYWORD KEYWORD ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 3, spurious reduction of production option(attribute_value) -> -## In state 22, spurious reduction of production attribute -> KEYWORD option(attribute_value) -## In state 26, spurious reduction of production command_option -> attribute +## In state 3, spurious reduction of production option(attribute_value) -> +## In state 22, spurious reduction of production attribute -> KEYWORD option(attribute_value) +## In state 26, spurious reduction of production command_option -> attribute ## 109 @@ -3612,9 +3612,9 @@ file: OPEN SET_OPTION KEYWORD KEYWORD ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 3, spurious reduction of production option(attribute_value) -> -## In state 22, spurious reduction of production attribute -> KEYWORD option(attribute_value) -## In state 26, spurious reduction of production command_option -> attribute +## In state 3, spurious reduction of production option(attribute_value) -> +## In state 22, spurious reduction of production attribute -> KEYWORD option(attribute_value) +## In state 26, spurious reduction of production command_option -> attribute ## 113 @@ -3711,7 +3711,7 @@ file: OPEN UNDERSCORE ## command -> OPEN . DECLARE_SORT SYMBOL NUM CLOSE [ OPEN EOF ] ## command -> OPEN . DEFINE_FUN function_def CLOSE [ OPEN EOF ] ## command -> OPEN . DEFINE_FUN_REC function_def CLOSE [ OPEN EOF ] -## command -> OPEN . DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE OPEN [ OPEN EOF ] +## command -> OPEN . DEFINE_FUNS_REC OPEN nonempty_list(function_dec) CLOSE OPEN nonempty_list(term) CLOSE CLOSE [ OPEN EOF ] ## command -> OPEN . DEFINE_SORT SYMBOL OPEN list(SYMBOL) CLOSE sort CLOSE [ OPEN EOF ] ## command -> OPEN . ECHO STR CLOSE [ OPEN EOF ] ## command -> OPEN . EXIT CLOSE [ OPEN EOF ] diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/tokens.mly b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/tokens.mly new file mode 100644 index 0000000000000000000000000000000000000000..976030224d3d21dd7e286ef20be365c9ff1031cd --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/smtlib2/v2.6/tokens.mly @@ -0,0 +1,47 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + +/* Token declarations for Smtlib parser */ + +%token EOF + +%token OPEN CLOSE +%token <string> NUM DEC HEX BIN STR SYMBOL KEYWORD + +/* Currently unused, see lexer. 
+%token BINARY DECIMAL HEXADECIMAL NUMERAL STRING +*/ +%token UNDERSCORE ATTRIBUTE AS LET EXISTS FORALL MATCH PAR + +%token ASSERT + CHECK_SAT + CHECK_SAT_ASSUMING + DECLARE_CONST + DECLARE_DATATYPE + DECLARE_DATATYPES + DECLARE_FUN + DECLARE_SORT + DEFINE_FUN + DEFINE_FUN_REC + DEFINE_FUNS_REC + DEFINE_SORT + ECHO EXIT + GET_ASSERTIONS + GET_ASSIGNMENT + GET_INFO + GET_MODEL + GET_OPTION + GET_PROOF + GET_UNSAT_ASSUMPTIONS + GET_UNSAT_CORE + GET_VALUE + POP + PUSH + RESET + RESET_ASSERTIONS + SET_INFO + SET_LOGIC + SET_OPTION + +%% + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/ast.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/ast.ml new file mode 100644 index 0000000000000000000000000000000000000000..55ca8bffe74a126f712076dade9f5a2d7aa2aca0 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/ast.ml @@ -0,0 +1,167 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + +module type Id = sig + + type t + (** The type of identifiers *) + + type namespace + (** The type for namespaces. *) + + val term : namespace + (** Usual namespace, used for temrs, types and propositions. *) + + val decl : namespace + (** Names used to refer to tptp phrases. These are used + in declarations and include statement. *) + + val mk : namespace -> string -> t + (** Make an identifier *) + +end + +module type Term = sig + + type t + (** The type of terms. *) + + type id + (** The type of identifiers *) + + type location + (** The type of locations attached to terms. 
*) + + val eq_t : ?loc:location -> unit -> t + val neq_t : ?loc:location -> unit -> t + val not_t : ?loc:location -> unit -> t + val or_t : ?loc:location -> unit -> t + val and_t : ?loc:location -> unit -> t + val xor_t : ?loc:location -> unit -> t + val nor_t : ?loc:location -> unit -> t + val nand_t : ?loc:location -> unit -> t + val equiv_t : ?loc:location -> unit -> t + val implies_t : ?loc:location -> unit -> t + val implied_t : ?loc:location -> unit -> t + val pi_t : ?loc:location -> unit -> t + val sigma_t : ?loc:location -> unit -> t + val data_t : ?loc:location -> unit -> t + (** Predefined symbols in tptp. Symbols as standalone terms are necessary + for parsing tptp's THF. {!implied_t} is reverse implication, and + {!data_t} is used in tptp's annotations. {!pi_t} and {!sigma_t} are + the encoding of forall and exists quantifiers as constant in higher-order + logic. *) + + val colon : ?loc:location -> t -> t -> t + (** Juxtaposition of terms, usually used for annotating terms with their type. *) + + val var : ?loc:location -> id -> t + (** Make a variable (in tptp, variable are syntaxically different from constants). *) + + val const : ?loc:location -> id -> t + (** Make a constant. *) + + val distinct : ?loc:location -> id -> t + (** Make a constant whose name possibly contain special characters + (All 'distinct' constants name are enclosed in quotes). *) + + val int : ?loc:location -> string -> t + val rat : ?loc:location -> string -> t + val real : ?loc:location -> string -> t + (** Constants that are syntaxically recognised as numbers. *) + + val apply : ?loc:location -> t -> t list -> t + (** Application. *) + + val ite : ?loc:location -> t -> t -> t -> t + (** Conditional, of the form [ite condition then_branch els_branch]. *) + + val union : ?loc:location -> t -> t -> t + (** Union of types. *) + + val product : ?loc:location -> t -> t -> t + (** Product of types, used for function types with more than one argument. 
*) + + val arrow : ?loc:location -> t -> t -> t + (** Function type constructor. *) + + val subtype : ?loc:location -> t -> t -> t + (** Comparison of type (used in tptp's THF). *) + + val pi : ?loc:location -> t list -> t -> t + (** Dependant type constructor, used for polymorphic function types. *) + + val letin : ?loc:location -> t list -> t -> t + (** Local binding for terms. *) + + val forall : ?loc:location -> t list -> t -> t + (** Universal propositional quantification. *) + + val exists : ?loc:location -> t list -> t -> t + (** Existencial porpositional quantification. *) + + val lambda : ?loc:location -> t list -> t -> t + (** Function construction. *) + + val choice : ?loc:location -> t list -> t -> t + (** Indefinite description, also called choice operator. *) + + val description : ?loc:location -> t list -> t -> t + (** Definite description. *) + + val sequent : ?loc:location -> t list -> t list -> t + (** Sequents as terms, used as [sequents hyps goals]. *) + +end + +module type Statement = sig + + type t + (** The type of statements. *) + + type id + (** The type of identifiers *) + + type term + (** The type of terms used in statements. *) + + type location + (** The type of locations attached to statements. *) + + val annot : ?loc:location -> term -> term list -> term + (** Terms as annotations for statements. *) + + val include_ : ?loc:location -> string -> id list -> t + (** Include directive. Given the filename, and a list of + names to import (an empty list means import everything). *) + + val tpi : ?loc:location -> ?annot:term -> id -> string -> term -> t + val thf : ?loc:location -> ?annot:term -> id -> string -> term -> t + val tff : ?loc:location -> ?annot:term -> id -> string -> term -> t + val fof : ?loc:location -> ?annot:term -> id -> string -> term -> t + val cnf : ?loc:location -> ?annot:term -> id -> string -> term -> t + (** TPTP statements, used for instance as [tff ~loc ~annot name role t]. 
+ Instructs the prover to register a new directive with the given name, + role and term. Current tptp roles are: + - ["axiom", "hypothesis", "definition", "lemma", "theorem"] acts + as new assertions/declartions + - ["assumption", "conjecture"] are proposition that need to be proved, + and then can be used to prove other propositions. They are equivalent + to the following sequence of smtlib statements: + {ul + {- [push 1]} + {- [assert (not t)]} + {- [check_sat]} + {- [pop 1]} + {- [assert t]} + } + - ["negated_conjecture"] is the same as ["conjecture"], but the given proposition + is false (i.e its negation is the proposition to prove). + - ["type"] declares a new symbol and its type + - ["plain", "unknown", "fi_domain", "fi_functors", "fi_predicates"] are valid + roles with no specified semantics + - any other role is an error + *) + +end + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/dolmen_tptp_v6_3_0.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/dolmen_tptp_v6_3_0.ml index a606bc7689bc75441b20397d9a3fcf20bbcf3342..17a9ca4fec99cd826cb99d8b374c2ef70454cc36 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/dolmen_tptp_v6_3_0.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/dolmen_tptp_v6_3_0.ml @@ -1,9 +1,9 @@ (* This file is free software, part of dolmen. 
See file "LICENSE" formore information *) -module type Id = Ast_tptp.Id -module type Term = Ast_tptp.Term -module type Statement = Ast_tptp.Statement +module type Id = Ast.Id +module type Term = Ast.Term +module type Statement = Ast.Statement module Make (L : Dolmen_intf.Location.S) @@ -11,10 +11,10 @@ module Make (T : Term with type location := L.t and type id := I.t) (S : Statement with type location := L.t and type id := I.t and type term := T.t) = Dolmen_std.Transformer.Make(L)(struct - type token = Tokens_tptp.token + type token = Tokens.token type statement = S.t let env = ["TPTP"] let incremental = true let error s = Syntax_messages.message s - end)(LexTptp)(ParseTptp.Make(L)(I)(T)(S)) + end)(Lexer)(Parser.Make(L)(I)(T)(S)) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/dolmen_tptp_v6_3_0.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/dolmen_tptp_v6_3_0.mli index b47ca1b255077b3d7b9fdae5a03185b5dcb2a25c..fb25cdb321bffc8116f1219d28951521efdde3b4 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/dolmen_tptp_v6_3_0.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/dolmen_tptp_v6_3_0.mli @@ -3,9 +3,9 @@ (** TPTP language input *) -module type Id = Ast_tptp.Id -module type Term = Ast_tptp.Term -module type Statement = Ast_tptp.Statement +module type Id = Ast.Id +module type Term = Ast.Term +module type Statement = Ast.Statement (** Implementation requirement for the TPTP format. *) module Make @@ -13,6 +13,6 @@ module Make (I : Id) (T : Term with type location := L.t and type id := I.t) (S : Statement with type location := L.t and type id := I.t and type term := T.t) : - Dolmen_intf.Language.S with type statement = S.t + Dolmen_intf.Language.S with type statement = S.t and type file := L.file (** Functor to generate a parser for the TPTP format. 
*) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/dune b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/dune index 11d8f243cf4c1412d63a7a3e94ed51ca303e097b..a95d407c7701fef64fc835c4c4099bd158ffe947 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/dune +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/dune @@ -1,65 +1,12 @@ -(ocamllex (modules lexTptp)) - -(menhir - (flags (--only-tokens)) - (modules tokens_tptp) -) - -(menhir - (infer true) - (flags (--explain --table --external-tokens Tokens_tptp)) - (modules tokens_tptp parseTptp) - (merge_into parseTptp) -) - -(rule - (target syntax_messages.ml) - (deps (:tokens tokens_tptp.mly) - (:parser parseTptp.mly) - (:msg syntax.messages)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_tptp %{tokens} - %{parser} --base %{parser} --compile-errors %{msg}))) -) - +; Language library definition (library (name dolmen_tptp_v6_3_0) (public_name dolmen.tptp.v6_3_0) + (instrumentation (backend bisect_ppx)) (libraries dolmen_std dolmen_intf menhirLib) - (modules Tokens_tptp LexTptp ParseTptp Ast_tptp Syntax_messages Dolmen_tptp_v6_3_0) -) - -; Convenience rule to generate a fresh messages file, -; and update an already existing one. 
-(rule - (target new.messages) - (mode promote-until-clean) - (deps (:tokens tokens_tptp.mly) - (:parser parseTptp.mly)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_tptp %{tokens} - %{parser} --base %{parser} --list-errors))) + (modules Dolmen_tptp_v6_3_0 Tokens Lexer Parser Ast Syntax_messages) ) -(rule - (target updated.messages) - (mode promote-until-clean) - (deps (:tokens tokens_tptp.mly) - (:parser parseTptp.mly) - (:msg syntax.messages)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_tptp %{tokens} - %{parser} --base %{parser} --update-errors %{msg}))) -) - -; Additional rule to add to runtest a check that the messages file is up-to-date -(rule - (alias runtest) - (deps (:tokens tokens_tptp.mly) - (:parser parseTptp.mly) - (:new new.messages) - (:msg syntax.messages)) - (action (run menhir --external-tokens Tokens_tptp %{tokens} - %{parser} --base %{parser} --compare-errors %{new} --compare-errors %{msg})) -) +; Common include +(include ../../dune.common) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/lexer.mll b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/lexer.mll new file mode 100644 index 0000000000000000000000000000000000000000..a4c929f191e248d40df94f3dce33623a320c981d --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/lexer.mll @@ -0,0 +1,244 @@ +(* +Copyright (c) 2013, Simon Cruanes +Copyright (c) 2016, Guillaume Bury +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. Redistributions in binary +form must reproduce the above copyright notice, this list of conditions and the +following disclaimer in the documentation and/or other materials provided with +the distribution. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*) + +(** {1 TPTP Lexer} *) + +{ + exception Error + + open Tokens + + module T = Dolmen_std.Tok + + let reserved s = + T.descr s + ~kind:"reserved word" + ~hint:"reserved words cannot be used as identifiers" + + let descr t : T.descr = + match (t: token) with + | EOF -> T.descr ~kind:"end of file token" "" + | DOT -> reserved "." + | COMMA -> reserved "," + | COLON -> reserved ":" + | LEFT_PAREN -> reserved "(" + | RIGHT_PAREN -> reserved ")" + | LEFT_BRACKET -> reserved "[" + | RIGHT_BRACKET -> reserved "]" + | CNF -> reserved "cnf" + | FOF -> reserved "fof" + | TFF -> reserved "tff" + | THF -> reserved "thf" + | TPI -> reserved "tpi" + | INCLUDE -> reserved "include" + | LAMBDA -> reserved "^" + | APPLY -> reserved "@" + | DEFINITE_DESCRIPTION -> reserved "@-" + | INDEFINITE_DESCRIPTION -> reserved "@+" + | FORALL_TY -> reserved "!>" + | FORALL -> reserved "!" + | EXISTS_TY -> reserved "?*" + | EXISTS -> reserved "?" + + | PI -> reserved "!!" + | SIGMA -> reserved "??" 
+ + | LESS -> reserved "<" + | ARROW -> reserved ">" + + | STAR -> reserved "*" + | PLUS -> reserved "+" + + | XOR -> reserved "<~>" + | EQUIV -> reserved "<=>" + | IMPLY -> reserved "=>" + | LEFT_IMPLY -> reserved "<=" + + | NOT -> reserved "~" + | AND -> reserved "&" + | VLINE -> reserved "|" + | NOTAND -> reserved "~&" + | NOTVLINE -> reserved "~|" + + | EQUAL -> reserved "=" + | NOT_EQUAL -> reserved "!=" + | GENTZEN_ARROW -> reserved "-->" + + | ITE_F -> reserved "$ite_f" + | ITE_T -> reserved "$ite_t" + | LET_TF -> reserved "$let_tf" + | LET_FF -> reserved "$let_ff" + | LET_FT -> reserved "$let_ft" + | LET_TT -> reserved "$let_tt" + + | DOLLAR_THF -> reserved "$thf" + | DOLLAR_TFF -> reserved "$tff" + | DOLLAR_FOF -> reserved "$fof" + | DOLLAR_CNF -> reserved "$cnf" + | DOLLAR_FOT -> reserved "$fot" + + | LOWER_WORD s -> T.descr ~kind:"lower word" s + | UPPER_WORD s -> T.descr ~kind:"upper_word" s + | SINGLE_QUOTED s -> T.descr ~kind:"single-quoted word" s + | DISTINCT_OBJECT s -> T.descr ~kind:"distinct object" s + | DOLLAR_WORD s -> T.descr ~kind:"dollar word" s + | DOLLAR_DOLLAR_WORD s -> T.descr ~kind:"double dollar word" s + | REAL s -> T.descr ~kind:"real literal" s + | RATIONAL s -> T.descr ~kind:"rational literal" s + | INTEGER s -> T.descr ~kind:"integer literal" s + +} + +let printable_char = [^ '\n'] +let not_star_slash = ([^ '*']* '*'+ [^ '/' '*'])* [^ '*']* +let comment_line = ['%' '#'] printable_char* +let comment_block = '/' '*' not_star_slash '*' '/' +let comment = comment_line | comment_block + +let sq_char = [^ '\\' '''] | "\\\\" | "\\'" +let do_char = [^ '"' '\\' ] | "\\\\" | "\\\"" +let single_quoted = ''' sq_char+ ''' +let distinct_object = '"' do_char* '"' + +let zero_numeric = '0' +let non_zero_numeric = ['1' - '9'] +let numeric = ['0' - '9'] +let sign = ['+' '-'] + +let dot_decimal = '.' 
numeric + +let positive_decimal = non_zero_numeric numeric* +let decimal = zero_numeric | positive_decimal +let unsigned_integer = decimal +let signed_integer = sign unsigned_integer +let integer = signed_integer | unsigned_integer +let decimal_fraction = decimal dot_decimal +let decimal_exponent = (decimal | decimal_fraction) ['e' 'E'] integer +let unsigned_real = decimal_fraction | decimal_exponent +let signed_real = sign unsigned_real +let real = signed_real | unsigned_real +let unsigned_rational = decimal '/' positive_decimal +let signed_rational = sign unsigned_rational +let rational = signed_rational | unsigned_rational + +let lower_alpha = ['a' - 'z'] +let upper_alpha = ['A' - 'Z'] +let alpha_numeric = lower_alpha | upper_alpha | numeric | '_' + +let upper_word = upper_alpha alpha_numeric* +let lower_word = lower_alpha alpha_numeric* +let dollar_word = '$' lower_word +let dollar_dollar_word = "$$" lower_word + +rule token newline = parse + | comment + { String.iter (function + | '\n' -> newline lexbuf + | _ -> () + ) (Lexing.lexeme lexbuf); + token newline lexbuf } + + | '\n' { newline lexbuf; token newline lexbuf } + | [' ' '\t' '\r'] { token newline lexbuf } + | eof { EOF } + + | '.' { DOT } + | ',' { COMMA } + | ':' { COLON } + + | '(' { LEFT_PAREN } + | ')' { RIGHT_PAREN } + | '[' { LEFT_BRACKET } + | ']' { RIGHT_BRACKET } + + | '^' { LAMBDA } + | '@' { APPLY } + | "@+" { INDEFINITE_DESCRIPTION } + | "@-" { DEFINITE_DESCRIPTION } + | "!>" { FORALL_TY } + | '!' { FORALL } + | "?*" { EXISTS_TY } + | '?' { EXISTS } + + | "!!" { PI } + | "??" 
{ SIGMA } + + | '<' { LESS } + | '>' { ARROW } + + | '*' { STAR } + | '+' { PLUS } + + | "<~>" { XOR } + | "<=>" { EQUIV } + | "=>" { IMPLY } + | "<=" { LEFT_IMPLY } + + | '~' { NOT } + | '&' { AND } + | '|' { VLINE } + | "~&" { NOTAND } + | "~|" { NOTVLINE } + + | '=' { EQUAL } + | "!=" { NOT_EQUAL } + | "-->" { GENTZEN_ARROW } + + + | lower_word { + match Lexing.lexeme lexbuf with + | "cnf" -> CNF + | "fof" -> FOF + | "tff" -> TFF + | "thf" -> THF + | "tpi" -> TPI + | "include" -> INCLUDE + | s -> LOWER_WORD(s) + } + | dollar_word { + match Lexing.lexeme lexbuf with + | "$cnf" -> DOLLAR_CNF + | "$fof" -> DOLLAR_FOF + | "$tff" -> DOLLAR_TFF + | "$thf" -> DOLLAR_THF + | "$fot" -> DOLLAR_FOT + | "$ite_f" -> ITE_F + | "$ite_t" -> ITE_T + | "$let_tf" -> LET_TF + | "$let_ft" -> LET_FT + | "$let_ff" -> LET_FF + | "$let_tt" -> LET_TT + | s -> DOLLAR_WORD(s) + } + | upper_word { UPPER_WORD(Lexing.lexeme lexbuf) } + | dollar_dollar_word { DOLLAR_DOLLAR_WORD(Lexing.lexeme lexbuf) } + | single_quoted { SINGLE_QUOTED(Lexing.lexeme lexbuf) } + | distinct_object { DISTINCT_OBJECT(Lexing.lexeme lexbuf) } + | integer { INTEGER(Lexing.lexeme lexbuf) } + | rational { RATIONAL(Lexing.lexeme lexbuf) } + | real { REAL(Lexing.lexeme lexbuf) } + + | _ { raise Error } + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/parser.mly b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/parser.mly new file mode 100644 index 0000000000000000000000000000000000000000..d161209ec5f26cec275583078328184393d48d90 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/parser.mly @@ -0,0 +1,875 @@ + +(* This file is free software, part of dolmem. 
See file "LICENSE" for more information *) + +%parameter <L : Dolmen_intf.Location.S> +%parameter <I : Ast.Id> +%parameter <T : Ast.Term + with type location := L.t and type id := I.t> +%parameter <S : Ast.Statement + with type location := L.t and type id := I.t and type term := T.t> + +%start <S.t list> file +%start <S.t option> input + +%% + +/* Hand-written following syntax.bnf */ + +/* Complete file, i.e Top-level declarations */ + +file: + | l=tptp_input* EOF { l } + +input: + | i=tptp_input + { Some i } + | EOF + { None } + +tptp_input: + | i=annotated_formula + | i=tptp_include + { i } + +/* Formula records */ + +annotated_formula: + | f=thf_annotated + | f=tff_annotated + | f=fof_annotated + | f=cnf_annotated + | f=tpi_annotated + { f } + +tpi_annotated: + | TPI LEFT_PAREN s=name COMMA r=formula_role COMMA + f=tpi_formula annot=annotations RIGHT_PAREN DOT + { let loc = L.mk_pos $startpos $endpos in S.tpi ~loc ?annot s r f } + +thf_annotated: + | THF LEFT_PAREN s=name COMMA r=formula_role COMMA + f=thf_formula annot=annotations RIGHT_PAREN DOT + { let loc = L.mk_pos $startpos $endpos in S.thf ~loc ?annot s r f } + +tff_annotated: + | TFF LEFT_PAREN s=name COMMA r=formula_role COMMA + f=tff_formula annot=annotations RIGHT_PAREN DOT + { let loc = L.mk_pos $startpos $endpos in S.tff ~loc ?annot s r f } + +fof_annotated: + | FOF LEFT_PAREN s=name COMMA r=formula_role COMMA + f=fof_formula annot=annotations RIGHT_PAREN DOT + { let loc = L.mk_pos $startpos $endpos in S.fof ~loc ?annot s r f } + +cnf_annotated: + | CNF LEFT_PAREN s=name COMMA r=formula_role COMMA + f=cnf_formula annot=annotations RIGHT_PAREN DOT + { let loc = L.mk_pos $startpos $endpos in S.cnf ~loc ?annot s r f } + +annotations: + | COMMA s=source i=optional_info + { let loc = L.mk_pos $startpos $endpos in Some (S.annot ~loc s i) } + | { None } + +tpi_formula: + | f=fof_formula { f } + +formula_role: + | s=LOWER_WORD { s } + + +/* THF formulas */ + +thf_formula: + | f=thf_sequent + | 
f=thf_logic_formula + { f } + +thf_logic_formula: + | f=thf_binary_formula + | f=thf_unitary_formula + | f=thf_type_formula + | f=thf_subtype + { f } + +thf_binary_formula: + | f=thf_binary_pair + | f=thf_binary_tuple + | f=thf_binary_type + { f } + +thf_binary_pair: + | f=thf_unitary_formula c=thf_pair_connective g=thf_unitary_formula + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc c [f; g] } + +thf_binary_tuple: + | f=thf_or_formula + | f=thf_and_formula + | f=thf_apply_formula + { f } + +thf_or_formula: + | f=thf_unitary_formula VLINE g=thf_unitary_formula + | f=thf_or_formula VLINE g=thf_unitary_formula + { let op = let loc = L.mk_pos $startpos($2) $endpos($2) in T.or_t ~loc () in + let loc = L.mk_pos $startpos $endpos in T.apply ~loc op [f; g] } + +thf_and_formula: + | f=thf_unitary_formula AND g=thf_unitary_formula + | f=thf_and_formula AND g=thf_unitary_formula + { let op = let loc = L.mk_pos $startpos($2) $endpos($2) in T.and_t ~loc () in + let loc = L.mk_pos $startpos $endpos in T.apply ~loc op [f; g] } + +thf_apply_formula: + | f=thf_unitary_formula APPLY g=thf_unitary_formula + | f=thf_apply_formula APPLY g=thf_unitary_formula + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc f [g] } + +thf_unitary_formula: + | f=thf_quantified_formula + | f=thf_unary_formula + | f=thf_atom + | f=thf_conditional + | f=thf_let + | LEFT_PAREN f=thf_logic_formula RIGHT_PAREN + { f } + +thf_quantified_formula: + | q=thf_quantifier LEFT_BRACKET l=thf_variable_list RIGHT_BRACKET COLON f=thf_unitary_formula + { let loc = L.mk_pos $startpos $endpos in q ~loc l f } + +thf_variable_list: + | v=thf_variable + { [ v ] } + | v=thf_variable COMMA l=thf_variable_list + { v :: l } + +thf_variable: + | v=thf_typed_variable + | v=variable + { v } + +thf_typed_variable: + | c=variable COLON ty=thf_top_level_type + { let loc = L.mk_pos $startpos $endpos in T.colon ~loc c ty } + +thf_unary_formula: + | c=thf_unary_connective LEFT_PAREN f=thf_logic_formula RIGHT_PAREN + { let 
loc = L.mk_pos $startpos $endpos in T.apply ~loc c [f] } + +thf_atom: + | t=term + | t=thf_conn_term + { t } + +thf_conditional: + | ITE_F LEFT_PAREN cond=thf_logic_formula COMMA + if_then=thf_logic_formula COMMA if_else=thf_logic_formula RIGHT_PAREN + { let loc = L.mk_pos $startpos $endpos in T.ite ~loc cond if_then if_else } + +thf_let: + | LET_TF LEFT_PAREN l=thf_let_term_defn COMMA f=thf_formula RIGHT_PAREN + | LET_FF LEFT_PAREN l=thf_let_formula_defn COMMA f=thf_formula RIGHT_PAREN + { let loc = L.mk_pos $startpos $endpos in T.letin ~loc [l] f } + +thf_let_term_defn: + | f=thf_quantified_formula { f } + +thf_let_formula_defn: + | f=thf_quantified_formula { f } + +thf_type_formula: + | f=thf_typeable_formula COLON ty=thf_top_level_type + { let loc = L.mk_pos $startpos $endpos in T.colon ~loc f ty } + +thf_typeable_formula: + | f=thf_atom + | LEFT_PAREN f=thf_logic_formula RIGHT_PAREN + { f } + +thf_subtype: + | t=constant subtype_sign u=constant + { let loc = L.mk_pos $startpos $endpos in T.subtype ~loc t u } + +thf_top_level_type: + | f=thf_logic_formula { f } + +thf_unitary_type: + | f=thf_unitary_formula { f } + +thf_binary_type: + | t=thf_mapping_type + | t=thf_xprod_type + | t=thf_union_type + { t } + +thf_mapping_type: + | arg=thf_unitary_type ARROW ret=thf_unitary_type + | arg=thf_unitary_type ARROW ret=thf_mapping_type + { let loc = L.mk_pos $startpos $endpos in T.arrow ~loc arg ret } + +thf_xprod_type: + | left=thf_unitary_type STAR right=thf_unitary_type + | left=thf_xprod_type STAR right=thf_unitary_type + { let loc = L.mk_pos $startpos $endpos in T.product ~loc left right } + +thf_union_type: + | left=thf_unitary_type PLUS right=thf_unitary_type + | left=thf_union_type PLUS right=thf_unitary_type + { let loc = L.mk_pos $startpos $endpos in T.union ~loc left right } + +thf_sequent: + | LEFT_PAREN s = thf_sequent RIGHT_PAREN + { s } + | hyp=thf_tuple GENTZEN_ARROW goal=thf_tuple + { let loc = L.mk_pos $startpos $endpos in T.sequent ~loc hyp goal } + 
+thf_tuple: + | LEFT_BRACKET RIGHT_BRACKET + { [] } + | LEFT_BRACKET l = thf_tuple_list RIGHT_BRACKET + { l } + +thf_tuple_list: + | f=thf_logic_formula + { [ f ] } + | f=thf_logic_formula COMMA l=thf_tuple_list + { f :: l } + + +/* TFF formula */ + +tff_formula: + | f=tff_logic_formula + | f=tff_typed_atom + | f=tff_sequent + { f } + +tff_logic_formula: + | f=tff_binary_formula + | f=tff_unitary_formula + { f } + +tff_binary_formula: + | f=tff_binary_nonassoc + | f=tff_binary_assoc + { f } + +tff_binary_nonassoc: + | f=tff_unitary_formula c=binary_connective g=tff_unitary_formula + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc c [f; g] } + +tff_binary_assoc: + | f=tff_or_formula + | f=tff_and_formula + { f } + +tff_or_formula: + | f=tff_unitary_formula VLINE g=tff_unitary_formula + | f=tff_or_formula VLINE g=tff_unitary_formula + { let op = let loc = L.mk_pos $startpos($2) $endpos($2) in T.or_t ~loc () in + let loc = L.mk_pos $startpos $endpos in T.apply ~loc op [f; g] } + +tff_and_formula: + | f=tff_unitary_formula AND g=tff_unitary_formula + | f=tff_and_formula AND g=tff_unitary_formula + { let op = let loc = L.mk_pos $startpos($2) $endpos($2) in T.and_t ~loc () in + let loc = L.mk_pos $startpos $endpos in T.apply ~loc op [f; g] } + +tff_unitary_formula: + | f=tff_quantified_formula + | f=tff_unary_formula + | f=atomic_formula + | f=tff_conditional + | f=tff_let + | LEFT_PAREN f=tff_logic_formula RIGHT_PAREN + { f } + +tff_quantified_formula: + | q=fol_quantifier LEFT_BRACKET l=tff_variable_list RIGHT_BRACKET COLON f=tff_unitary_formula + { let loc = L.mk_pos $startpos $endpos in q ~loc l f } + +tff_variable_list: + | v=tff_variable + { [ v ] } + | v=tff_variable COMMA l=tff_variable_list + { v :: l } + +tff_variable: + | v=tff_typed_variable + | v=variable + { v } + +tff_typed_variable: + | v=variable COLON ty=tff_atomic_type + { let loc = L.mk_pos $startpos $endpos in T.colon ~loc v ty } + +tff_unary_formula: + | u=unary_connective 
f=tff_unitary_formula + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc u [f] } + | f=fol_infix_unary + { f } + +tff_conditional: + | ITE_F LEFT_PAREN cond=tff_logic_formula COMMA if_then=tff_logic_formula COMMA + if_else=tff_logic_formula RIGHT_PAREN + { let loc = L.mk_pos $startpos $endpos in T.ite ~loc cond if_then if_else } + +tff_let: + | LET_TF LEFT_PAREN l=tff_let_term_defn COMMA f=tff_formula RIGHT_PAREN + | LET_FF LEFT_PAREN l=tff_let_formula_defn COMMA f=tff_formula RIGHT_PAREN + { let loc = L.mk_pos $startpos $endpos in T.letin ~loc [l] f } + +tff_let_term_defn: + | FORALL LEFT_BRACKET l=tff_variable_list RIGHT_BRACKET COLON t=tff_let_term_defn + { let loc = L.mk_pos $startpos $endpos in T.forall ~loc l t } + | t=tff_let_term_binding + { t } + +tff_let_term_binding: + | t=term EQUAL u=term + { let f = let loc = L.mk_pos $startpos($2) $endpos($2) in T.eq_t ~loc () in + let loc = L.mk_pos $startpos $endpos in T.apply ~loc f [t; u] } + | LEFT_PAREN t=tff_let_term_binding RIGHT_PAREN + { t } + +tff_let_formula_defn: + | FORALL LEFT_BRACKET l=tff_variable_list RIGHT_BRACKET COLON t=tff_let_formula_defn + { let loc = L.mk_pos $startpos $endpos in T.forall ~loc l t } + | t=tff_let_formula_binding + { t } + +tff_let_formula_binding: + | t=atomic_formula EQUIV u=tff_unitary_formula + { let f = let loc = L.mk_pos $startpos($2) $endpos($2) in T.equiv_t ~loc () in + let loc = L.mk_pos $startpos $endpos in T.apply ~loc f [t; u] } + | LEFT_PAREN t=tff_let_formula_binding RIGHT_PAREN + { t } + +tff_sequent: + | hyp=tff_tuple GENTZEN_ARROW goal=tff_tuple + { let loc = L.mk_pos $startpos $endpos in T.sequent ~loc hyp goal } + | LEFT_PAREN t=tff_sequent RIGHT_PAREN + { t } + +tff_tuple: + | LEFT_BRACKET RIGHT_BRACKET + { [] } + | LEFT_BRACKET l=tff_tuple_list RIGHT_BRACKET + { l } + +tff_tuple_list: + | f=tff_logic_formula + { [ f ] } + | f=tff_logic_formula COMMA l=tff_tuple_list + { f :: l } + +tff_typed_atom: + | t=tff_untyped_atom COLON ty=tff_top_level_type + 
{ let loc = L.mk_pos $startpos $endpos in T.colon ~loc t ty } + | LEFT_PAREN t=tff_typed_atom RIGHT_PAREN + { t } + +tff_untyped_atom: + | f=tptp_functor + | f=system_functor + { f } + +tff_top_level_type: + | t=tff_atomic_type + | t=tff_mapping_type + | t=tff_quantified_type + | LEFT_PAREN t=tff_top_level_type RIGHT_PAREN + { t } + +tff_quantified_type: + | FORALL_TY LEFT_BRACKET l=tff_variable_list RIGHT_BRACKET COLON t=tff_monotype + { let loc = L.mk_pos $startpos $endpos in T.pi ~loc l t } + +tff_monotype: + | t=tff_atomic_type + | LEFT_PAREN t=tff_mapping_type RIGHT_PAREN + { t } + +tff_unitary_type: + | t=tff_atomic_type + | LEFT_PAREN t=tff_xprod_type RIGHT_PAREN + { t } + +tff_atomic_type: + | t=atomic_word + | t=defined_type + | t=variable + { t } + | f=atomic_word LEFT_PAREN l=tff_type_arguments RIGHT_PAREN + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc f l } + +tff_type_arguments: + | t=tff_atomic_type + { [ t ] } + | t=tff_atomic_type COMMA l=tff_type_arguments + { t :: l } + +tff_mapping_type: + | arg=tff_unitary_type ARROW ret=tff_atomic_type + { let loc = L.mk_pos $startpos $endpos in T.arrow ~loc arg ret } + +tff_xprod_type: + | t=tff_unitary_type STAR u=tff_atomic_type + | t=tff_xprod_type STAR u=tff_atomic_type + { let loc = L.mk_pos $startpos $endpos in T.product ~loc t u } + + +/* FOF formulas */ + +fof_formula: + | f=fof_logic_formula + | f=fof_sequent + { f } + +fof_logic_formula: + | f=fof_binary_formula + | f=fof_unitary_formula + { f } + +fof_binary_formula: + | f=fof_binary_nonassoc + | f=fof_binary_assoc + { f } + +fof_binary_nonassoc: + | f=fof_unitary_formula c=binary_connective g=fof_unitary_formula + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc c [f; g] } + +fof_binary_assoc: + | f=fof_or_formula + | f=fof_and_formula + { f } + +fof_or_formula: + | f=fof_unitary_formula VLINE g=fof_unitary_formula + | f=fof_or_formula VLINE g=fof_unitary_formula + { let op = let loc = L.mk_pos $startpos($2) $endpos($2) in T.or_t 
~loc () in + let loc = L.mk_pos $startpos $endpos in T.apply ~loc op [f; g] } + +fof_and_formula: + | f=fof_unitary_formula AND g=fof_unitary_formula + | f=fof_and_formula AND g=fof_unitary_formula + { let op = let loc = L.mk_pos $startpos($2) $endpos($2) in T.and_t ~loc () in + let loc = L.mk_pos $startpos $endpos in T.apply ~loc op [f; g] } + +fof_unitary_formula: + | f=fof_quantified_formula + | f=fof_unary_formula + | f=atomic_formula + | LEFT_PAREN f=fof_logic_formula RIGHT_PAREN + { f } + +fof_quantified_formula: + | q=fol_quantifier LEFT_BRACKET l=fof_variable_list RIGHT_BRACKET COLON f=fof_unitary_formula + { let loc = L.mk_pos $startpos $endpos in q ~loc l f } + +fof_variable_list: + | v=variable + { [ v ] } + | v=variable COMMA l=fof_variable_list + { v :: l } + +fof_unary_formula: + | c=unary_connective f=fof_unitary_formula + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc c [f] } + | f=fol_infix_unary + { f } + +fof_sequent: + | hyp=fof_tuple GENTZEN_ARROW goal=fof_tuple + { let loc = L.mk_pos $startpos $endpos in T.sequent ~loc hyp goal } + | LEFT_PAREN t=fof_sequent RIGHT_PAREN + { t } + +fof_tuple: + | LEFT_BRACKET RIGHT_BRACKET + { [] } + | LEFT_BRACKET l=fof_tuple_list RIGHT_BRACKET + { l } + +fof_tuple_list: + | f=fof_logic_formula + { [ f ] } + | f=fof_logic_formula COMMA l=fof_tuple_list + { f :: l } + +cnf_formula: + | LEFT_PAREN f=disjunction RIGHT_PAREN | f=disjunction + { f } + +disjunction: + | x=literal + { x } + | f=disjunction VLINE x=literal + { let op = let loc = L.mk_pos $startpos($2) $endpos($2) in T.or_t ~loc () in + let loc = L.mk_pos $startpos $endpos in T.apply ~loc op [f; x] } + +literal: + | f=atomic_formula + { f } + | c=unary_negation f=atomic_formula + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc c [f] } + | f=fol_infix_unary + { f } + + +/* Special formulas */ + +thf_conn_term: + | t=thf_pair_connective + | t=assoc_connective + | t=thf_unary_connective + { t } + +fol_infix_unary: + | t=term 
f=infix_inequality u=term + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc f [t; u] } + + +/* THF connective */ + +thf_quantifier: + | q=fol_quantifier + { q } + | LAMBDA + { T.lambda } + | FORALL_TY + { T.forall } + | EXISTS_TY + { T.exists } + | DEFINITE_DESCRIPTION + { T.description } + | INDEFINITE_DESCRIPTION + { T.choice } + +thf_pair_connective: + | t=infix_equality + | t=infix_inequality + | t=binary_connective + { t } + +thf_unary_connective: + | c=unary_connective + { c } + /* These two quantifiers have been removed from THF0, and will come back in THF1 + when it is released, so it doesn't really matter how we handle them right now*/ + | PI + { let loc = L.mk_pos $startpos $endpos in T.pi_t ~loc () } + | SIGMA + { let loc = L.mk_pos $startpos $endpos in T.sigma_t ~loc () } + +subtype_sign: + | LESS LESS { () } + +fol_quantifier: + | FORALL + { T.forall } + | EXISTS + { T.exists } + +binary_connective: + | EQUIV + { let loc = L.mk_pos $startpos $endpos in T.equiv_t ~loc () } + | IMPLY + { let loc = L.mk_pos $startpos $endpos in T.implies_t ~loc () } + | LEFT_IMPLY + { let loc = L.mk_pos $startpos $endpos in T.implied_t ~loc () } + | XOR + { let loc = L.mk_pos $startpos $endpos in T.xor_t ~loc () } + | NOTVLINE + { let loc = L.mk_pos $startpos $endpos in T.nor_t ~loc () } + | NOTAND + { let loc = L.mk_pos $startpos $endpos in T.nand_t ~loc () } + +assoc_connective: + | VLINE + { let loc = L.mk_pos $startpos $endpos in T.or_t ~loc () } + | AND + { let loc = L.mk_pos $startpos $endpos in T.and_t ~loc () } + +unary_connective: + | c=unary_negation + { c } + +unary_negation: + | NOT + { let loc = L.mk_pos $startpos $endpos in T.not_t ~loc () } + +defined_type: + | t=atomic_defined_word + { t } + +/* First order atoms */ + +atomic_formula: + | f=plain_atomic_formula + | f=defined_atomic_formula + | f=system_atomic_formula + { f } + +plain_atomic_formula: + | t=plain_term + { t } + +defined_atomic_formula: + | f=defined_plain_formula + | 
f=defined_infix_formula + { f } + +defined_plain_formula: + | f=defined_plain_term + { f } + +defined_infix_formula: + | t=term c=defined_infix_pred u=term + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc c [t; u] } + +defined_infix_pred: + | t=infix_equality + { t } + +infix_equality: + | EQUAL + { let loc = L.mk_pos $startpos $endpos in T.eq_t ~loc () } + +infix_inequality: + | NOT_EQUAL + { let loc = L.mk_pos $startpos $endpos in T.neq_t ~loc () } + +system_atomic_formula: + | t=system_term + { t } + +/* First order terms */ + +term: + | t=function_term + | t=variable + | t=conditional_term + | t=let_term + { t } + +function_term: + | t=plain_term + | t=defined_term + | t=system_term + { t } + +plain_term: + | c=constant + { c } + | f=tptp_functor LEFT_PAREN l=arguments RIGHT_PAREN + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc f l } + +constant: + | c=tptp_functor + { c } + +tptp_functor: + | w=atomic_word + { w } + +defined_term: + | t=defined_atom + | t=defined_atomic_term + { t } + +defined_atom: + | a=number + | a=distinct_object + { a } + +defined_atomic_term: + | t=defined_plain_term + { t } + +defined_plain_term: + | c=defined_constant + { c } + | f=defined_functor LEFT_PAREN l=arguments RIGHT_PAREN + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc f l } + +defined_constant: + | c=defined_functor + { c } + +defined_functor: + | f=atomic_defined_word + { f } + +system_term: + | c=system_constant + { c } + | f=system_functor LEFT_PAREN l=arguments RIGHT_PAREN + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc f l } + +system_constant: + | f=system_functor + { f } + +system_functor: + | f=atomic_system_word + { f } + +variable: + | s=UPPER_WORD + { let loc = L.mk_pos $startpos $endpos in + T.var ~loc (I.mk I.term s) } + +arguments: + | t=term + { [ t ] } + | t=term COMMA l=arguments + { t :: l } + +conditional_term: + | ITE_T LEFT_PAREN cond=tff_logic_formula COMMA if_then=term COMMA if_else=term RIGHT_PAREN + { let loc = 
L.mk_pos $startpos $endpos in T.ite ~loc cond if_then if_else } + +let_term: + | LET_FT LEFT_PAREN l=tff_let_formula_defn COMMA t=term RIGHT_PAREN + | LET_TT LEFT_PAREN l=tff_let_term_defn COMMA t=term RIGHT_PAREN + { let loc = L.mk_pos $startpos $endpos in T.letin ~loc [l] t } + + +/* Formula sources */ + +source: + | t=general_term + { t } + +optional_info: + | COMMA i=useful_info + { i } + | { [] } + +useful_info: + | l=general_list + { l } + + +/* Inlcude directives */ + +tptp_include: + | INCLUDE LEFT_PAREN f=file_name g=formula_section RIGHT_PAREN DOT + { let loc = L.mk_pos $startpos $endpos in S.include_ ~loc f g } + +formula_section: + | COMMA LEFT_BRACKET l=name_list RIGHT_BRACKET + { l } + | { [] } + +name_list: + | n=name + { [ n ] } + | n=name COMMA l=name_list + { n :: l } + +general_term: + | d=general_data + { d } + | l=general_list + { let f = let loc = L.mk_pos $startpos $endpos in T.data_t ~loc () in + let loc = L.mk_pos $startpos $endpos in T.apply ~loc f l } + | d=general_data COLON t=general_term + { let loc = L.mk_pos $startpos $endpos in T.colon ~loc d t } + +general_data: + | d=atomic_word + | d=general_function + | d=variable + | d=number + | d=distinct_object + | d=formula_data + { d } + +general_function: + | f=atomic_word LEFT_PAREN l=general_terms RIGHT_PAREN + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc f l } + +formula_data: + | DOLLAR_THF LEFT_PAREN f=thf_formula RIGHT_PAREN + | DOLLAR_TFF LEFT_PAREN f=tff_formula RIGHT_PAREN + | DOLLAR_FOF LEFT_PAREN f=fof_formula RIGHT_PAREN + | DOLLAR_CNF LEFT_PAREN f=cnf_formula RIGHT_PAREN + | DOLLAR_FOT LEFT_PAREN f=term RIGHT_PAREN + { f } + +general_list: + | LEFT_BRACKET RIGHT_BRACKET + { [] } + | LEFT_BRACKET l=general_terms RIGHT_BRACKET + { l } + +general_terms: + | t=general_term + { [ t ] } + | t=general_term COMMA l=general_terms + { t :: l } + +/* General purposes */ + +/* + name: atomic_word | integer + + this production has been expanded to + produce ids instead of terms 
+*/ +name: + | s=LOWER_WORD + | s=SINGLE_QUOTED + | s=INTEGER + { I.mk I.decl s } + +atomic_word: + | s=LOWER_WORD + | s=SINGLE_QUOTED + { let loc = L.mk_pos $startpos $endpos in + T.const ~loc (I.mk I.term s) } + +atomic_defined_word: + | s=DOLLAR_WORD + { let loc = L.mk_pos $startpos $endpos in + T.const ~loc (I.mk I.term s) } + +atomic_system_word: + | s=DOLLAR_DOLLAR_WORD + { let loc = L.mk_pos $startpos $endpos in + T.const ~loc (I.mk I.term s) } + +number: + | n=integer + | n=rational + | n=real + { n } + +file_name: + | s=SINGLE_QUOTED + { let n = String.length s in String.sub s 1 (n - 2) } + +/* Wrapper around some lexical definitions */ + +distinct_object: + | s=DISTINCT_OBJECT + { let loc = L.mk_pos $startpos $endpos in + T.distinct ~loc (I.mk I.term s) } + +integer: + | n=INTEGER + { let loc = L.mk_pos $startpos $endpos in T.int ~loc n } + +rational: + | n=RATIONAL + { let loc = L.mk_pos $startpos $endpos in T.rat ~loc n } + +real: + | n=REAL + { let loc = L.mk_pos $startpos $endpos in T.real ~loc n } + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/syntax.messages b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/syntax.messages index 56bb319aba8904bb66459f8315c8478d10a7fca6..e8d08c618f4a062c685b1c87554d76d67ad9188d 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/syntax.messages +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/syntax.messages @@ -131,13 +131,13 @@ input: FOF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED RIGHT_B ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula -## In state 270, spurious reduction of production fof_logic_formula -> fof_unitary_formula -## In state 451, spurious reduction of production fof_formula -> fof_logic_formula +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula +## In state 270, spurious reduction of production fof_logic_formula -> fof_unitary_formula +## In state 451, spurious reduction of production fof_formula -> fof_logic_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -299,14 +299,14 @@ input: TFF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA DOLLAR_WORD RIGHT_BRA ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 50, spurious reduction of production defined_constant -> defined_functor -## In state 52, spurious reduction of production defined_plain_term -> defined_constant -## In state 116, spurious reduction of production defined_plain_formula -> defined_plain_term -## In state 117, spurious reduction of production defined_atomic_formula -> defined_plain_formula -## In state 119, spurious reduction of production atomic_formula -> defined_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula -## In state 230, spurious reduction of production tff_formula -> tff_logic_formula +## In state 50, spurious reduction of production defined_constant -> defined_functor +## In state 52, spurious reduction of production defined_plain_term -> defined_constant +## In state 116, spurious reduction of production defined_plain_formula -> defined_plain_term +## In state 117, spurious reduction of production defined_atomic_formula -> defined_plain_formula +## In state 119, spurious reduction of production atomic_formula -> defined_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula +## In state 230, spurious reduction of production tff_formula -> tff_logic_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -444,9 +444,9 @@ input: THF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA XOR RIGHT_BRACKET ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom -## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula -## In state 421, spurious reduction of production thf_formula -> thf_logic_formula +## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom +## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula +## In state 421, spurious reduction of production thf_formula -> thf_logic_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -548,14 +548,14 @@ input: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED RIGHT_B ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula -## In state 270, spurious reduction of production fof_logic_formula -> fof_unitary_formula -## In state 451, spurious reduction of production fof_formula -> fof_logic_formula -## In state 496, spurious reduction of production tpi_formula -> fof_formula +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula +## In state 270, spurious reduction of 
production fof_logic_formula -> fof_unitary_formula +## In state 451, spurious reduction of production fof_formula -> fof_logic_formula +## In state 496, spurious reduction of production tpi_formula -> fof_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -682,12 +682,12 @@ file: CNF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LEFT_PAREN SINGLE_QUOT ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 464, spurious reduction of production literal -> atomic_formula -## In state 459, spurious reduction of production disjunction -> literal +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 464, spurious reduction of production literal -> atomic_formula +## In state 459, spurious reduction of production disjunction -> literal ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -778,12 +778,12 @@ file: CNF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED XOR ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 464, spurious reduction of production literal -> atomic_formula -## In state 459, spurious reduction of production disjunction -> literal +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 464, spurious reduction of production literal -> atomic_formula +## In state 459, spurious reduction of production disjunction -> literal ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -885,13 +885,13 @@ file: FOF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED RIGHT_BR ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula -## In state 270, spurious reduction of production fof_logic_formula -> fof_unitary_formula -## In state 451, spurious reduction of production fof_formula -> fof_logic_formula +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula +## In state 270, spurious reduction of production fof_logic_formula -> fof_unitary_formula +## In state 451, spurious reduction of production fof_formula -> fof_logic_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1128,14 +1128,14 @@ file: TFF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA DOLLAR_WORD RIGHT_BRAC ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 50, spurious reduction of production defined_constant -> defined_functor -## In state 52, spurious reduction of production defined_plain_term -> defined_constant -## In state 116, spurious reduction of production defined_plain_formula -> defined_plain_term -## In state 117, spurious reduction of production defined_atomic_formula -> defined_plain_formula -## In state 119, spurious reduction of production atomic_formula -> defined_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula -## In state 230, spurious reduction of production tff_formula -> tff_logic_formula +## In state 50, spurious reduction of production defined_constant -> defined_functor +## In state 52, spurious reduction of production defined_plain_term -> defined_constant +## In state 116, spurious reduction of production defined_plain_formula -> defined_plain_term +## In state 117, spurious reduction of production defined_atomic_formula -> defined_plain_formula +## In state 119, spurious reduction of production atomic_formula -> defined_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula +## In state 230, spurious reduction of production tff_formula -> tff_logic_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1251,12 +1251,12 @@ file: TFF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA ITE_F LEFT_PAREN SINGL ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1286,12 +1286,12 @@ file: TFF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA ITE_F LEFT_PAREN SINGL ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1321,12 +1321,12 @@ file: TFF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA ITE_F LEFT_PAREN SINGL ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1405,12 +1405,12 @@ file: TFF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LEFT_BRACKET SINGLE_QU ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1453,8 +1453,8 @@ file: TFF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LEFT_PAREN SINGLE_QUOT ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 218, spurious reduction of production tff_top_level_type -> tff_atomic_type -## In state 219, spurious reduction of production tff_typed_atom -> tff_untyped_atom COLON tff_top_level_type +## In state 218, spurious reduction of production tff_top_level_type -> tff_atomic_type +## In state 219, spurious reduction of production tff_typed_atom -> tff_untyped_atom COLON tff_top_level_type ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1472,12 +1472,12 @@ file: TFF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LEFT_PAREN SINGLE_QUOT ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 187, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula +## In state 187, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1509,13 +1509,13 @@ file: TFF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LET_FF LEFT_PAREN SING ## This implies that, although the LR(1) items shown above provide an ## accurate view of 
the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 187, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula -## In state 230, spurious reduction of production tff_formula -> tff_logic_formula +## In state 187, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula +## In state 230, spurious reduction of production tff_formula -> tff_logic_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1545,13 +1545,13 @@ file: TFF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LET_FF LEFT_PAREN SING ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 37, spurious reduction of production system_constant -> system_functor -## In state 39, spurious reduction of production system_term -> system_constant -## In state 112, spurious reduction of production system_atomic_formula -> system_term -## In state 113, spurious reduction of production atomic_formula -> system_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 176, spurious reduction of production tff_let_formula_binding -> atomic_formula EQUIV tff_unitary_formula -## In state 106, spurious reduction of production tff_let_formula_defn -> tff_let_formula_binding +## In state 37, spurious reduction of production system_constant -> system_functor +## In state 39, spurious reduction of production system_term -> system_constant +## In state 112, spurious reduction of production system_atomic_formula -> system_term +## In state 113, spurious reduction of production atomic_formula -> system_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 176, spurious reduction of production tff_let_formula_binding -> atomic_formula EQUIV tff_unitary_formula +## In state 106, spurious reduction of production tff_let_formula_defn -> tff_let_formula_binding ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1593,13 +1593,13 @@ file: TFF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LET_TF LEFT_PAREN UPPE ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 187, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula -## In state 230, spurious reduction of production tff_formula -> tff_logic_formula +## In state 187, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula +## In state 230, spurious reduction of production tff_formula -> tff_logic_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1702,12 +1702,12 @@ file: TFF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED AND DOLL ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 37, spurious reduction of production system_constant -> system_functor -## In state 39, spurious reduction of production system_term -> system_constant -## In state 112, spurious reduction of production system_atomic_formula -> system_term -## In state 113, spurious reduction of production atomic_formula -> system_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 156, spurious reduction of production tff_and_formula -> tff_unitary_formula AND tff_unitary_formula +## In state 37, spurious reduction of production system_constant -> system_functor +## In state 39, spurious reduction of production system_term -> system_constant +## In state 112, spurious reduction of production system_atomic_formula -> system_term +## In state 113, spurious reduction of production atomic_formula -> system_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 156, spurious reduction of production tff_and_formula -> tff_unitary_formula AND tff_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1846,7 +1846,7 @@ file: TFF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED COLON LE ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 218, spurious reduction of production tff_top_level_type -> tff_atomic_type +## In state 218, spurious reduction of production tff_top_level_type -> tff_atomic_type ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1976,7 +1976,7 @@ file: TFF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED COLON UP ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 218, spurious reduction of production tff_unitary_type -> tff_atomic_type +## In state 218, spurious reduction of production tff_unitary_type -> tff_atomic_type ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2070,12 +2070,12 @@ file: TFF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED VLINE DO ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 37, spurious reduction of production system_constant -> system_functor -## In state 39, spurious reduction of production system_term -> system_constant -## In state 112, spurious reduction of production system_atomic_formula -> system_term -## In state 113, spurious reduction of production atomic_formula -> system_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 149, spurious reduction of production tff_or_formula -> tff_unitary_formula VLINE tff_unitary_formula +## In state 37, spurious reduction of production system_constant -> system_functor +## In state 39, spurious reduction of production system_term -> system_constant +## In state 112, spurious reduction of production system_atomic_formula -> system_term +## In state 113, spurious reduction of production atomic_formula -> system_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 149, spurious reduction of production tff_or_formula -> tff_unitary_formula VLINE tff_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2240,8 +2240,8 @@ file: THF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA ITE_F LEFT_PAREN XOR C ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom -## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula +## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom +## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2259,8 +2259,8 @@ file: THF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA ITE_F LEFT_PAREN XOR C ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom -## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula +## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom +## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2278,8 +2278,8 @@ file: THF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA ITE_F LEFT_PAREN XOR R ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom -## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula +## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom +## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2310,11 +2310,11 @@ file: THF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LAMBDA LEFT_BRACKET UP ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom -## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula -## In state 374, spurious reduction of production thf_top_level_type -> thf_logic_formula -## In state 424, spurious reduction of production thf_typed_variable -> variable COLON thf_top_level_type -## In state 432, spurious reduction of production thf_variable -> thf_typed_variable +## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom +## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula +## In state 374, spurious reduction of production thf_top_level_type -> thf_logic_formula +## In state 424, spurious reduction of production thf_typed_variable -> variable COLON thf_top_level_type +## In state 432, spurious reduction of production thf_variable -> thf_typed_variable ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2480,8 +2480,8 @@ file: THF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LEFT_BRACKET XOR RIGHT ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom -## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula +## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom +## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2526,8 +2526,8 @@ file: THF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LEFT_PAREN XOR RIGHT_B ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom -## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula +## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom +## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2570,9 +2570,9 @@ file: THF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LET_FF LEFT_PAREN LAMB ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom -## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula -## In state 421, spurious reduction of production thf_formula -> thf_logic_formula +## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom +## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula +## In state 421, spurious reduction of production thf_formula -> thf_logic_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2638,9 +2638,9 @@ file: THF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LET_TF LEFT_PAREN LAMB ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom -## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula -## In state 421, spurious reduction of production thf_formula -> thf_logic_formula +## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom +## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula +## In state 421, spurious reduction of production thf_formula -> thf_logic_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2723,8 +2723,8 @@ file: THF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SIGMA LEFT_PAREN XOR R ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom -## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula +## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom +## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -2953,9 +2953,9 @@ file: THF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA XOR RIGHT_BRACKET ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom -## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula -## In state 421, spurious reduction of production thf_formula -> thf_logic_formula +## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom +## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula +## In state 421, spurious reduction of production thf_formula -> thf_logic_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -3022,8 +3022,8 @@ file: THF LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA XOR STAR LEFT_PAREN XO ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom -## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula +## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom +## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -3188,8 +3188,8 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA DOLLAR_DOLLAR_WORD STA ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 37, spurious reduction of production system_constant -> system_functor -## In state 39, spurious reduction of production system_term -> system_constant +## In state 37, spurious reduction of production system_constant -> system_functor +## In state 39, spurious reduction of production system_term -> system_constant ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -3233,8 +3233,8 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA DOLLAR_WORD STAR ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 50, spurious reduction of production defined_constant -> defined_functor -## In state 52, spurious reduction of production defined_plain_term -> defined_constant +## In state 50, spurious reduction of production defined_constant -> defined_functor +## In state 52, spurious reduction of production defined_plain_term -> defined_constant ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -3386,12 +3386,12 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA ITE_T LEFT_PAREN SINGL ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -3470,12 +3470,12 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LEFT_BRACKET SINGLE_QU ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## 
may provide an INCOMPLETE view of the future (what was expected next). -## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula -## In state 270, spurious reduction of production fof_logic_formula -> fof_unitary_formula +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula +## In state 270, spurious reduction of production fof_logic_formula -> fof_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -3518,12 +3518,12 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LEFT_PAREN SINGLE_QUOT ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula -## In state 270, spurious reduction of production fof_logic_formula -> fof_unitary_formula +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula +## In state 270, spurious reduction of production fof_logic_formula -> fof_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -3602,12 +3602,12 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LET_FT LEFT_PAREN LEFT ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 37, spurious reduction of production system_constant -> system_functor -## In state 39, spurious reduction of production system_term -> system_constant -## In state 112, spurious reduction of production system_atomic_formula -> system_term -## In state 113, spurious reduction of production atomic_formula -> system_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 176, spurious reduction of production tff_let_formula_binding -> atomic_formula EQUIV tff_unitary_formula +## In state 37, spurious reduction of production system_constant -> system_functor +## In state 39, spurious reduction of production system_term -> system_constant +## In state 112, spurious reduction of production system_atomic_formula -> system_term +## In state 113, spurious reduction of production atomic_formula -> system_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 176, spurious reduction of production tff_let_formula_binding -> atomic_formula EQUIV tff_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -3661,13 +3661,13 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LET_FT LEFT_PAREN SING ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 37, spurious reduction of production system_constant -> system_functor -## In state 39, spurious reduction of production system_term -> system_constant -## In state 112, spurious reduction of production system_atomic_formula -> system_term -## In state 113, spurious reduction of production atomic_formula -> system_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 176, spurious reduction of production tff_let_formula_binding -> atomic_formula EQUIV tff_unitary_formula -## In state 106, spurious reduction of production tff_let_formula_defn -> tff_let_formula_binding +## In state 37, spurious reduction of production system_constant -> system_functor +## In state 39, spurious reduction of production system_term -> system_constant +## In state 112, spurious reduction of production system_atomic_formula -> system_term +## In state 113, spurious reduction of production atomic_formula -> system_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 176, spurious reduction of production tff_let_formula_binding -> atomic_formula EQUIV tff_unitary_formula +## In state 106, spurious reduction of production tff_let_formula_defn -> tff_let_formula_binding ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -3697,10 +3697,10 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA LET_FT LEFT_PAREN SING ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -3935,12 +3935,12 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED AND DOLL ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 37, spurious reduction of production system_constant -> system_functor -## In state 39, spurious reduction of production system_term -> system_constant -## In state 112, spurious reduction of production system_atomic_formula -> system_term -## In state 113, spurious reduction of production atomic_formula -> system_atomic_formula -## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula -## In state 274, spurious reduction of production fof_and_formula -> fof_unitary_formula AND fof_unitary_formula +## In state 37, spurious reduction of production system_constant -> system_functor +## In state 39, spurious reduction of production system_term -> system_constant +## In state 112, spurious reduction of production system_atomic_formula -> system_term +## In state 113, spurious reduction of production atomic_formula -> system_atomic_formula +## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula +## In state 274, spurious reduction of production fof_and_formula -> fof_unitary_formula AND fof_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -3970,13 +3970,13 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED COMMA DO ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 464, spurious reduction of production literal -> atomic_formula -## In state 459, spurious reduction of production disjunction -> literal -## In state 466, spurious reduction of production cnf_formula -> disjunction +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 464, spurious reduction of production literal -> atomic_formula +## In state 459, spurious reduction of production disjunction -> literal +## In state 466, spurious reduction of production cnf_formula -> disjunction ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -4018,13 +4018,13 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED COMMA DO ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula -## In state 270, spurious reduction of production fof_logic_formula -> fof_unitary_formula -## In state 451, spurious reduction of production fof_formula -> fof_logic_formula +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula +## In state 270, spurious reduction of production fof_logic_formula -> fof_unitary_formula +## In state 451, spurious reduction of production fof_formula -> fof_logic_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -4102,13 +4102,13 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED COMMA DO ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 187, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula -## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula -## In state 230, spurious reduction of production tff_formula -> tff_logic_formula +## In state 187, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 145, spurious reduction of production tff_unitary_formula -> atomic_formula +## In state 146, spurious reduction of production tff_logic_formula -> tff_unitary_formula +## In state 230, spurious reduction of production tff_formula -> tff_logic_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -4162,9 +4162,9 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED COMMA DO ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom -## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula -## In state 421, spurious reduction of production thf_formula -> thf_logic_formula +## In state 379, spurious reduction of production thf_unitary_formula -> thf_atom +## In state 341, spurious reduction of production thf_logic_formula -> thf_unitary_formula +## In state 421, spurious reduction of production thf_formula -> thf_logic_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -4219,8 +4219,8 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED COMMA LE ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 478, spurious reduction of production general_term -> general_data -## In state 473, spurious reduction of production general_terms -> general_term +## In state 478, spurious reduction of production general_term -> general_data +## In state 473, spurious reduction of production general_terms -> general_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -4251,8 +4251,8 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED COMMA SI ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 478, spurious reduction of production general_term -> general_data -## In state 473, spurious reduction of production general_terms -> general_term +## In state 478, spurious reduction of production general_term -> general_data +## In state 473, spurious reduction of production general_terms -> general_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -4331,8 +4331,8 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED COMMA UP ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 478, spurious reduction of production general_term -> general_data -## In state 492, spurious reduction of production source -> general_term +## In state 478, spurious reduction of production general_term -> general_data +## In state 492, spurious reduction of production source -> general_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -4412,14 +4412,14 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED RIGHT_BR ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant -## In state 114, spurious reduction of production plain_atomic_formula -> plain_term -## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula -## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula -## In state 270, spurious reduction of production fof_logic_formula -> fof_unitary_formula -## In state 451, spurious reduction of production fof_formula -> fof_logic_formula -## In state 496, spurious reduction of production tpi_formula -> fof_formula +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant +## In state 114, spurious reduction of production plain_atomic_formula -> plain_term +## In state 115, spurious reduction of production atomic_formula -> plain_atomic_formula +## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula +## In state 270, spurious reduction of production fof_logic_formula -> fof_unitary_formula +## In state 451, spurious reduction of production fof_formula -> fof_logic_formula +## In state 496, spurious reduction of production tpi_formula -> fof_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -4450,8 +4450,8 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED STAR ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 32, spurious reduction of production constant -> tptp_functor -## In state 55, spurious reduction of production plain_term -> constant +## In state 32, spurious reduction of production constant -> tptp_functor +## In state 55, spurious reduction of production plain_term -> constant ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -4495,12 +4495,12 @@ file: TPI LEFT_PAREN SINGLE_QUOTED COMMA LOWER_WORD COMMA SINGLE_QUOTED VLINE DO ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 37, spurious reduction of production system_constant -> system_functor -## In state 39, spurious reduction of production system_term -> system_constant -## In state 112, spurious reduction of production system_atomic_formula -> system_term -## In state 113, spurious reduction of production atomic_formula -> system_atomic_formula -## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula -## In state 272, spurious reduction of production fof_or_formula -> fof_unitary_formula VLINE fof_unitary_formula +## In state 37, spurious reduction of production system_constant -> system_functor +## In state 39, spurious reduction of production system_term -> system_constant +## In state 112, spurious reduction of production system_atomic_formula -> system_term +## In state 113, spurious reduction of production atomic_formula -> system_atomic_formula +## In state 268, spurious reduction of production fof_unitary_formula -> atomic_formula +## In state 272, spurious reduction of production fof_or_formula -> fof_unitary_formula VLINE fof_unitary_formula ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -4664,4 +4664,3 @@ file: XOR ## <YOUR SYNTAX ERROR MESSAGE HERE> - diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/tokens.mly 
b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/tokens.mly new file mode 100644 index 0000000000000000000000000000000000000000..ebbaacc4aca16998bdb955894293633ee266c816 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/tptp/v6.3.0/tokens.mly @@ -0,0 +1,94 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + +/* Token declarations for Tptp parser */ + +%token EOF + +%token DOT +%token COMMA +%token COLON + +%token LEFT_PAREN +%token RIGHT_PAREN +%token LEFT_BRACKET +%token RIGHT_BRACKET + +%token CNF +%token FOF +%token TFF +%token THF +%token TPI +%token INCLUDE + +%token LAMBDA +%token APPLY +%token DEFINITE_DESCRIPTION +%token INDEFINITE_DESCRIPTION +%token FORALL_TY +%token FORALL +%token EXISTS_TY +%token EXISTS + +%token PI +%token SIGMA + +%token LESS +%token ARROW + +%token STAR +%token PLUS + +%token XOR +%token EQUIV +%token IMPLY +%token LEFT_IMPLY + +%token NOT +%token AND +%token VLINE +%token NOTAND +%token NOTVLINE + +%token EQUAL +%token NOT_EQUAL +%token GENTZEN_ARROW + +%token ITE_F +%token ITE_T +%token LET_TF +%token LET_FF +%token LET_FT +%token LET_TT + +%token DOLLAR_THF +%token DOLLAR_TFF +%token DOLLAR_FOF +%token DOLLAR_CNF +%token DOLLAR_FOT + +%token <string> LOWER_WORD +%token <string> UPPER_WORD +%token <string> SINGLE_QUOTED +%token <string> DISTINCT_OBJECT +%token <string> DOLLAR_WORD +%token <string> DOLLAR_DOLLAR_WORD +%token <string> REAL +%token <string> RATIONAL +%token <string> INTEGER + +/* + +%left VLINE +%left AND +%nonassoc EQUIV +%nonassoc XOR +%nonassoc IMPLY +%nonassoc LEFT_IMPLY +%nonassoc NOTVLINE +%nonassoc NOTAND + +*/ + +%% + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/ast.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/ast.ml new file mode 100644 index 0000000000000000000000000000000000000000..df374aad550ecfae5be6be1bee3717ad04460900 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/ast.ml @@ -0,0 
+1,153 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information. *) + +module type Id = sig + + type t + (** The type of identifiers *) + + type namespace + (** The type of namespaces for identifiers *) + + val term : namespace + (** The naemspace for terms, types, and pretty much everything *) + + val mk : namespace -> string -> t + (** Make identifiers from a namespace and a string. *) + +end + +module type Term = sig + + type t + (** The type of terms. *) + + type id + (** The type of identifiers *) + + type location + (** The type of locations attached to terms. *) + + val tType : ?loc:location -> unit -> t + val prop : ?loc:location -> unit -> t + val ty_int : ?loc:location -> unit -> t + val wildcard : ?loc:location -> unit -> t + val true_ : ?loc:location -> unit -> t + val false_ : ?loc:location -> unit -> t + (** Standard pre-defined constants. *) + + val quoted : ?loc:location -> string -> t + (** Create an attribute from a quoted string. *) + + val const : ?loc:location -> id -> t + (** Create a new constant. *) + + val int : ?loc:location -> string -> t + (** Create an integer constant from a string. *) + + val apply : ?loc:location -> t -> t list -> t + (** Application of terms. *) + + val colon : ?loc:location -> t -> t -> t + (** Juxtaposition of terms, usually used for annotating terms with types. *) + + val arrow : ?loc:location -> t -> t -> t + (** Arow, i.e function type constructor, currifyed. *) + + val eq : ?loc:location -> t -> t -> t + (** Make an equality between terms. *) + + val neq : ?loc:location -> t list -> t + (** Make an disequality between terms. *) + + val not_ : ?loc:location -> t -> t + val or_ : ?loc:location -> t list -> t + val and_ : ?loc:location -> t list -> t + val imply : ?loc:location -> t -> t -> t + val equiv : ?loc:location -> t -> t -> t + (** Usual propositional functions. *) + + val ite : ?loc:location -> t -> t -> t -> t + (** Conditional construction. 
*) + + val pi : ?loc:location -> t list -> t -> t + (** Dependant product, or polymorphic type quantification. + Used to build polymorphic function types such as, + [Pi [a] (Arrow a a)]. *) + + val letin : ?loc:location -> t list -> t -> t + (** Local term binding. *) + + val forall : ?loc:location -> t list -> t -> t + (** Universal propositional quantification. *) + + val exists : ?loc:location -> t list -> t -> t + (** Existencial propositional qantification. *) + + val match_ : ?loc:location -> t -> (t * t) list -> t + (** Pattern matching. The first term is the term to match, + and each tuple in the list is a match case, which is a pair + of a pattern and a match branch. *) + + val lambda : ?loc:location -> t list -> t -> t + (** Create a lambda. *) + + val uminus : ?loc:location -> t -> t + (** Arithmetic unary minus. *) + + val add : ?loc:location -> t -> t -> t + (** Arithmetic addition. *) + + val sub : ?loc:location -> t -> t -> t + (** Arithmetic substraction. *) + + val mult : ?loc:location -> t -> t -> t + (** Arithmetic multiplication. *) + + val lt : ?loc:location -> t -> t -> t + (** Arithmetic "lesser than" comparison (strict). *) + + val leq : ?loc:location -> t -> t -> t + (** Arithmetic "lesser or equal" comparison. *) + + val gt : ?loc:location -> t -> t -> t + (** Arithmetic "greater than" comparison (strict). *) + + val geq : ?loc:location -> t -> t -> t + (** Arithmetic "greater or equal" comparison. *) + +end + +module type Statement = sig + + type t + (** The type of statements. *) + + type id + (** The type of identifiers *) + + type term + (** The type of terms used in statements. *) + + type location + (** The type of locations attached to statements. 
*) + + val import : ?loc:location -> string -> t + + val data : ?loc:location -> ?attrs:term list -> t list -> t + val defs : ?loc:location -> ?attrs:term list -> t list -> t + + val rewrite : ?loc:location -> ?attrs:term list -> term -> t + val goal : ?loc:location -> ?attrs:term list -> term -> t + val assume : ?loc:location -> ?attrs:term list -> term -> t + val lemma : ?loc:location -> ?attrs:term list -> term -> t + + val decl : ?loc:location -> ?attrs:term list -> + id -> term -> t + val definition : ?loc:location -> ?attrs:term list -> + id -> term -> term list -> t + val inductive : ?loc:location -> ?attrs:term list -> + id -> term list -> (id * term list) list -> t + +end + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/dolmen_zf.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/dolmen_zf.ml index 9896c38bdc1a156982fbf7d4b01300ee128fd335..2734c84691ee0732f11d36f6ea5526f6c9018251 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/dolmen_zf.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/dolmen_zf.ml @@ -1,9 +1,9 @@ (* This file is free software, part of dolmen. 
See file "LICENSE" formore information *) -module type Id = Ast_zf.Id -module type Term = Ast_zf.Term -module type Statement = Ast_zf.Statement +module type Id = Ast.Id +module type Term = Ast.Term +module type Statement = Ast.Statement module Make (L : Dolmen_intf.Location.S) @@ -11,10 +11,10 @@ module Make (T : Term with type location := L.t and type id := I.t) (S : Statement with type location := L.t and type id := I.t and type term := T.t) = Dolmen_std.Transformer.Make(L)(struct - type token = Tokens_zf.token + type token = Tokens.token type statement = S.t let env = [] let incremental = true let error s = Syntax_messages.message s - end)(LexZf)(ParseZf.Make(L)(I)(T)(S)) + end)(Lexer)(Parser.Make(L)(I)(T)(S)) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/dolmen_zf.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/dolmen_zf.mli index ab1a2153b97e276b1636454d3a0cd7d1a52b9965..2161f4c648c2da48e192767dbf6059ace317832a 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/dolmen_zf.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/dolmen_zf.mli @@ -3,9 +3,9 @@ (** Zipperposition format input *) -module type Id = Ast_zf.Id -module type Term = Ast_zf.Term -module type Statement = Ast_zf.Statement +module type Id = Ast.Id +module type Term = Ast.Term +module type Statement = Ast.Statement (** Implementation requirements for the Zipperposition format. *) module Make @@ -13,6 +13,6 @@ module Make (I : Id) (T : Term with type location := L.t and type id := I.t) (S : Statement with type location := L.t and type id := I.t and type term := T.t) : - Dolmen_intf.Language.S with type statement = S.t + Dolmen_intf.Language.S with type statement = S.t and type file := L.file (** Functor to generate a parser for the Zipperposition format. 
*) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/dune b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/dune index e5f9a895d63af7c4be23547101e2266ce9e3067b..014031f1196fb06fd20cddf32750fd36a0fed930 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/dune +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/dune @@ -1,64 +1,12 @@ -(ocamllex (modules lexZf)) - -(menhir - (flags (--only-tokens)) - (modules tokens_zf) -) - -(menhir - (flags (--explain --table --external-tokens Tokens_zf)) - (modules tokens_zf parseZf) - (merge_into parseZf) -) - -(rule - (target syntax_messages.ml) - (deps (:tokens tokens_zf.mly) - (:parser parseZf.mly) - (:msg syntax.messages)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_zf %{tokens} - %{parser} --base %{parser} --compile-errors %{msg}))) -) - +; Language library definition (library (name dolmen_zf) (public_name dolmen.zf) + (instrumentation (backend bisect_ppx)) (libraries dolmen_std dolmen_intf menhirLib) - (modules Tokens_zf LexZf ParseZf Ast_zf Syntax_messages Dolmen_zf) -) - -; Convenience rule to generate a fresh messages file, -; and update an already existing one. 
-(rule - (target new.messages) - (mode promote-until-clean) - (deps (:tokens tokens_zf.mly) - (:parser parseZf.mly)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_zf %{tokens} - %{parser} --base %{parser} --list-errors))) + (modules Dolmen_zf Tokens Lexer Parser Ast Syntax_messages) ) -(rule - (target updated.messages) - (mode promote-until-clean) - (deps (:tokens tokens_zf.mly) - (:parser parseZf.mly) - (:msg syntax.messages)) - (action (with-stdout-to %{target} - (run menhir --external-tokens Tokens_zf %{tokens} - %{parser} --base %{parser} --update-errors %{msg}))) -) - -; Additional rule to add to runtest a check that the messages file is up-to-date -(rule - (alias runtest) - (deps (:tokens tokens_zf.mly) - (:parser parseZf.mly) - (:new new.messages) - (:msg syntax.messages)) - (action (run menhir --external-tokens Tokens_zf %{tokens} - %{parser} --base %{parser} --compare-errors %{new} --compare-errors %{msg})) -) +; Common include +(include ../dune.common) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/lexer.mll b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/lexer.mll new file mode 100644 index 0000000000000000000000000000000000000000..e6d44600b49edf15ccf387c087eda8611cdbd1a0 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/lexer.mll @@ -0,0 +1,182 @@ + +(* This file is free software, part of Zipperposition. See file "license" for more details. *) + +(** {1 Lexer for Zipperposition Formulas} *) + +{ + exception Error + + module T = Dolmen_std.Tok + + open Tokens + + let reserved s = + T.descr s + ~kind:"reserved word" + ~hint:"reserved words cannot be used as identifiers" + + let descr token : T.descr = + match (token : token) with + | EOF -> T.descr ~kind:"end of file token" "" + + | LEFT_PAREN -> reserved "(" + | RIGHT_PAREN -> reserved ")" + | LEFT_BRACKET -> reserved "{" + | RIGHT_BRACKET -> reserved "}" + + | WILDCARD -> reserved "_" + | COMMA -> reserved "," + | DOT -> reserved "." 
+ | SEMI_COLON -> reserved ";" + | COLON -> reserved ":" + | EQDEF -> reserved ":=" + | WHERE -> reserved "where" + | AND -> reserved "and" + + | LOGIC_TRUE -> reserved "true" + | LOGIC_FALSE -> reserved "false" + | LOGIC_AND -> reserved "&&" + | LOGIC_OR -> reserved "||" + | LOGIC_NOT -> reserved "~" + | LOGIC_IMPLY -> reserved "=>" + | LOGIC_FORALL -> reserved "forall" + | LOGIC_EXISTS -> reserved "exists" + | LOGIC_EQ -> reserved "=" + | LOGIC_NEQ -> reserved "!=" + | LOGIC_EQUIV -> reserved "<=>" + + | ARITH_PLUS -> reserved "+" + | ARITH_MINUS -> reserved "-" + | ARITH_PRODUCT -> reserved "*" + | ARITH_LT -> reserved "<" + | ARITH_LEQ -> reserved "<=" + | ARITH_GT -> reserved ">" + | ARITH_GEQ -> reserved ">=" + + | IF -> reserved "if" + | THEN -> reserved "then" + | ELSE -> reserved "else" + + | MATCH -> reserved "match" + | WITH -> reserved "with" + | END -> reserved "end" + | FUN -> reserved "fun" + + | INT -> reserved "int" + | PROP -> reserved "prop" + | TYPE -> reserved "type" + + | ASSERT -> reserved "assert" + | DATA -> reserved "data" + | DEF -> reserved "def" + | VAL -> reserved "val" + | GOAL -> reserved "goal" + | REWRITE -> reserved "rewrite" + | LEMMA -> reserved "lemma" + | INCLUDE -> reserved "include" + + | ARROW -> reserved "->" + | PI -> reserved "pi" + | VERTICAL_BAR -> reserved "|" + + | LOWER_WORD s -> T.descr ~kind:"lower word" s + | UPPER_WORD s -> T.descr ~kind:"upper word" s + | QUOTED s -> T.descr ~kind:"quoted word" s + | INTEGER s -> T.descr ~kind:"integer" s + +} + +let printable_char = [^ '\n'] +let comment_line = '#' printable_char* + +let numeric = ['0' - '9'] +let lower_alpha = ['a' - 'z'] +let upper_alpha = ['A' - 'Z'] +let alpha_numeric = lower_alpha | upper_alpha | numeric | '_' + +let upper_word = upper_alpha alpha_numeric* +let lower_word = lower_alpha alpha_numeric* + +(* let quoted = '"' ([^ '"'] | '\\' '"')* '"' *) + +let zero_numeric = '0' +let non_zero_numeric = ['1' - '9'] +let numeric = ['0' - '9'] +let sign = ['+' 
'-'] + +let dot_decimal = '.' numeric + +let positive_decimal = non_zero_numeric numeric* +let decimal = zero_numeric | positive_decimal +let unsigned_integer = decimal +let signed_integer = sign unsigned_integer +let integer = signed_integer | unsigned_integer + +rule token newline = parse + | eof { EOF } + | '\n' { newline lexbuf; token newline lexbuf } + | [' ' '\t' '\r'] { token newline lexbuf } + | comment_line { token newline lexbuf } + | '(' { LEFT_PAREN } + | ')' { RIGHT_PAREN } + | '[' { LEFT_BRACKET } + | ']' { RIGHT_BRACKET } + | '.' { DOT } + | ',' { COMMA } + | '_' { WILDCARD } + | ':' { COLON } + | ';' { SEMI_COLON } + | "=" { LOGIC_EQ } + | "!=" { LOGIC_NEQ } + | ":=" { EQDEF } + | "->" { ARROW } + | "val" { VAL } + | "def" { DEF } + | "where" { WHERE } + | "type" { TYPE } + | "prop" { PROP } + | "int" { INT } + | "assert" { ASSERT } + | "lemma" { LEMMA } + | "goal" { GOAL } + | "and" { AND } + | "rewrite" { REWRITE } + | "true" { LOGIC_TRUE } + | "false" { LOGIC_FALSE } + | "pi" { PI } + | "if" { IF } + | "then" { THEN } + | "else" { ELSE } + | "match" { MATCH } + | "with" { WITH } + | "end" { END } + | "data" { DATA } + | "fun" { FUN } + | "&&" { LOGIC_AND } + | "||" { LOGIC_OR } + | "|" { VERTICAL_BAR } + | "~" { LOGIC_NOT } + | "*" { ARITH_PRODUCT } + | "+" { ARITH_PLUS } + | "-" { ARITH_MINUS } + | "<" { ARITH_LT } + | "<=" { ARITH_LEQ } + | ">" { ARITH_GT } + | ">=" { ARITH_GEQ } + | "forall" { LOGIC_FORALL } + | "exists" { LOGIC_EXISTS } + | "=>" { LOGIC_IMPLY } + | "<=>" { LOGIC_EQUIV } + | "include" { INCLUDE } + | lower_word { LOWER_WORD(Lexing.lexeme lexbuf) } + | upper_word { UPPER_WORD(Lexing.lexeme lexbuf) } + | integer { INTEGER(Lexing.lexeme lexbuf) } + | '"' { quoted newline (Buffer.create 42) lexbuf } + | _ { raise Error } + +(* we unquote during lexing rather then during the parsing *) +and quoted newline b = parse + | '"' { QUOTED(Buffer.contents b) } + | '\\' '"' { Buffer.add_char b '"'; quoted newline b lexbuf } + | _ as c { if 
c = '\n' then newline lexbuf; + Buffer.add_char b c; quoted newline b lexbuf } + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/parser.mly b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/parser.mly new file mode 100644 index 0000000000000000000000000000000000000000..4579f17a142e18cbbb108a3da0063010e7f179e5 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/parser.mly @@ -0,0 +1,280 @@ + +(* This file is free software, part of Zipperposition. See file "license" for more details. *) + +(** {1 Parser for Zipperposition Formulas} *) + +%parameter <L : Dolmen_intf.Location.S> +%parameter <I : Ast.Id> +%parameter <T : Ast.Term with type location := L.t and type id := I.t> +%parameter <S : Ast.Statement with type location := L.t and type id := I.t and type term := T.t> + +%start <S.t list> file +%start <S.t option> input + +%% +name: + | w=LOWER_WORD + | w=UPPER_WORD + { I.mk I.term w } + +raw_var: + | s=name + { let loc = L.mk_pos $startpos $endpos in T.const ~loc s } + +wildcard: + | WILDCARD + { let loc = L.mk_pos $startpos $endpos in T.wildcard ~loc () } + +t_type: + | TYPE + { let loc = L.mk_pos $startpos $endpos in T.tType ~loc () } + +var_or_wildcard: + | v=raw_var + | v=wildcard + { v } + +typed_var_block: + | v=raw_var + | v=wildcard + { [ v ] } + | LEFT_PAREN l=raw_var+ COLON t=term RIGHT_PAREN + { let loc = L.mk_pos $startpos $endpos in + List.map (fun x -> T.colon ~loc x t) l } + +typed_var_list: + | l=typed_var_block + { l } + | l=typed_var_block l2=typed_var_list + { l @ l2 } + +typed_ty_var_block: + | v=raw_var + { [ v ] } + | v=raw_var COLON ty=t_type + { let loc = L.mk_pos $startpos $endpos in + [ T.colon ~loc v ty ] + } + | LEFT_PAREN l=raw_var+ COLON ty=t_type RIGHT_PAREN + { let loc = L.mk_pos $startpos $endpos in + List.map (fun x -> T.colon ~loc x ty) l + } + +typed_ty_var_list: + | l=typed_ty_var_block + { l } + | l=typed_ty_var_block l2=typed_ty_var_list + { l @ l2 } + +var: + | v=raw_var + | v=wildcard + { v 
} + +const: + | TYPE + { let loc = L.mk_pos $startpos $endpos in T.tType ~loc () } + | PROP + { let loc = L.mk_pos $startpos $endpos in T.prop ~loc () } + | INT + { let loc = L.mk_pos $startpos $endpos in T.ty_int ~loc () } + | LOGIC_TRUE + { let loc = L.mk_pos $startpos $endpos in T.true_ ~loc () } + | LOGIC_FALSE + { let loc = L.mk_pos $startpos $endpos in T.false_ ~loc () } + +match_branch: + | VERTICAL_BAR c=raw_var vars=var_or_wildcard* ARROW rhs=term + { let pattern = + let loc = L.mk_pos $startpos(c) $endpos(vars) in + T.apply ~loc c vars + in + (pattern,rhs) } + +atomic_term: + | v=var + { v } + | t=const + { t } + | i=INTEGER + { let loc = L.mk_pos $startpos $endpos in T.int ~loc i } + | LEFT_PAREN t=term RIGHT_PAREN + { t } + | MATCH t=term WITH l=match_branch+ END + { let loc = L.mk_pos $startpos $endpos in T.match_ ~loc t l } + +apply_term: + | t=atomic_term + { t } + | t=atomic_term u=atomic_term+ + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc t u } + | ARITH_MINUS t=apply_term + { let loc = L.mk_pos $startpos $endpos in T.uminus ~loc t } + +mult_term: + | t=apply_term + { t } + | a=apply_term ARITH_PRODUCT b=mult_term + { let loc = L.mk_pos $startpos $endpos in T.mult ~loc a b } + +%inline PLUS_OP: + | ARITH_PLUS + { T.add } + | ARITH_MINUS + { T.sub } + +plus_term: + | t=mult_term + { t } + | a=mult_term o=PLUS_OP b=plus_term + { let loc = Some (L.mk_pos $startpos $endpos) in o ?loc a b } + +%inline ARITH_OP: + | ARITH_LT + { T.lt } + | ARITH_LEQ + { T.leq } + | ARITH_GT + { T.gt } + | ARITH_GEQ + { T.geq } + +arith_op_term: + | t=plus_term + { t } + | a=plus_term o=ARITH_OP b=plus_term + { let loc = Some (L.mk_pos $startpos $endpos) in o ?loc a b } + +not_term: + | t=arith_op_term + { t } + | LOGIC_NOT t=arith_op_term + { let loc = L.mk_pos $startpos $endpos in T.not_ ~loc t } + +eq_term: + | t=not_term + { t } + | t=not_term LOGIC_EQ u=not_term + { let loc = L.mk_pos $startpos $endpos in T.eq ~loc t u } + | t=not_term LOGIC_NEQ u=not_term 
+ { let loc = L.mk_pos $startpos $endpos in T.neq ~loc [t; u] } + +and_term: + | t=eq_term + { t } + | t=eq_term LOGIC_AND u=and_term + { let loc = L.mk_pos $startpos $endpos in T.and_ ~loc [t; u] } + +or_term: + | t=and_term + { t } + | t=and_term LOGIC_OR u=or_term + { let loc = L.mk_pos $startpos $endpos in T.or_ ~loc [t; u] } + | t=and_term LOGIC_IMPLY u=or_term + { let loc = L.mk_pos $startpos $endpos in T.imply ~loc t u } + | t=and_term LOGIC_EQUIV u=or_term + { let loc = L.mk_pos $startpos $endpos in T.equiv ~loc t u } + +term: + | t=or_term + { t } + | LOGIC_FORALL vars=typed_var_list DOT t=term + { let loc = L.mk_pos $startpos $endpos in T.forall ~loc vars t } + | LOGIC_EXISTS vars=typed_var_list DOT t=term + { let loc = L.mk_pos $startpos $endpos in T.exists ~loc vars t } + | FUN vars=typed_var_list DOT t=term + { let loc = L.mk_pos $startpos $endpos in T.lambda ~loc vars t } + | t=apply_term ARROW u=term + { let loc = L.mk_pos $startpos $endpos in T.arrow ~loc t u } + | PI vars=typed_ty_var_list DOT t=term + { let loc = L.mk_pos $startpos $endpos in T.pi ~loc vars t } + | IF a=term THEN b=term ELSE c=term + { let loc = L.mk_pos $startpos $endpos in T.ite ~loc a b c } + /* + | error + { let loc = L.mk_pos $startpos $endpos in + let msg = Format.dprintf ": expected a term" in + raise (L.Syntax_error (loc, msg)) } + */ + +constructor: + | v=name l=atomic_term* + { v, l } + +constructors: + | VERTICAL_BAR? 
l=separated_nonempty_list(VERTICAL_BAR, constructor) + { l } + +type_def: + | t=name vars=raw_var* EQDEF l=constructors + { let loc = L.mk_pos $startpos $endpos in S.inductive ~loc t vars l } + +mutual_types: + | l=separated_nonempty_list(AND, type_def) + { l } + +attr: + | a=atomic_attr + { a } + | s=raw_var l=atomic_attr+ + { let loc = L.mk_pos $startpos $endpos in T.apply ~loc s l } + +atomic_attr: + | s=raw_var + { s } + | s=QUOTED + { let loc = L.mk_pos $startpos $endpos in T.quoted ~loc s } + | LEFT_PAREN a=attr RIGHT_PAREN + { a } + +attrs: + | LEFT_BRACKET l=separated_nonempty_list(COMMA, attr) RIGHT_BRACKET + { l } + | { [] } + +def: + | id=name COLON ty=term EQDEF t=term + { let v = + let loc = L.mk_pos $startpos $endpos in + T.const ~loc id + in + let loc = L.mk_pos $startpos $endpos in + let eq = T.eq ~loc v t in + S.definition ~loc id ty [eq] } + | id=name COLON ty=term WHERE rules=separated_nonempty_list(SEMI_COLON, term) + { let loc = L.mk_pos $startpos $endpos in + S.definition ~loc id ty rules } + +statement: + | INCLUDE s=QUOTED DOT + { let loc = L.mk_pos $startpos $endpos in S.import ~loc s } + | VAL attrs=attrs v=name COLON t=term DOT + { let loc = L.mk_pos $startpos $endpos in S.decl ~loc ~attrs v t } + | DEF attrs=attrs l=separated_nonempty_list(AND,def) DOT + { let loc = L.mk_pos $startpos $endpos in S.defs ~loc ~attrs l } + | REWRITE attrs=attrs t=term DOT + { let loc = L.mk_pos $startpos $endpos in S.rewrite ~loc ~attrs t } + | ASSERT attrs=attrs t=term DOT + { let loc = L.mk_pos $startpos $endpos in S.assume ~loc ~attrs t } + | LEMMA attrs=attrs t=term DOT + { let loc = L.mk_pos $startpos $endpos in S.lemma ~loc ~attrs t } + | GOAL attrs=attrs t=term DOT + { let loc = L.mk_pos $startpos $endpos in S.goal ~loc ~attrs t } + | DATA attrs=attrs l=mutual_types DOT + { let loc = L.mk_pos $startpos $endpos in S.data ~loc ~attrs l } + /* + | error + { let loc = L.mk_pos $startpos $endpos in + let msg = Format.dprintf ": expected a statement" in + 
raise (L.Syntax_error (loc, msg)) } + */ + +input: + | EOF { None } + | s=statement { Some s } + +file: + | l=statement* EOF { l } + +%% + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/syntax.messages b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/syntax.messages index 8a2109ea1beb146fceafe40dd19a92961ba79b4c..46e2f1fc5db6ebe8912f55f064cdaad6cadc6394 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/syntax.messages +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/syntax.messages @@ -23,15 +23,15 @@ input: ASSERT WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of 
production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -121,15 +121,15 @@ input: GOAL WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -195,15 +195,15 @@ input: LEMMA WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -245,15 +245,15 @@ input: REWRITE WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -295,15 +295,15 @@ input: VAL UPPER_WORD COLON WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -381,15 +381,15 @@ file: ASSERT WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -564,16 +564,16 @@ file: DEF UPPER_WORD COLON INT EQDEF INT WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term -## In state 173, spurious reduction of production def -> name COLON term EQDEF term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term +## In state 173, spurious reduction of production def -> name COLON term EQDEF term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -616,15 +616,15 @@ file: DEF UPPER_WORD COLON WILDCARD WHERE WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -655,15 +655,15 @@ file: DEF UPPER_WORD COLON WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -731,15 +731,15 @@ file: GOAL WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -805,15 +805,15 @@ file: LEMMA WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -891,15 +891,15 @@ file: REWRITE IF WILDCARD THEN WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -929,15 +929,15 @@ file: REWRITE IF WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -991,15 +991,15 @@ file: REWRITE LEFT_PAREN WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1053,15 +1053,15 @@ file: REWRITE LOGIC_FORALL LEFT_PAREN UPPER_WORD COLON WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1141,7 +1141,7 @@ file: REWRITE LOGIC_NOT WILDCARD ARROW ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 62, spurious reduction of production apply_term -> atomic_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1171,15 +1171,15 @@ file: REWRITE MATCH WILDCARD WHERE ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1198,16 +1198,16 @@ file: REWRITE MATCH WILDCARD WITH VERTICAL_BAR UPPER_WORD ARROW INT WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past 
(what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term -## In state 132, spurious reduction of production match_branch -> VERTICAL_BAR raw_var list(var_or_wildcard) ARROW term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term +## In state 132, spurious reduction of production match_branch -> VERTICAL_BAR raw_var list(var_or_wildcard) ARROW term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1410,11 +1410,11 @@ file: REWRITE WILDCARD ARITH_GEQ INT ARITH_LT ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 74, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 84, spurious reduction of production arith_op_term -> plus_term ARITH_GEQ plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 74, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 84, spurious reduction of production arith_op_term -> plus_term ARITH_GEQ plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1541,12 +1541,12 @@ file: REWRITE WILDCARD LOGIC_EQ INT LOGIC_NEQ ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 74, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 91, spurious reduction of production eq_term -> not_term LOGIC_EQ not_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 74, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 91, spurious reduction of production eq_term -> not_term LOGIC_EQ not_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1650,15 +1650,15 @@ file: REWRITE WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1688,8 +1688,8 @@ file: VAL LEFT_BRACKET LEFT_PAREN UPPER_WORD RIGHT_BRACKET ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 7, spurious reduction of production atomic_attr -> raw_var -## In state 15, spurious reduction of production attr -> atomic_attr +## In state 7, spurious reduction of production atomic_attr -> raw_var +## In state 15, spurious reduction of production attr -> atomic_attr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1744,8 +1744,8 @@ file: VAL LEFT_BRACKET UPPER_WORD RIGHT_PAREN ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). -## In state 7, spurious reduction of production atomic_attr -> raw_var -## In state 15, spurious reduction of production attr -> atomic_attr +## In state 7, spurious reduction of production atomic_attr -> raw_var +## In state 15, spurious reduction of production attr -> atomic_attr ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1801,15 +1801,15 @@ file: VAL UPPER_WORD COLON WILDCARD WITH ## This implies that, although the LR(1) items shown above provide an ## accurate view of the past (what has been recognized so far), they ## may provide an INCOMPLETE view of the future (what was expected next). 
-## In state 62, spurious reduction of production apply_term -> atomic_term -## In state 95, spurious reduction of production mult_term -> apply_term -## In state 71, spurious reduction of production plus_term -> mult_term -## In state 68, spurious reduction of production arith_op_term -> plus_term -## In state 89, spurious reduction of production not_term -> arith_op_term -## In state 86, spurious reduction of production eq_term -> not_term -## In state 92, spurious reduction of production and_term -> eq_term -## In state 98, spurious reduction of production or_term -> and_term -## In state 85, spurious reduction of production term -> or_term +## In state 62, spurious reduction of production apply_term -> atomic_term +## In state 95, spurious reduction of production mult_term -> apply_term +## In state 71, spurious reduction of production plus_term -> mult_term +## In state 68, spurious reduction of production arith_op_term -> plus_term +## In state 89, spurious reduction of production not_term -> arith_op_term +## In state 86, spurious reduction of production eq_term -> not_term +## In state 92, spurious reduction of production and_term -> eq_term +## In state 98, spurious reduction of production or_term -> and_term +## In state 85, spurious reduction of production term -> or_term ## <YOUR SYNTAX ERROR MESSAGE HERE> @@ -1861,4 +1861,3 @@ file: WITH ## <YOUR SYNTAX ERROR MESSAGE HERE> - diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/tokens.mly b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/tokens.mly new file mode 100644 index 0000000000000000000000000000000000000000..8cf57071d29cf3eea60b3adfb34486f548685343 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/languages/zf/tokens.mly @@ -0,0 +1,75 @@ + +(* This file is free software, part of dolmen. 
See file "LICENSE" for more information *) + +/* Token declarations for Zf parser */ + +%token EOF + +%token LEFT_PAREN +%token RIGHT_PAREN +%token LEFT_BRACKET +%token RIGHT_BRACKET + +%token WILDCARD +%token COMMA +%token DOT +%token SEMI_COLON +%token COLON +%token EQDEF +%token WHERE +%token AND + +%token LOGIC_TRUE +%token LOGIC_FALSE +%token LOGIC_AND +%token LOGIC_OR +%token LOGIC_NOT +%token LOGIC_IMPLY +%token LOGIC_FORALL +%token LOGIC_EXISTS +%token LOGIC_EQ +%token LOGIC_NEQ +%token LOGIC_EQUIV + +%token ARITH_PLUS +%token ARITH_MINUS +%token ARITH_PRODUCT +%token ARITH_LT +%token ARITH_LEQ +%token ARITH_GT +%token ARITH_GEQ + +%token IF +%token THEN +%token ELSE + +%token MATCH +%token WITH +%token END +%token FUN + +%token INT +%token PROP +%token TYPE + +%token ASSERT +%token DATA +%token DEF +%token VAL +%token GOAL +%token REWRITE +%token LEMMA +%token INCLUDE + +%token ARROW +%token PI +%token VERTICAL_BAR + +%token <string> LOWER_WORD +%token <string> UPPER_WORD +%token <string> QUOTED +%token <string> INTEGER + +%% + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/code.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/code.ml new file mode 100644 index 0000000000000000000000000000000000000000..625f6e4e0b3e704b2868740893a0405ad6a296e2 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/code.ml @@ -0,0 +1,95 @@ + +(* This file is free software, part of dolmen. 
See file "LICENSE" for more information *) + +(* Exit codes *) +(* ************************************************************************* *) + +type t = { + code : int; (* codes are unique for each exit code *) + descr : string; + category : string; + mutable abort : bool; +} + +let hash t = t.code +let equal t t' = t.code = t'.code +let compare t t' = compare t.code t'.code + + +(* Exit with a code and code status *) +(* ************************************************************************* *) + +let is_abort t = t.abort +let abort t = t.abort <- true +let error t = t.abort <- false + +let exit t = + if t.abort then Unix.kill (Unix.getpid ()) Sys.sigabrt; + exit t.code + + +(* Manipulation *) +(* ************************************************************************* *) + +let counter = ref 0 +let errors = ref [] + +(* The create function should only be used for error exit codes, + the ok exit code (i.e. [0]) is create manually and not included + in the errors list. *) +let create ~category ~descr = + incr counter; + let code = !counter in + (* cmdliner uses retcode 124 for cli errors *) + assert (0 < code && code < 124); + let t = { + code; descr; + category; + abort = false; + } in + errors := t :: !errors; + t + +(* *) +let errors () = List.rev !errors + +let descr t = t.code, t.descr +let category t = t.category + +(* Special values *) +(* ************************************************************************* *) + +let ok = { + code = 0; + descr = "the success exit code"; + category = "N/A"; + abort = false; +} + +let bug = { + code = 125; + descr = "on unexpected internal errors (bugs)"; + category = "Internal"; + abort = false; +} + +(* Predefined values *) +(* ************************************************************************* *) + +let generic = + create + ~category:"Generic" + ~descr:"on generic error" +let limit = + create + ~category:"Limits" + ~descr:"upon reaching limits (time, memory, etc..)" +let parsing = + create + 
~category:"Parsing" + ~descr:"on parsing errors" +let typing = + create + ~category:"Typing" + ~descr:"on typing errors" + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/code.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/code.mli new file mode 100644 index 0000000000000000000000000000000000000000..53749c89f004167daa62ba4283421bbe9ddc2ec4 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/code.mli @@ -0,0 +1,74 @@ + +(* This file is free software, part of Dolmen. See file "LICENSE" for more details. *) + +(** {2 Exit codes} *) + +type t +(** An exit code, i.e. an integer between 0 and 126. *) + +val hash : t -> int +val equal : t -> t -> bool +val compare : t -> t -> int + + +(** {2 Manipulating error codes} *) + +val create : category:string -> descr:string -> t +(** Create a new exit code. The string given is used as a description + for the exit code. The create code is active by default. *) + +val descr : t -> int * string +(** Return the actual integer associated to the code, *) + +val category : t -> string +(** Category (used mainly for report documentation). *) + +val errors : unit -> t list +(** Return the list of all created error exit codes. *) + + +(** {2 Special exit codes} *) + +val ok : t +(** The [0] exit code, signalling everything ran fine. *) + +val bug : t +(** Unexpected errors. This uses retcode [125] since this is also + what cmdliner uses. This code will not appear in the list + returned by {errors}. *) + + + +(** {2 Predefined exit codes} *) + +val generic : t +(** Generic exit code. *) + +val limit : t +(* Exit code for when limits are exceeded. *) + +val parsing : t +(** Exit code for parsing errors. *) + +val typing : t +(** Exit codes for typing errors. *) + + + +(** {2 Exit code status} *) + +val exit : t -> _ +(** Exit with the given code. + Note: exit codes can be silenced, in which case the process + will exit with an exit code of [0]. *) + +val is_abort : t -> bool +(** Whether an exit code is active. 
*) + +val abort : t -> unit +(** Make the exit code abort instead of properly exiting. *) + +val error : t -> unit +(** Make the exit code properly exit. *) + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/dune b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/dune index 19f869f0e0923bc5a5125767eb9b8101e8ce9674..432b5cc79faec910ead27b6f12676b77a87fb66e 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/dune +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/dune @@ -1,9 +1,10 @@ (library (name dolmen_loop) (public_name dolmen_loop) + (instrumentation (backend bisect_ppx)) (libraries ; External deps - gen unix fmt + gen unix fmt pp_loc ; main dolmen deps , with versioned languages deps dolmen dolmen.intf dolmen.std dolmen.class @@ -12,8 +13,10 @@ dolmen_type ) (modules + ; Useful utilities + Report ; Interfaces Expr_intf Typer_intf Headers_intf State_intf ; Implementations - Logic State Parser Typer Headers Pipeline) + Logic Code State Parser Typer Headers Pipeline) ) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/expr_intf.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/expr_intf.ml index 72e4f8605f4522a234a01a83d4da7cab90fa50eb..d43225b1d41301d1a2ca2d30933358a1d7efe6b5 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/expr_intf.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/expr_intf.ml @@ -9,12 +9,28 @@ module type S = sig type ty type ty_var - type ty_const + type ty_cst type term type term_var - type term_const + type term_cst type formula end + +module type Print = sig + + include S + + val ty : Format.formatter -> ty -> unit + val ty_var : Format.formatter -> ty_var -> unit + val ty_cst : Format.formatter -> ty_cst -> unit + + val term : Format.formatter -> term -> unit + val term_var : Format.formatter -> term_var -> unit + val term_cst : Format.formatter -> term_cst -> unit + + val formula : Format.formatter -> formula -> unit + +end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/headers.ml 
b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/headers.ml index 8a9c58a56dc47247576c036155629b3e54f2025c..dec9219c79cf2abfc074fa5001b04e327dd3f6dc 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/headers.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/headers.ml @@ -8,7 +8,6 @@ Note that this does *not* include the checking of constraints between successivestatements such as what smtlib specifies, see flow.ml - The way this check is implemented/done: - a number of meta-data fields are defines in module [Field], together with ways to parse/print them. @@ -101,10 +100,10 @@ module Field = struct match s with (* Language version *) - | { Id.ns = Id.Attr; Id.name = ":smt-lib-version"; } -> + | { Id.ns = Attr; Id.name = Simple ":smt-lib-version"; } -> begin match args with | [ { Ast.term = Ast.Symbol { - Id.ns = Id.Value Id.Real; Id.name = version }; _ } ] -> + Id.ns = Value Real; Id.name = Simple version; }; _ } ] -> if check_version_number version then Ok (Lang_version, version) else @@ -114,31 +113,32 @@ module Field = struct end (* Problem source *) - | { Id.ns = Id.Attr; Id.name = ":source"; } -> + | { Id.ns = Attr; Id.name = Simple ":source"; } -> begin match args with | [ { Ast.term = Ast.Symbol { - Id.ns = Id.Attr; Id.name = descr }; _ } ] -> + Id.ns = Attr; Id.name = Simple descr }; _ } ] -> Ok (Problem_source, descr) | [] -> Error (loc, "empty value for :source") | { Ast.loc; _ } :: _ -> Error (loc, "Expected a single symbol as description") end (* Problem license *) - | { Id.ns = Id.Attr; Id.name = ":license"; } -> + | { Id.ns = Attr; Id.name = Simple ":license"; } -> begin match args with | [ { Ast.term = Ast.Symbol { - Id.ns = Id.Value Id.String; Id.name = license }; _ } ] -> + Id.ns = Value String; Id.name = Simple license }; _ } ] -> Ok (Problem_license, license) | [] -> Error (loc, "empty value for :license") | { Ast.loc; _ } :: _ -> Error (loc, "Expected a single string in quotes") end (* Problem category *) - | { Id.ns = Id.Attr; Id.name = 
":category"; } -> + | { Id.ns = Attr; Id.name = Simple ":category"; } -> begin match args with | [ { Ast.term = Ast.Symbol { - Id.ns = Id.Value Id.String; - Id.name = (("crafted"|"random"|"industrial") as category) }; _ } ] -> + Id.ns = Value String; + Id.name = Simple (("crafted"|"random"|"industrial") as category) + }; _ }; ] -> Ok (Problem_category, category) | [] -> Error (loc, "empty value for :category") | { Ast.loc; _ } :: _ -> @@ -147,10 +147,10 @@ module Field = struct (* Problem status *) - | { Id.ns = Id.Attr; Id.name = ":status"; } -> + | { Id.ns = Attr; Id.name = Simple ":status"; } -> begin match args with | [ { Ast.term = Ast.Symbol { - Id.name = (("sat"|"unsat"|"unknown") as status) ; _ }; _ } ] -> + Id.name = Simple (("sat"|"unsat"|"unknown") as status) ; _ }; _ }; ] -> Ok (Problem_status, status) | _ -> Error (loc, "Expected sat|unsat|unknown") end @@ -169,6 +169,46 @@ module Field = struct end +(* Header errors & warnings *) +(* ************************************************************************ *) + +let code = + Code.create + ~category:"Header" + ~descr:"on header errors" + +let missing_header_error = + Report.Error.mk ~code ~mnemonic:"header-missing" + ~message:(fun fmt (lang, missing) -> + let pp_sep fmt () = Format.fprintf fmt ",@ " in + Format.fprintf fmt "The following header fields are missing: %a" + (Format.pp_print_list ~pp_sep (Field.print ?lang)) missing) + ~name:"Missing header statement" () + +let invalid_header_value_error = + Report.Error.mk ~code ~mnemonic:"header-invalid-value" + ~message:(fun fmt (field, lang, msg) -> + Format.fprintf fmt "Invalid value for header %a: %s" + (Field.print ?lang) field msg) + ~name:"Invalid header value" () + +let bad_header_payload = + Report.Error.mk ~code ~mnemonic:"header-bad-payload" + ~message:(fun fmt msg -> + Format.fprintf fmt "Could not parse the header: %s" msg) + ~name:"Incorrect header payload" () + +let empty_header_field = + Report.Warning.mk ~code 
~mnemonic:"empty-header-field" + ~message:(fun fmt (lang, l) -> + let pp_sep fmt () = Format.fprintf fmt ",@ " in + Format.fprintf fmt + "The following header fields are missing and thus \ + default values will be assumed: %a" + (Format.pp_print_list ~pp_sep (Field.print ?lang)) l) + ~name:"Header field with a missing value" () + + (* Headers *) (* ************************************************************************ *) @@ -231,10 +271,7 @@ module Pipe(State : State_intf.Header_pipe match List.filter (fun f -> not (mem h f)) wanted with | [] -> st | missing -> - let pp_sep fmt () = Format.fprintf fmt ",@ " in - State.warn st "The following header fields are missing and thus \ - default values will be assumed: %a" - (Format.pp_print_list ~pp_sep (Field.print ?lang)) missing + State.warn st empty_header_field (lang, missing) let check_required st h = let lang = State.input_lang st in @@ -245,10 +282,7 @@ module Pipe(State : State_intf.Header_pipe in match List.filter (fun f -> not (mem h f)) required with | [] -> st - | missing -> - let pp_sep fmt () = Format.fprintf fmt ",@ " in - State.error st "The following header fields are missing: %a" - (Format.pp_print_list ~pp_sep (Field.print ?lang)) missing + | missing -> State.error st missing_header_error (lang, missing) let check st = if not (State.check_headers st) then st @@ -262,10 +296,14 @@ module Pipe(State : State_intf.Header_pipe (* Incremental checks and construction of the header set *) - let error st loc fmt = + let error st loc err param = let file = State.input_file_loc st in let loc : Dolmen.Std.Loc.full = { file; loc; } in - State.error ~loc st fmt + State.error ~loc st err param + + let invalid_header_value st loc field msg = + let lang = State.input_lang st in + error st loc invalid_header_value_error (field, lang, msg) let check_header st loc field value = match (field : Field.t) with @@ -274,14 +312,16 @@ module Pipe(State : State_intf.Header_pipe | None -> st | Some v -> if v = value then st - 
else error st loc "This language version must be: %s" v + else invalid_header_value st loc Lang_version + (Format.sprintf "language version must be: %s" v) end | Problem_license -> begin match State.allowed_licenses st with | [] -> st | allowed -> if List.mem value allowed then st - else error st loc "This is not an allowed license" + else invalid_header_value st loc Problem_license + "this license is not in the list of allowed licenses" end | _ -> st @@ -295,7 +335,8 @@ module Pipe(State : State_intf.Header_pipe | { descr = Set_info t; loc; _ } -> begin match Field.parse ?lang t with | Not_a_header -> st - | Error (loc, msg) -> error st loc "%s" msg + | Error (loc, msg) -> + error st loc bad_header_payload msg | Ok (field, value) -> let st = check_header st loc field value in let st = State.set_header_state st (set h field value) in @@ -305,8 +346,7 @@ module Pipe(State : State_intf.Header_pipe if mem h Problem_status then State.set_header_state st (remove h Problem_status) else - error st loc "This statement lacks a %s header" - (Field.name lang Problem_status) + error st loc missing_header_error (lang, [Problem_status]) | _ -> st in st, c diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/logic.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/logic.ml index 9b5ef8526160d5d3cde2d86cac90c60a8d4b35b0..94a8736dac5f04953d2e93de2641c5b145b9b424 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/logic.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/logic.ml @@ -10,4 +10,5 @@ module P = Dolmen.Class.Logic.Make (Dolmen.Std.Term) (Dolmen.Std.Statement) -include (P : Dolmen.Class.Logic.S with type statement := Dolmen.Std.Statement.t) +include (P : Dolmen.Class.Logic.S with type statement := Dolmen.Std.Statement.t + and type file := Dolmen.Std.Loc.file) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/logic.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/logic.mli index 4b3c3c056f1c77ebdd50a0beef8542011d9776ac..aeb1939423501f6071977e13e05e1176c498ef46 100644 
--- a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/logic.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/logic.mli @@ -4,4 +4,5 @@ (** This is an instanciation of the Logic class with the standard implementation of parsed terms and statements of Dolmen. *) include Dolmen.Class.Logic.S with type statement := Dolmen.Std.Statement.t + and type file := Dolmen.Std.Loc.file diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/parser.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/parser.ml index 91afc5d0638a8ef47d54a7c21c22803fe93cdc4f..280357b88d99a4edab259352bda62447c75c74ac 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/parser.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/parser.ml @@ -1,6 +1,56 @@ (* This file is free software, part of Dolmen. See file "LICENSE" for more details. *) +(* Parsing errors *) +(* ************************************************************************ *) + +let extension_not_found = + Report.Error.mk ~code:Code.generic ~mnemonic:"ext-unknown" + ~message:(fun fmt ext -> + Format.fprintf fmt + "@[<hv>The following extension was not recognized: '%s'.@ %s" ext + "Please use a recognised extension or specify an input language on the command line") + ~name:"File extension unknown" () + +let file_not_found = + Report.Error.mk ~code:Code.generic ~mnemonic:"file-not-found" + ~message:(fun fmt (dir, f) -> + if dir = "." 
then + Format.fprintf fmt "File not found: '%s'" f + else + Format.fprintf fmt + "File not found: '%s' in directory '%s'" f dir) + ~name:"File not Found" () + +let input_lang_changed = + Report.Error.mk ~code:Code.generic ~mnemonic:"input-lang-changed" + ~message:(fun fmt (old_lang, new_lang) -> + Format.fprintf fmt + "Input language changed from %s to %s (probably because of an include statement)" + (Logic.string_of_language old_lang) + (Logic.string_of_language new_lang)) + ~name:"Input language change" () + +let lexing_error = + Report.Error.mk ~code:Code.parsing ~mnemonic:"lexing-error" + ~message:(fun fmt lex -> + Format.fprintf fmt + "Lexing error: invalid character '%s'" lex) + ~name:"Lexing error" () + +let parsing_error = + Report.Error.mk ~code:Code.parsing ~mnemonic:"parsing-error" + ~message:(fun fmt perr -> + match perr with + | `Regular msg -> + Format.fprintf fmt "%t" msg + | `Advanced (prod, lexed, expected) -> + Format.fprintf fmt + "@[<v>@[<hv>while parsing %t,@ read %t,@]@ @[<hov>but expected %t.@]@]" + prod lexed expected) + ~name:"Parsing error" () + + (* Pipe functor *) (* ************************************************************************ *) @@ -15,6 +65,14 @@ module Pipe module S = Dolmen.Std.Statement + let set_lang ?loc st l = + match State.input_lang st with + | None -> State.set_lang st l + | Some l' -> + if l = l' + then State.set_lang st l + else State.error ?loc st input_lang_changed (l', l) + (* Parsing *) (* ************************************************************************ *) @@ -29,65 +87,87 @@ module Pipe match gen () with | Some _ as res -> res | None -> cl (); None + | exception exn -> + let bt = Printexc.get_raw_backtrace () in + cl (); + Printexc.raise_with_backtrace exn bt in aux let wrap_parser g = fun st -> if State.is_interactive st then Format.printf "%s @?" 
(State.prelude st); - State.start `Parsing; - let ret = g () in - State.stop `Parsing; - ret + match g () with + | ret -> st, ret + | exception Dolmen.Std.Loc.Uncaught (loc, exn, bt) -> + let file = State.input_file_loc st in + let st = + State.error st ~loc:{ file; loc; } Report.Error.uncaught_exn (exn, bt) + in + st, None + | exception Dolmen.Std.Loc.Lexing_error (loc, lex) -> + let file = State.input_file_loc st in + let st = State.error st ~loc:{ file; loc; } lexing_error lex in + st, None + | exception Dolmen.Std.Loc.Syntax_error (loc, perr) -> + let file = State.input_file_loc st in + let st = State.error st ~loc:{ file; loc; } parsing_error perr in + st, None let parse prelude st = - State.start `Parsing; (* Parse the input *) let st', g = - match State.input_source st with - | `Stdin -> - let lang, gen, _ = Logic.parse_input - ?language:(State.input_lang st) - (`Stdin (Logic.Smtlib2 `Latest)) - in - State.set_lang st lang, gen - | `Raw (filename, contents) -> - let lang = - match State.input_lang st with - | Some l -> l - | None -> - let res, _, _ = Logic.of_filename filename in - res - in - let lang, gen, cl = Logic.parse_input - ~language:lang (`Raw (filename, lang, contents)) in - State.set_lang st lang, gen_finally gen cl - | `File f -> - let s = Dolmen.Std.Statement.include_ f [] in - (* Auto-detect input format *) - let lang = - match State.input_lang st with - | Some l -> l - | None -> - let res, _, _ = Logic.of_filename f in - res - in - (* Formats Dimacs and Tptp are descriptive and lack the emission - of formal solve/prove instructions, so we need to add them. 
*) - let s' = - match lang with - | Logic.Zf - | Logic.ICNF - | Logic.Smtlib2 _ - | Logic.Alt_ergo -> s - | Logic.Dimacs - | Logic.Tptp _ -> - Dolmen.Std.Statement.pack [s; Dolmen.Std.Statement.prove ()] - in - State.set_lang st lang, - (Gen.singleton s') + try + match State.input_source st with + | `Stdin -> + let lang, file_loc, gen, _ = Logic.parse_input + ?language:(State.input_lang st) + (`Stdin (Logic.Smtlib2 `Latest)) + in + let st = State.set_input_file_loc st file_loc in + let st = set_lang st lang in + st, gen + | `Raw (filename, contents) -> + let lang = + match State.input_lang st with + | Some l -> l + | None -> + let res, _, _ = Logic.of_filename filename in + res + in + let lang, file_loc, gen, cl = Logic.parse_input + ~language:lang (`Raw (filename, lang, contents)) in + let st = State.set_input_file_loc st file_loc in + let st = set_lang st lang in + st, gen_finally gen cl + | `File f -> + let s = Dolmen.Std.Statement.include_ f [] in + (* Auto-detect input format *) + let lang = + match State.input_lang st with + | Some l -> l + | None -> + let res, _, _ = Logic.of_filename f in + res + in + (* Formats Dimacs and Tptp are descriptive and lack the emission + of formal solve/prove instructions, so we need to add them. 
*) + let s' = + match lang with + | Logic.Zf + | Logic.ICNF + | Logic.Smtlib2 _ + | Logic.Alt_ergo -> s + | Logic.Dimacs + | Logic.Tptp _ -> + Dolmen.Std.Statement.pack [s; Dolmen.Std.Statement.prove ()] + in + set_lang st lang, + (Gen.singleton s') + with + | Logic.Extension_not_found ext -> + State.error st extension_not_found ext, Gen.empty in - State.stop `Parsing; (* Wrap the resulting parser *) st', wrap_parser (Gen.append (Gen.of_list prelude) g) @@ -105,64 +185,36 @@ module Pipe ) let expand st c = - State.start `Include; let ret = match c with | { S.descr = S.Pack l; _ } -> - st, `Gen (merge, Gen.of_list l) + st, `Gen (merge, wrap_parser (Gen.of_list l)) (* TODO: filter the statements by passing some stions *) | { S.descr = S.Include file; _ } -> - let loc = c.loc in + let loc = { Dolmen.Std.Loc.file = State.input_file_loc st; loc = c.loc; } in let language = State.input_lang st in let dir = State.input_dir st in begin match Logic.find ?language ~dir file with | None -> - let loc = { Dolmen.Std.Loc.file = State.input_file_loc st; loc; } in - State.file_not_found ~loc ~dir ~file + State.error ~loc st file_not_found (dir, file), `Ok | Some file -> - let file_loc = Dolmen.Std.Loc.mk_file file in - let st = State.set_input_file_loc st file_loc in begin match State.input_mode st with | None | Some `Incremental -> - let lang, gen, cl = Logic.parse_input ?language (`File file) in - let st = State.set_lang st lang in - st, `Gen (merge, gen_finally gen cl) + let lang, file_loc, gen, cl = Logic.parse_input ?language (`File file) in + let st = State.set_input_file_loc st file_loc in + let st = set_lang ~loc st lang in + st, `Gen (merge, wrap_parser (gen_finally gen cl)) | Some `Full -> - let lang, l = Logic.parse_file_lazy ?language file in - let st = State.set_lang st lang in - st, `Gen (merge, gen_of_llist l) + let lang, file_loc, l = Logic.parse_file_lazy ?language file in + let st = State.set_input_file_loc st file_loc in + let st = set_lang ~loc st lang in + st, 
`Gen (merge, wrap_parser (gen_of_llist l)) end end | _ -> (st, `Ok) in - State.stop `Include; ret -(* - (* Header & Automaton flow checking *) - (* ************************************************************************ *) - - - let first_mode ~check_headers lang = - match (lang: Parser.language) with - | Smtlib2 _ when check_headers -> Start { expect = Lang_Version; } - | _ -> Assert - - let next_header lang current_header = - match (lang: Parser.language) with - | Smtlib2 _ -> - begin match (current_header : header) with - | Lang_Version -> Some Problem_Logic - | Problem_Logic -> Some Problem_Source - | Problem_Source -> Some Problem_License - | Problem_License -> Some Problem_Category - | Problem_Category -> Some Problem_Status - | Problem_Status -> None - end - | _ -> None -*) - - end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/parser.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/parser.mli index d6d4649afdffdc48b7a44274e196ea47d4a967d4..ed6d5e53dd1f6f969780f4d70c4673de83f03cde 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/parser.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/parser.mli @@ -12,13 +12,14 @@ module Pipe val parse : Dolmen.Std.Statement.t list -> State.t -> - State.t * (State.t -> Dolmen.Std.Statement.t option) + State.t * (State.t -> State.t * Dolmen.Std.Statement.t option) (** Parsing function. Reads a list of prelude statements, and the state and returns a tuple of the new state (including the detected input language), together with a statement generator. *) val expand : State.t -> Dolmen.Std.Statement.t -> - State.t * [ `Ok | `Gen of (State.t -> State.t -> State.t) * Dolmen.Std.Statement.t Gen.t ] + State.t * [ `Ok | `Gen of (State.t -> State.t -> State.t) * + (State.t -> State.t * Dolmen.Std.Statement.t option) ] (** Expand statements (such as includes). 
Returns the new state, and either: - [ `Ok ], which means the statement can be propagated as is - [ `Gen (flat, g) ], if the statement expands into a generator [g]. The bool [flat] diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/pipeline.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/pipeline.ml index 955dbcdbc1118165ac21b072d7d3bca3d4ca173e..a8a5ea502a53e74341583537fb3f493d2baac573 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/pipeline.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/pipeline.ml @@ -1,16 +1,18 @@ (* This file is free software, part of dolmen. See file "LICENSE" for more information *) -module Make(State : State_intf.Pipeline) = struct +exception Sigint +exception Out_of_time +exception Out_of_space - exception Sigint - exception Out_of_time - exception Out_of_space +module Make(State : State_intf.Pipeline) = struct (* GC alarm for time/space limits *) (* ************************************************************************ *) - (* This function analyze the current size of the heap *) + (* This function analyze the current size of the heap + TODO: take into account the minor heap size + TODO: should we only consider the live words ? *) let check size_limit = function () -> let heap_size = (Gc.quick_stat ()).Gc.heap_words in let s = float heap_size *. float Sys.word_size /. 8. in @@ -25,14 +27,18 @@ module Make(State : State_intf.Pipeline) = struct TODO: this does not work on windows. TODO: allow to use the time limit only for some passes *) let setup_alarm t s = - let _ = Unix.setitimer Unix.ITIMER_REAL - Unix.{it_value = t; it_interval = 0.01 } in - Gc.create_alarm (check s) + if t <> infinity then + ignore (Unix.setitimer Unix.ITIMER_REAL + Unix.{it_value = t; it_interval = 0.01 }); + if s <> infinity then (Some (Gc.create_alarm (check s))) + else None let delete_alarm alarm = - let _ = Unix.setitimer Unix.ITIMER_REAL - Unix.{it_value = 0.; it_interval = 0. 
} in - Gc.delete_alarm alarm + (* it's alwyas safe to delete the timer here, + even if none was present before. *) + ignore (Unix.setitimer Unix.ITIMER_REAL + Unix.{it_value = 0.; it_interval = 0. }); + match alarm with None -> () | Some alarm -> Gc.delete_alarm alarm (* The Unix.timer works by sending a Sys.sigalrm, so in order to use it, we catch it and raise the Out_of_time exception. *) @@ -52,10 +58,9 @@ module Make(State : State_intf.Pipeline) = struct (* Pipeline and execution *) (* ************************************************************************ *) - type 'a gen = 'a Gen.t type 'st merge = 'st -> 'st -> 'st type ('a, 'b) cont = [ `Done of 'a | `Continue of 'b ] - type ('st, 'a) fix = [ `Ok | `Gen of 'st merge * 'a gen ] + type ('st, 'a) fix = [ `Ok | `Gen of 'st merge * ('st -> 'st * 'a option) ] type 'st k_exn = { k : 'a. 'st -> Printexc.raw_backtrace -> exn -> 'a; } type ('st, 'a, 'b) op = { @@ -146,11 +151,11 @@ module Make(State : State_intf.Pipeline) = struct end and eval_gen_fold : type st a. - exn: st k_exn -> (st, a, unit) t -> st -> a gen -> st = + exn: st k_exn -> (st, a, unit) t -> st -> (st -> st * a option) -> st = fun ~exn pipe st g -> - match g () with - | None -> st - | Some x -> + match g st with + | st, None -> st + | st, Some x -> let st', () = eval ~exn pipe st x in eval_gen_fold ~exn pipe st' g | exception e -> @@ -160,8 +165,8 @@ module Make(State : State_intf.Pipeline) = struct (* Aux function to eval a pipeline on the current value of a generator. *) let run_aux ~exn pipe g st = match g st with - | None -> None - | Some x -> Some (eval ~exn pipe st x) + | st, None -> `Done st + | st, Some x -> `Continue (eval ~exn pipe st x) | exception e -> let bt = Printexc.get_raw_backtrace () in exn.k st bt e @@ -171,15 +176,15 @@ module Make(State : State_intf.Pipeline) = struct (so all expanded values count toward the same limit). *) let rec run : type a. 
- finally:(State.t -> exn option -> State.t) -> - (State.t -> a option) -> State.t -> (State.t, a, unit) t -> State.t + finally:(State.t -> (Printexc.raw_backtrace * exn) option -> State.t) -> + (State.t -> State.t * a option) -> State.t -> (State.t, a, unit) t -> State.t = fun ~finally g st pipe -> let exception Exn of State.t * Printexc.raw_backtrace * exn in let time = State.time_limit st in let size = State.size_limit st in let al = setup_alarm time size in let exn = { k = fun st bt e -> - (* delete alamr as soon as possible *) + (* delete alarm as soon as possible *) let () = delete_alarm al in (* go the the correct handler *) raise (Exn (st, bt, e)); @@ -189,13 +194,13 @@ module Make(State : State_intf.Pipeline) = struct match run_aux ~exn pipe g st with (* End of the run, yay ! *) - | None -> + | `Done st -> let () = delete_alarm al in st (* Regular case, we finished running the pipeline on one input value, let's get to the next one. *) - | Some (st', ()) -> + | `Continue (st', ()) -> let () = delete_alarm al in let st'' = try finally st' None with _ -> st' in run ~finally g st'' pipe @@ -203,15 +208,14 @@ module Make(State : State_intf.Pipeline) = struct (* "Normal" exception case: the exn was raised by an operator, and caught then re-raised by the {exn} cotinuation passed to run_aux *) | exception Exn (st, bt, e) -> + (* delete alarm *) + let () = delete_alarm al in (* Flush stdout and print a newline in case the exn was raised in the middle of printing *) Format.pp_print_flush Format.std_formatter (); Format.pp_print_flush Format.err_formatter (); - (* Print the backtrace if requested *) - if Printexc.backtrace_status () then - Printexc.print_raw_backtrace stdout bt; (* Go on running the rest of the pipeline. 
*) - let st' = finally st (Some e) in + let st' = finally st (Some (bt,e)) in run ~finally g st' pipe (* Exception case for exceptions, that can realisically occur for all @@ -227,11 +231,8 @@ module Make(State : State_intf.Pipeline) = struct raised in the middle of printing *) Format.pp_print_flush Format.std_formatter (); Format.pp_print_flush Format.err_formatter (); - (* Print the backtrace if requested *) - if Printexc.backtrace_status () then - Printexc.print_raw_backtrace stdout bt; (* Go on running the rest of the pipeline. *) - let st' = finally st (Some e) in + let st' = finally st (Some (bt,e)) in run ~finally g st' pipe end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/pipeline.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/pipeline.mli index f08d26dec33fccb002af4ee047f65865dd4626dc..b0ed190bf2332b373dbcf770458e19d87029e7ea 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/pipeline.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/pipeline.mli @@ -10,13 +10,13 @@ performing a fixpoint expansion. *) +exception Sigint +exception Out_of_time +exception Out_of_space + module Make(State : State_intf.Pipeline) : sig (** Concrete pipelines. *) - exception Sigint - exception Out_of_time - exception Out_of_space - (** {2 Type definitions } *) type ('st, 'a, 'b) op @@ -30,7 +30,7 @@ module Make(State : State_intf.Pipeline) : sig type 'st merge = 'st -> 'st -> 'st (** Merge function used at the end of a fixpoint to get the resulting state. *) - type ('st, 'a) fix = [ `Ok | `Gen of 'st merge * 'a Gen.t ] + type ('st, 'a) fix = [ `Ok | `Gen of 'st merge * ('st -> ('st * 'a option)) ] (** Type used to fixpoint expanding statements such as includes. *) type ('a, 'b) cont = [ `Done of 'a | `Continue of 'b ] @@ -86,8 +86,8 @@ module Make(State : State_intf.Pipeline) : sig (** Evaluate a pipeline to a function. 
*) val run : - finally:(State.t -> exn option -> State.t) -> - (State.t -> 'a option) -> State.t -> + finally:(State.t -> (Printexc.raw_backtrace * exn) option -> State.t) -> + (State.t -> State.t * 'a option) -> State.t -> (State.t, 'a, unit) t -> State.t (** Loop the evaluation of a pipeline over a generator, and starting options. @param finally a function called at the end of every iteration (even if diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/report.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/report.ml new file mode 100644 index 0000000000000000000000000000000000000000..0b093ea5ae78921e2d4036f993a93e6788efdb31 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/report.ml @@ -0,0 +1,298 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + +(* Types *) +(* ************************************************************************* *) + +type _ kind = + | Error : { id : int; } -> [> `Error ] kind + | Warning : { id : int; } -> [> `Warning ] kind + +type ('kind, 'param) aux = { + + (* report identification *) + kind : 'kind kind; + code : Code.t; + + (* cli interaction *) + mnemonic : string; + + (* short message printing *) + message : Format.formatter -> 'param -> unit; + hints : ('param -> (Format.formatter -> unit) option) list; + + (* long documentation *) + name : string; + doc : Format.formatter -> unit; + + } + +type any = Any : (_, _) aux -> any +type 'a error = ([ `Error ], 'a) aux +type 'a warning = ([ `Warning ], 'a) aux +type any_error = Any_err : _ error -> any_error +type any_warning = Any_warn : _ warning -> any_warning + + +(* Common functions *) +(* ************************************************************************* *) + +let code report = report.code +let name report = report.name +let mnemonic report = report.mnemonic + +let print_doc fmt report = + Format.fprintf fmt "%t" report.doc + +let print fmt (report, param) = + report.message fmt param + +let print_hints fmt 
(report, param) = + List.iter (fun hint -> + match hint param with + | None -> () + | Some pp -> + Format.fprintf fmt "@\n@[<hov 2>Hint: %t@]" pp + ) report.hints + + + +(* Creation and fetching *) +(* ************************************************************************* *) + +let locked = ref false +let error_count = ref 0 +let warning_count = ref 0 +let mnemonics_table = Hashtbl.create 113 + +let no_doc fmt = + Format.fprintf fmt "No documentation yet" + +let mk_aux + ~kind ~code + ~mnemonic + ~message ?(hints=[]) + ~name ?(doc=no_doc) () = + assert (not !locked && not (Hashtbl.mem mnemonics_table mnemonic)); + let res = { code; kind; mnemonic; message; hints; name; doc; } in + Hashtbl.add mnemonics_table mnemonic (Any res); + res + +let mk_error + ?(code=Code.bug) ~mnemonic ~message ?hints ~name ?doc () : _ error = + let id = incr error_count; !error_count in + mk_aux ~kind:(Error { id; }) ~code ~mnemonic ~message ?hints ~name ?doc () + +let mk_warning + ?(code=Code.bug) ~mnemonic ~message ?hints ~name ?doc () : _ warning = + let id = !warning_count in + incr warning_count; + mk_aux ~kind:(Warning { id; }) ~code ~mnemonic ~message ?hints ~name ?doc () + + +(* Warnings interface *) +(* ************************************************************************* *) + +module Warning = struct + + type 'a t = 'a warning + + let mk = mk_warning + let code = code + let name = name + let mnemonic = mnemonic + let print = print + let print_doc = print_doc + let print_hints = print_hints + + module Status = struct + + type t = + | Disabled + | Enabled + | Fatal + + let print fmt = function + | Disabled -> Format.fprintf fmt "disabled" + | Enabled -> Format.fprintf fmt "enabled" + | Fatal -> Format.fprintf fmt "fatal" + + let to_string t = + Format.asprintf "%a" print t + + let merge s s' = + match s, s' with + | Disabled, Disabled -> Disabled + | _, Fatal | Fatal, _ -> Fatal + | _, _ -> Enabled + + end + +end + +(* Error interface *) +(* 
************************************************************************* *) + +module Error = struct + + type 'a t = 'a error + + let mk = mk_error + let code = code + let name = name + let mnemonic = mnemonic + let print = print + let print_doc = print_doc + let print_hints = print_hints + + let user_interrupt : _ t = + mk ~code:Code.limit ~mnemonic:"user-interrupt" + ~message:(fun fmt () -> + Format.fprintf fmt "User Interrupt") + ~name:"User Interrupt" () + + let timeout : _ t = + mk ~code:Code.limit ~mnemonic:"timeout" + ~message:(fun fmt () -> + Format.fprintf fmt "Time limit reached") + ~name:"Timeout" () + + let spaceout : _ t = + mk ~code:Code.limit ~mnemonic:"spaceout" + ~message:(fun fmt () -> + Format.fprintf fmt "Memory limit reached") + ~name:"Out of space" () + + let internal_error = + mk ~code:Code.bug ~mnemonic:"internal-error" + ~message:(fun fmt t -> + Format.fprintf fmt "%t" t) + ~name:"Internal Error" () + + let uncaught_exn = + mk ~code:Code.bug ~mnemonic:"uncaught-exn" + ~message:(fun fmt (exn, bt) -> + Format.fprintf fmt + "Uncaught exception:@\n%s%a%s" + (Printexc.to_string exn) + (if Printexc.backtrace_status () + then Format.pp_print_newline + else (fun _ _ -> ())) () + (if Printexc.backtrace_status () + then Printexc.raw_backtrace_to_string bt + else "")) + ~name:"Uncaught exception" () + +end + +(* Reports *) +(* ************************************************************************* *) + +module T = struct + + type all = [ `All ] + type err = [ `Error of any_error ] + type warn = [ `Warning of any_warning ] + + type t = [ all | err | warn ] + + let name = function + | `All -> "All warnings" + | `Error Any_err e -> name e + | `Warning Any_warn w -> name w + + let mnemonic = function + | `All -> "all" + | `Error Any_err e -> mnemonic e + | `Warning Any_warn w -> mnemonic w + + let kind = function + | `All -> "group" + | `Error _ -> "error" + | `Warning _ -> "warning" + + let category = function + | `All -> "General" + | `Error Any_err 
e -> Code.category (code e) + | `Warning Any_warn w -> Code.category (code w) + + let doc = function + | `Error Any_err e -> e.doc + | `Warning Any_warn w -> w.doc + | `All -> + Format.dprintf "%a" Format.pp_print_text + "The group of all warnings. Its main use is when specifying \ + a set of warnings to enable/disable/make fatal when using the \ + '-w' option of Dolmen." + + let find_mnemonic = function + | "all" -> Some `All + | mnemonic -> + begin match Hashtbl.find_opt mnemonics_table mnemonic with + | Some (Any ({ kind = Warning _; _ } as w)) -> + Some (`Warning (Any_warn w)) + | Some (Any ({ kind = Error _; _ } as e)) -> + Some (`Error (Any_err e)) + | None -> None + end + + let list () = + Hashtbl.fold (fun _ any acc -> + let elt = + match any with + | Any ({ kind = Warning _; _ } as w) -> + `Warning (Any_warn w) + | Any ({ kind = Error _; _ } as e) -> + `Error (Any_err e) + in + elt :: acc + ) mnemonics_table [`All] + +end + +(* Configuration *) +(* ************************************************************************* *) + +module Conf = struct + + type t = { + warnings : Warning.Status.t array; + } + + let mk ~default = + locked := true; + { warnings = Array.make !warning_count default; } + + let _copy t = + { warnings = Array.copy t.warnings; } + + let _get conf i = + conf.warnings.(i) + + let _set_inplace conf i status = + conf.warnings.(i) <- status + + let _update_inplace conf i status = + _set_inplace conf i (Warning.Status.merge status (_get conf i)) + + let status conf (warning : _ warning) = + let (Warning { id; }) = warning.kind in + _get conf id + + let update f param conf = function + | `All -> + for i = 0 to Array.length conf.warnings do + f conf i param + done; + conf + | `Warning (Any_warn { kind = Warning { id; }; _}) -> + let conf = _copy conf in + f conf id param; + conf + + let fatal conf w = update _update_inplace Fatal conf w + let enable conf w = update _update_inplace Enabled conf w + let disable conf w = update _set_inplace 
Disabled conf w + let set_enabled conf w = update _set_inplace Enabled conf w + +end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/report.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/report.mli new file mode 100644 index 0000000000000000000000000000000000000000..e662b20b14982d063c62e4b464eaac2c6224c0fb --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/report.mli @@ -0,0 +1,176 @@ + +(* This file is free software, part of Dolmen. See file "LICENSE" for more details. *) + +(** {2 Some types} *) + +type 'a error +type 'a warning + +type any_error = Any_err : _ error -> any_error +type any_warning = Any_warn : _ warning -> any_warning + + +(** {2 Reports} *) + +module T : sig + + type all = [ `All ] + type err = [ `Error of any_error ] + type warn = [ `Warning of any_warning ] + + type t = [ all | err | warn ] + + val list : unit -> t list + (** List all reports. *) + + val find_mnemonic : string -> t option + (** Find the warning/error/group associated to a mnemonic. *) + + val name : [< t ] -> string + (** Name of a report. *) + + val mnemonic : [< t ] -> string + (** mnemonic of a report. *) + + val kind : [< t ] -> string + (** kind of a report. *) + + val category : [< t ] -> string + (** category of a report. *) + + val doc : [< t ] -> (Format.formatter -> unit) + (** documentation for a report. *) + +end + + +(** {2 Errors} *) + +module Error : sig + + type 'a t = 'a error + (** The type of errors, parameterized by their payload/parameters. *) + + val code : _ t -> Code.t + (** Return the return code of an error. *) + + val name : _ t -> string + (** Return the name/short description of an error. *) + + val mnemonic : _ t -> string + (** Return the mnemonic of an error. *) + + val print : Format.formatter -> ('a t * 'a) -> unit + (** Print an error. *) + + val print_hints : Format.formatter -> ('a t * 'a) -> unit + (** Print an error's hints. 
*) + + val print_doc : Format.formatter -> _ t -> unit + (** Print the (long) documentation for an error. *) + + val user_interrupt : unit t + (** Error for a user interrupt. *) + + val timeout : unit t + (** Error for timeouts. *) + + val spaceout : unit t + (** Error for spaceouts. *) + + val internal_error : (Format.formatter -> unit) t + (** Internal error, the param is a delayed printer + (typically created using `Format.dprintf`). *) + + val uncaught_exn : (exn * Printexc.raw_backtrace) t + (** Error for an uncaught exn (together with a backtrace). *) + + val mk : + ?code:Code.t -> + mnemonic:string -> + message:(Format.formatter -> 'a -> unit) -> + ?hints:('a -> (Format.formatter -> unit) option) list -> + name:string -> ?doc:(Format.formatter -> unit) -> + unit -> 'a t + (** Create a new error. *) + +end + +(** {2 Warnings} *) + +module Warning : sig + + type 'a t = 'a warning + (** The type of warnings, parameterized by their payload/parameters. *) + + val code : _ t -> Code.t + (** Return the return code of an error. *) + + val name : _ t -> string + (** Return the name (short description) of a warning. *) + + val mnemonic : _ t -> string + (** Return the mnemonic of a warning. *) + + val print : Format.formatter -> ('a t * 'a) -> unit + (** Print a warning. *) + + val print_hints : Format.formatter -> ('a t * 'a) -> unit + (** Print an warning's hints. *) + + val print_doc : Format.formatter -> _ t -> unit + (** Print the (long) documentation of a warning. *) + + val mk : + ?code:Code.t -> + mnemonic:string -> + message:(Format.formatter -> 'a -> unit) -> + ?hints:('a -> (Format.formatter -> unit) option) list -> + name:string -> ?doc:(Format.formatter -> unit) -> + unit -> 'a t + (** Create a new warning. *) + + module Status : sig + + type t = + | Disabled + | Enabled + | Fatal (**) + (** The status of a report. *) + + val print : Format.formatter -> t -> unit + (** Print a status. *) + + val to_string : t -> string + (** Print into a string. 
*) + + end + +end + +(** {2 Report configuration} *) + +module Conf : sig + + type t + (** The type of configuration for reports. *) + + val mk : default:Warning.Status.t -> t + (** Create a configuration with a default status for warnings. *) + + val status : t -> _ Warning.t -> Warning.Status.t + (** Status for an individual warning. *) + + val disable : t -> [ `All | `Warning of any_warning ] -> t + (** Disable the warning. *) + + val enable : t -> [ `All | `Warning of any_warning ] -> t + (** Enable the warning. *) + + val fatal : t -> [ `All | `Warning of any_warning ] -> t + (** Make fatal the warning. *) + + val set_enabled : t -> [ `All | `Warning of any_warning ] -> t + (** Force the warning to be exactly enabled (and not fatal). *) + +end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/state.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/state.ml index e1a85b4d1b2e0db518c21229dc5f094135e38997..298a0b2028e54951405c7d25c5cae6e8eb00a668 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/state.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/state.ml @@ -1,18 +1,6 @@ (* This file is free software, part of dolmen. 
See file "LICENSE" for more information *) -(* Type definition & Exceptions *) -(* ************************************************************************* *) - -type perm = - | Allow - | Warn - | Error - -exception File_not_found of Dolmen.Std.Loc.full * string * string - -exception Input_lang_changed of Logic.language * Logic.language - (* Type definition *) (* ************************************************************************* *) @@ -22,11 +10,10 @@ type solve_state = unit type 'solve state = { - (* Debug option *) + (* Debug, warnings, and error options *) debug : bool; - - (* Warning/Error options *) - context : bool; + reports : Report.Conf.t; + loc_style : [ `Short | `Contextual ]; max_warn : int; cur_warn : int; @@ -54,7 +41,6 @@ type 'solve state = { (* Typechecking state *) type_state : ty_state; type_check : bool; - type_strict : bool; (* Solving state *) solve_state : 'solve; @@ -66,41 +52,82 @@ type 'solve state = { type t = solve_state state -(* Debug/print the state *) -(* ************************************************************************* *) - -let debug fmt st = - Format.fprintf fmt - "@[<hv 2>{@ file: %s; }@]" (Dolmen.Std.Loc.file_name st.input_file_loc) +exception Error of t (* State and locations *) (* ************************************************************************* *) -let pp_loc fmt o = +let loc_input st (loc : Dolmen.Std.Loc.loc) = + (* sanity check to avoid pp_loc trying to read and/or print + too much when printing the source code snippet) *) + if loc.max_line_length >= 150 || + loc.stop_line - loc.start_line >= 100 then + None + else begin + match st.loc_style, st.input_source with + | _, `Stdin -> None + | `Short, _ -> None + | `Contextual, `File filename -> + let full_filename = Filename.concat st.input_dir filename in + let input = Pp_loc.Input.file full_filename in + Some input + | `Contextual, `Raw (_, contents) -> + let input = Pp_loc.Input.string contents in + Some input + end + +let pp_loc st fmt o = match o 
with | None -> () | Some loc -> if Dolmen.Std.Loc.is_dummy loc then () - else Format.fprintf fmt "%a:@ " Dolmen.Std.Loc.fmt loc - -let error ?loc _ format = + else begin + match loc_input st loc with + | None -> + Format.fprintf fmt "%a:@ " + Fmt.(styled `Bold @@ styled (`Fg (`Hi `White)) Dolmen.Std.Loc.fmt) loc + | Some input -> + let locs = Dolmen.Std.Loc.lexing_positions loc in + Format.fprintf fmt "%a:@ %a" + Fmt.(styled `Bold @@ styled (`Fg (`Hi `White)) Dolmen.Std.Loc.fmt) loc + (Pp_loc.pp ~max_lines:3 ~input) [locs] + end + +let error ?loc st error payload = let loc = Dolmen.Std.Misc.opt_map loc Dolmen.Std.Loc.full_loc in - Format.kfprintf (fun _ -> exit 1) Format.err_formatter - ("@[<v>%a%a @[<hov>" ^^ format ^^ "@]@]@.") - Fmt.(styled `Bold @@ styled (`Fg (`Hi `White)) pp_loc) loc + let aux _ = Code.exit (Report.Error.code error) in + Format.kfprintf aux Format.err_formatter + ("@[<v>%a%a @[<hov>%a@]%a@]@.") + (pp_loc st) loc Fmt.(styled `Bold @@ styled (`Fg (`Hi `Red)) string) "Error" + Report.Error.print (error, payload) + Report.Error.print_hints (error, payload) -let warn ?loc st format = +let warn ?loc st warn payload = let loc = Dolmen.Std.Misc.opt_map loc Dolmen.Std.Loc.full_loc in - let aux _ = { st with cur_warn = st.cur_warn + 1; } in - if st.cur_warn >= st.max_warn then - Format.ikfprintf aux Format.err_formatter format - else + match Report.Conf.status st.reports warn with + | Disabled -> st + | Enabled -> + let aux _ = { st with cur_warn = st.cur_warn + 1; } in + if st.cur_warn >= st.max_warn then + aux st + else + Format.kfprintf aux Format.err_formatter + ("@[<v>%a%a @[<hov>%a@]%a@]@.") + (pp_loc st) loc + Fmt.(styled `Bold @@ styled (`Fg (`Hi `Magenta)) string) "Warning" + Report.Warning.print (warn, payload) + Report.Warning.print_hints (warn, payload) + + | Fatal -> + let aux _ = Code.exit (Report.Warning.code warn) in Format.kfprintf aux Format.err_formatter - ("@[<v>%a%a @[<hov>" ^^ format ^^ "@]@]@.") - Fmt.(styled `Bold @@ styled (`Fg 
(`Hi `White)) pp_loc) loc - Fmt.(styled `Bold @@ styled (`Fg (`Hi `Magenta)) string) "Warning" + ("@[<v>%a%a @[<hov>%a@]%a@]@.") + (pp_loc st) loc + Fmt.(styled `Bold @@ styled (`Fg (`Hi `Red)) string) "Fatal Warning" + Report.Warning.print (warn, payload) + Report.Warning.print_hints (warn, payload) let flush st () = let aux _ = { st with cur_warn = 0; } in @@ -141,48 +168,40 @@ let ty_state { type_state; _ } = type_state let set_ty_state st type_state = { st with type_state; } let typecheck st = st.type_check -let strict_typing { type_strict; _ } = type_strict let is_interactive = function | { input_source = `Stdin; _ } -> true | _ -> false -let prelude _ = "prompt>" +let prelude st = + match st.input_lang with + | None -> "prompt> @?" + | Some l -> + Format.asprintf "(%s)# @?" (Logic.string_of_language l) (* Setting language *) (* ************************************************************************* *) +let full_mode_switch = + Report.Warning.mk ~code:Code.generic ~mnemonic:"full-mode-switch" + ~message:(fun fmt lang -> + Format.fprintf fmt + "The@ %s@ format@ does@ not@ support@ \ + incremental@ mode,@ switching@ to@ full@ mode" + lang) + ~name:"Forced switch to full mode" () + let switch_to_full_mode lang t = let old_mode = input_mode t in let t = set_mode t `Full in match old_mode with - | Some `Incremental -> - warn t - "The@ %s@ format@ does@ not@ support@ \ - incremental@ mode,@ switching@ to@ full@ mode" - lang + | Some `Incremental -> warn t full_mode_switch lang | _ -> t -let set_lang_aux t l = +let set_lang t l = let t = { t with input_lang = Some l; } in match l with - | Logic.Alt_ergo -> switch_to_full_mode "Alt-Ergo" t + | Logic.Alt_ergo -> + switch_to_full_mode "Alt-Ergo" t | _ -> t -let set_lang t l = - match t.input_lang with - | None -> set_lang_aux t l - | Some l' -> - if l = l' - then set_lang_aux t l - else raise (Input_lang_changed (l', l)) - -(* Full state *) -(* ************************************************************************* 
*) - -let start _ = () -let stop _ = () - -let file_not_found ~loc ~dir ~file = - raise (File_not_found (loc, dir, file)) - diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/state_intf.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/state_intf.ml index df8675549c8871a9955fafe52b91124b0d4ee37a..97957ab21b3d5848d20815f667f8f86482a7f299 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/state_intf.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/state_intf.ml @@ -13,13 +13,6 @@ type source = [ | `Raw of string * string ] -type phase = [ - | `Parsing - | `Include - | `Typing - | `Solving -] - type mode = [ | `Full | `Incremental @@ -27,12 +20,32 @@ type mode = [ (** {1 Signatures} *) +module type Common = sig + + type t + (** The type of state *) + + exception Error of t + (** Convenient exception. *) + + val warn : + ?loc:Dolmen.Std.Loc.full -> + t -> 'a Report.Warning.t -> 'a -> t + (** Emit a warning *) + + val error : + ?loc:Dolmen.Std.Loc.full -> + t -> 'a Report.Error.t -> 'a -> t + (** Emit an error. *) + +end + (** This modules defines the smallest signatures for a solver state that allow to instantiate the {Pipeline.Make} functor. *) module type Pipeline = sig - type t - (** The type of values recording options for the current run. *) + include Common + (** Common interface for the state. *) val time_limit : t -> float (** The time limit for one original statement (in seconds). *) @@ -46,30 +59,18 @@ end to instantiate the {Parser.Pipe} functor. *) module type Parser_pipe = sig - type t - (** The type of state *) + include Common + (** common interface *) type term (** The type of solver terms. *) - val warn : - ?loc:Dolmen.Std.Loc.full -> - t -> ('a, Format.formatter, unit, t) format4 -> - 'a - (** Emit a warning *) - val input_file_loc : t -> Dolmen.Std.Loc.file (** Current input file location meta-data. *) val set_input_file_loc : t -> Dolmen.Std.Loc.file -> t (** Set the input file location meta-data. 
*) - val start : phase -> unit - (** Hook at the start of a phase *) - - val stop : phase -> unit - (** Hook at the end of a phase *) - val prelude : t -> string (** Some prelude to print at the begining of lines when in interactive mode. *) @@ -95,27 +96,18 @@ module type Parser_pipe = sig val input_source : t -> source (** Return the input source. *) - val file_not_found : - loc:Dolmen.Std.Loc.full -> dir:string -> file:string -> 'a - (** Callback for when a file specified by the input source is not found. *) - end (** This modules defines the smallest signatures for a solver state that allow to instantiate the {Typer.Make} functor. *) module type Typer = sig + include Common + (** common interface *) + type ty_state (** The type of state used by the typer. *) - type t - (** The type for the global state. *) - - val warn : - ?loc:Dolmen.Std.Loc.full -> t -> - ('a, Format.formatter, unit, t) format4 -> 'a - (** Emit a warning *) - val input_file_loc : t -> Dolmen.Std.Loc.file (** CUrrent input file location meta-data. *) @@ -125,9 +117,6 @@ module type Typer = sig val typecheck : t -> bool (** Whether to type-check expressions. *) - val strict_typing : t -> bool - (** Whether to be strict about typing warnings/errors *) - val ty_state : t -> ty_state (** Returns the typing state associated. *) @@ -140,8 +129,8 @@ end to instantiate the {Typer.Pipe} functor. *) module type Typer_pipe = sig - type t - (** The type of state *) + include Common + (** common interface *) val input_lang : t -> Logic.language option (** Return the input language (if any). *) @@ -153,24 +142,12 @@ end to instantiate the {Headers.Pipe} functor. *) module type Header_pipe = sig - type t - (** The type of state *) + include Common + (** common interface *) type header_state (** The type of state used for the header check*) - val warn : - ?loc:Dolmen.Std.Loc.full -> - t -> ('a, Format.formatter, unit, t) format4 -> - 'a - (** Emit an error. 
*) - - val error : - ?loc:Dolmen.Std.Loc.full -> - t -> ('a, Format.formatter, unit, t) format4 -> - 'a - (** Emit an error. *) - val input_file_loc : t -> Dolmen.Std.Loc.file (** Current input file location meta-data. *) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/typer.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/typer.ml index 5662c39df41ea9f80957eb2accba971f7467e2f3..9b73ace63732b5cc55f61f9e956051ec246a2ffb 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/typer.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/typer.ml @@ -4,7 +4,7 @@ (* Dolmen_type functors instantiation *) (* ************************************************************************ *) -module T = Dolmen_type.Tff.Make +module T = Dolmen_type.Thf.Make (Dolmen.Std.Tag)(Dolmen.Std.Expr.Ty)(Dolmen.Std.Expr.Term) (* Definitions builtin *) @@ -21,13 +21,29 @@ module Subst = Dolmen_type.Def.Subst(T)(struct (* AE builtins *) module Ae_Core = - Dolmen_type.Core.Ae.Tff(T) + Dolmen_type.Core.Ae.Tff(T)(Dolmen.Std.Expr.Tags) (Dolmen.Std.Expr.Ty)(Dolmen.Std.Expr.Term) +module Ae_Arith = + Dolmen_type.Arith.Ae.Tff(T) + (Dolmen.Std.Expr.Ty)(Dolmen.Std.Expr.Term) +module Ae_Arrays = + Dolmen_type.Arrays.Ae.Tff(T) + (Dolmen.Std.Expr.Ty)(Dolmen.Std.Expr.Term) +module Ae_Bitv = + Dolmen_type.Bitv.Ae.Tff(T) + (Dolmen.Std.Expr.Ty)(Dolmen.Std.Expr.Term.Bitv) + +(* Dimacs builtin *) +module Dimacs = + Dolmen_type.Core.Dimacs.Tff(T)(Dolmen.Std.Expr.Term) (* Tptp builtins *) module Tptp_Core = Dolmen_type.Core.Tptp.Tff(T) (Dolmen.Std.Expr.Ty)(Dolmen.Std.Expr.Term) +module Tptp_Core_Ho = + Dolmen_type.Core.Tptp.Thf(T) + (Dolmen.Std.Expr.Ty)(Dolmen.Std.Expr.Term) module Tptp_Arith = Dolmen_type.Arith.Tptp.Tff(T) (Dolmen.Std.Expr.Ty)(Dolmen.Std.Expr.Term) @@ -61,6 +77,764 @@ module Smtlib2_String = (* Zf *) module Zf_Core = Dolmen_type.Core.Zf.Tff(T)(Dolmen.Std.Expr.Tags) + (Dolmen.Std.Expr.Ty)(Dolmen.Std.Expr.Term) +module Zf_arith = + Dolmen_type.Arith.Zf.Thf(T) + 
(Dolmen.Std.Expr.Ty)(Dolmen.Std.Expr.Term) + +(* Printing helpers *) +(* ************************************************************************ *) + +let pp_wrap pp fmt x = + Format.fprintf fmt "`%a`" pp x + +let print_symbol fmt symbol = + match (symbol : T.symbol) with + | Id id -> Dolmen.Std.Id.print fmt id + | Builtin builtin -> Dolmen.Std.Term.print_builtin fmt builtin + +let print_res fmt res = + match (res : T.res) with + | T.Ttype -> Format.fprintf fmt "Type" + | T.Ty ty -> + Format.fprintf fmt "the type@ %a" (pp_wrap Dolmen.Std.Expr.Ty.print) ty + | T.Term t -> + Format.fprintf fmt "the term@ %a" (pp_wrap Dolmen.Std.Expr.Term.print) t + | T.Tags _ -> Format.fprintf fmt "some tags" + +let print_opt pp fmt = function + | None -> Format.fprintf fmt "<none>" + | Some x -> pp fmt x + +let rec print_expected fmt = function + | [] -> assert false + | x :: [] -> Format.fprintf fmt "%d" x + | x :: r -> Format.fprintf fmt "%d or %a" x print_expected r + +let print_fragment (type a) fmt (env, fragment : T.env * a T.fragment) = + match fragment with + | T.Ast ast -> pp_wrap Dolmen.Std.Term.print fmt ast + | T.Def d -> Dolmen.Std.Statement.print_def fmt d + | T.Decl d -> Dolmen.Std.Statement.print_decl fmt d + | T.Defs d -> + Dolmen.Std.Statement.print_group Dolmen.Std.Statement.print_def fmt d + | T.Decls d -> + Dolmen.Std.Statement.print_group Dolmen.Std.Statement.print_decl fmt d + | T.Located _ -> + let full = T.fragment_loc env fragment in + let loc = Dolmen.Std.Loc.full_loc full in + Format.fprintf fmt "<located at %a>" Dolmen.Std.Loc.fmt loc + +let decl_loc d = + match (d : Dolmen.Std.Statement.decl) with + | Record { loc; _ } + | Abstract { loc; _ } + | Inductive { loc; _ } -> loc + +let print_bound_kind fmt = function + | `Quantified -> Format.fprintf fmt "quantified" + | `Letbound -> Format.fprintf fmt "let-bound" + +let print_reason fmt r = + match (r : T.reason) with + | Builtin -> + Format.fprintf fmt "defined by a builtin theory" + | Bound (file, ast) 
-> + Format.fprintf fmt "bound at %a" + Dolmen.Std.Loc.fmt_pos (Dolmen.Std.Loc.loc file ast.loc) + | Inferred (file, ast) -> + Format.fprintf fmt "inferred at %a" + Dolmen.Std.Loc.fmt_pos (Dolmen.Std.Loc.loc file ast.loc) + | Defined (file, d) -> + Format.fprintf fmt "defined at %a" + Dolmen.Std.Loc.fmt_pos (Dolmen.Std.Loc.loc file d.loc) + | Declared (file, d) -> + Format.fprintf fmt "declared at %a" + Dolmen.Std.Loc.fmt_pos (Dolmen.Std.Loc.loc file (decl_loc d)) + +let print_reason_opt fmt = function + | Some r -> print_reason fmt r + | None -> Format.fprintf fmt "<location missing>" + +let rec print_wildcard_origin fmt = function + | T.Arg_of src + | T.Ret_of src -> print_wildcard_origin fmt src + | T.From_source _ast -> + Format.fprintf fmt "the@ contents@ of@ a@ source@ wildcard" + | T.Added_type_argument _ast -> + Format.fprintf fmt "the@ implicit@ type@ to@ provide@ to@ an@ application" + | T.Symbol_inference { symbol; symbol_loc = _; inferred_ty; } -> + Format.fprintf fmt + "the@ type@ for@ the@ symbol@ %a@ to@ be@ %a" + (pp_wrap Dolmen.Std.Id.print) symbol + (pp_wrap Dolmen.Std.Expr.Ty.print) inferred_ty + | T.Variable_inference { variable; variable_loc = _; inferred_ty; } -> + Format.fprintf fmt + "the@ type@ for@ the@ quantified@ variable@ %a@ to@ be@ %a" + (pp_wrap Dolmen.Std.Id.print) variable + (pp_wrap Dolmen.Std.Expr.Ty.print) inferred_ty + +let rec print_wildcard_path env fmt = function + | T.Arg_of src -> + Format.fprintf fmt "one@ of@ the@ argument@ types@ of@ %a" + (print_wildcard_path env) src + | T.Ret_of src -> + Format.fprintf fmt "the@ return@ type@ of@ %a" + (print_wildcard_path env) src + | _ -> + Format.fprintf fmt "that@ type" + +let rec print_wildcard_loc env fmt = function + | T.Arg_of src + | T.Ret_of src -> print_wildcard_loc env fmt src + | T.From_source ast -> + let loc = Dolmen.Std.Loc.full_loc (T.loc env ast.loc) in + Format.fprintf fmt + "The@ source@ wildcard@ is@ located@ at@ %a" + Dolmen.Std.Loc.fmt_pos loc + | 
T.Added_type_argument ast -> + let loc = Dolmen.Std.Loc.full_loc (T.loc env ast.loc) in + Format.fprintf fmt + "The@ application@ is@ located@ at@ %a" + Dolmen.Std.Loc.fmt_pos loc + | T.Symbol_inference { symbol; symbol_loc; inferred_ty = _; } -> + let loc = Dolmen.Std.Loc.full_loc (T.loc env symbol_loc) in + Format.fprintf fmt + "Symbol@ %a@ is@ located@ be@ %a" + (pp_wrap Dolmen.Std.Id.print) symbol + Dolmen.Std.Loc.fmt_pos loc + | T.Variable_inference { variable; variable_loc; inferred_ty = _; } -> + let loc = Dolmen.Std.Loc.full_loc (T.loc env variable_loc) in + Format.fprintf fmt + "Variable@ %a@ is@ bound@ at@ %a" + (pp_wrap Dolmen.Std.Id.print) variable + Dolmen.Std.Loc.fmt_pos loc + +let rec print_wildcard_origin_loc env fmt = function + | T.Arg_of src + | T.Ret_of src -> print_wildcard_origin_loc env fmt src + | T.From_source ast -> + let loc = Dolmen.Std.Loc.full_loc (T.loc env ast.loc) in + Format.fprintf fmt + "a@ source@ wildcard@ located@ at@ %a" + Dolmen.Std.Loc.fmt_pos loc + | T.Added_type_argument ast -> + let loc = Dolmen.Std.Loc.full_loc (T.loc env ast.loc) in + Format.fprintf fmt + "the@ implicit@ type@ argument@ to@ provide@ to@ \ + the@ polymorphic@ application@ at@ %a" + Dolmen.Std.Loc.fmt_pos loc + | T.Symbol_inference { symbol; symbol_loc; inferred_ty = _; } -> + let loc = Dolmen.Std.Loc.full_loc (T.loc env symbol_loc) in + Format.fprintf fmt + "the@ type@ for@ the@ symbol@ %a,@ located@ at@ %a" + (pp_wrap Dolmen.Std.Id.print) symbol + Dolmen.Std.Loc.fmt_pos loc + | T.Variable_inference { variable; variable_loc; inferred_ty = _; } -> + let loc = Dolmen.Std.Loc.full_loc (T.loc env variable_loc) in + Format.fprintf fmt + "the@ type@ for@ the@ variable@ %a,@ located@ at@ %a" + (pp_wrap Dolmen.Std.Id.print) variable + Dolmen.Std.Loc.fmt_pos loc + + + + +(* Hint printers *) +(* ************************************************************************ *) + +let fo_hint _ = + Some ( + Format.dprintf "%a" Format.pp_print_text + "This statement was 
parsed as a first-order statement") + +let text_hint = function + | "" -> None + | msg -> Some (Format.dprintf "%a" Format.pp_print_text msg) + +let poly_hint (c, expected, actual) = + let n_ty, n_t = Dolmen.Std.Expr.Term.Const.arity c in + let total_arity = n_ty + n_t in + match expected with + | [x] when x = total_arity && actual = n_t -> + Some ( + Format.dprintf "%a" Format.pp_print_text + "the head of the application is polymorphic, \ + you probably forgot the type arguments@]") + | [x] when x = n_t && n_ty <> 0 -> + Some ( + Format.dprintf "%a" Format.pp_print_text + "it looks like the language enforces implicit polymorphism, \ + i.e. no type arguments are to be provided to applications \ + (and instead type annotation/coercions should be used).") + | _ :: _ :: _ -> + Some ( + Format.dprintf "%a" Format.pp_print_text + "this is a polymorphic function, and multiple accepted arities \ + are possible because the language supports inference of all type \ + arguments when none are given in an application.") + | _ -> None + +let literal_hint b id = + if not b then None else + match (id : Dolmen.Std.Id.t) with + | { ns = Value Integer; name = Simple _; } -> + Some ( + Format.dprintf "%a" Format.pp_print_text + "The current logic does not include integer arithmtic") + | { ns = Value Rational; name = Simple _; } -> + Some ( + Format.dprintf "%a" Format.pp_print_text + "The current logic does not include rational arithmtic") + | { ns = Value Real; name = Simple _; } -> + Some ( + Format.dprintf "%a" Format.pp_print_text + "The current logic does not include real arithmtic") + | { ns = Term; name = Indexed { basename = s; indexes = _; }; } + when (String.length s >= 2 && s.[0] = 'b' && s.[1] = 'v') -> + Some ( + Format.dprintf "%a" Format.pp_print_text + "The current logic does not include extended bitvector literals") + | _ -> None + +let poly_arg_hint _ = + Some ( + Format.dprintf "%a" Format.pp_print_text + "The typechecker enforces prenex/rank-1 polymorphism. 
\ + In languages with explicit type arguments for polymorphic functions, \ + you must apply this term to the adequate number of type arguments to \ + make it monomorph.") + +let poly_param_hint _ = + Some ( + Format.dprintf "%a" Format.pp_print_text + "The typechecker enforces prenex/rank-1 polymorphism. \ + This means that only monomorphic types can appear as + parameters of a function type.") + + +(* Typing warnings *) +(* ************************************************************************ *) + +let code = Code.typing + +let unused_type_variable = + Report.Warning.mk ~code ~mnemonic:"unused-type-var" + ~message:(fun fmt (kind, v) -> + Format.fprintf fmt + "The following %a type variable is unused: '%a'" + print_bound_kind kind Dolmen.Std.Expr.Print.id v) + ~name:"Unused bound type variable" () + +let unused_term_variable = + Report.Warning.mk ~code ~mnemonic:"unused-term-var" + ~message:(fun fmt (kind, v) -> + Format.fprintf fmt + "The following %a term variable is unused: `%a`" + print_bound_kind kind Dolmen.Std.Expr.Print.id v) + ~name:"Unused bound term variable" () + +let error_in_attribute = + Report.Warning.mk ~code ~mnemonic:"error-in-attr" + ~message:(fun fmt exn -> + Format.fprintf fmt + "Exception while typing attribute:@ %s" + (Printexc.to_string exn)) + ~name:"Exception while typing an attribute" () + +let superfluous_destructor = + Report.Warning.mk ~code:Code.bug ~mnemonic:"extra-dstr" + ~message:(fun fmt () -> + Format.fprintf fmt + "Superfluous destructor returned by term implementation") + ~name:"Superfluous destructor" () + +let shadowing = + Report.Warning.mk ~code ~mnemonic:"shadowing" + ~message:(fun fmt (id, old) -> + Format.fprintf fmt + "Shadowing: %a was already %a" + (pp_wrap Dolmen.Std.Id.print) id + print_reason_opt (T.binding_reason old)) + ~name:"Shadowing of identifier" () + +let almost_linear = + Report.Warning.mk ~code ~mnemonic:"almost-linear-expr" + ~message:(fun fmt _ -> + Format.fprintf fmt + "This is a non-linear 
expression according to the smtlib spec.") + ~hints:[text_hint] + ~name:"Non-linear expression in linear arithmetic" () + +let logic_reset = + Report.Warning.mk ~code ~mnemonic:"logic-reset" + ~message:(fun fmt old_loc -> + Format.fprintf fmt "Logic was already set at %a" + Dolmen.Std.Loc.fmt_pos old_loc) + ~name:"Multiple set-logic statements" () + +let unknown_logic = + Report.Warning.mk ~code ~mnemonic:"unknown-logic" + ~message:(fun fmt s -> + Format.fprintf fmt "Unknown logic: %s" s) + ~name:"Unknown logic" () + +let set_logic_not_supported = + Report.Warning.mk ~code ~mnemonic:"set-logic-ignored" + ~message:(fun fmt () -> + Format.fprintf fmt + "Set logic is not supported for the current language") + ~name:"Set logic not supported for current language" () + +let unknown_warning = + Report.Warning.mk ~code:Code.bug ~mnemonic:"unknown-warning" + ~message:(fun fmt cstr_name -> + Format.fprintf fmt + "@[<v>Unknown warning:@ %s@ please report upstream, ^^@]" cstr_name) + ~name:"Unknown warning" () + +(* Typing errors *) +(* ************************************************************************ *) + +let not_well_founded_datatype = + Report.Error.mk ~code ~mnemonic:"wf-datatype" + ~message:(fun fmt () -> + Format.fprintf fmt "Not well founded datatype declaration") + ~name:"Not Well Founded Datatype" () + +let expect_error = + Report.Error.mk ~code ~mnemonic:"typing-bad-kind" + ~message:(fun fmt (expected, got) -> + Format.fprintf fmt "Expected %s but got %a" + expected (print_opt print_res) got) + ~name:"Bad kind" () + +let bad_index_arity = + Report.Error.mk ~code ~mnemonic:"bad-index-arity" + ~message:(fun fmt (s, expected, actual) -> + Format.fprintf fmt + "The indexed family of operators '%s' expects %d indexes, but was given %d" + s expected actual) + ~name:"Incorrect arity for indexed operator" () + +let bad_type_arity = + Report.Error.mk ~code ~mnemonic:"bad-type-arity" + ~message:(fun fmt (c, actual) -> + Format.fprintf fmt "Bad arity: got %d arguments 
for type constant@ %a" + actual Dolmen.Std.Expr.Print.ty_cst c) + ~name:"Incorrect Arity for type constant application" () + +let bad_op_arity = + Report.Error.mk ~code ~mnemonic:"bad-op-arity" + ~message:(fun fmt (symbol, expected, actual) -> + Format.fprintf fmt + "Bad arity for symbol '%a':@ expected %a arguments but got %d" + print_symbol symbol print_expected expected actual) + ~name:"Incorrect arity for operator application" () + +let bad_cstr_arity = + Report.Error.mk ~code ~mnemonic:"bad-cstr-arity" + ~hints:[poly_hint] + ~message:(fun fmt (c, expected, actual) -> + Format.fprintf fmt + "Bad arity: expected %a arguments but got %d arguments for constructor@ %a" + print_expected expected actual Dolmen.Std.Expr.Print.term_cst c) + ~name:"Incorrect arity for constructor application" () + +let bad_term_arity = + Report.Error.mk ~code ~mnemonic:"bad-term-arity" + ~hints:[poly_hint] + ~message:(fun fmt (c, expected, actual) -> + Format.fprintf fmt + "Bad arity: expected %a but got %d arguments for function@ %a" + print_expected expected actual Dolmen.Std.Expr.Print.term_cst c) + ~name:"Incorrect arity for term application" () + +let bad_poly_arity = + Report.Error.mk ~code ~mnemonic:"bad-poly-arity" + ~message:(fun fmt (vars, args) -> + let expected = List.length vars in + let provided = List.length args in + if provided > expected then + (* Over application *) + Format.fprintf fmt + "This@ function@ expected@ at@ most@ %d@ type@ arguments,@ \ + but@ was@ here@ provided@ with@ %d@ type@ arguments." + expected provided + else + (* under application *) + Format.fprintf fmt + "This@ function@ expected@ exactly@ %d@ type@ arguments,@ \ + since@ term@ arguments@ are@ also@ provided,@ but@ was@ given@ \ + %d@ arguments." 
+ expected provided) + ~name:"Incorrect arity for type arguments of a term application" () + +let over_application = + Report.Error.mk ~code ~mnemonic:"over-application" + ~message:(fun fmt over_args -> + let over = List.length over_args in + Format.fprintf fmt + "Over application:@ this@ application@ has@ %d@ \ + too@ many@ term@ arguments." over) + ~name:"Too many arguments for an application" () + +let repeated_record_field = + Report.Error.mk ~code ~mnemonic:"repeated-field" + ~message:(fun fmt f -> + Format.fprintf fmt + "The field %a is used more than once in this record construction" + Dolmen.Std.Expr.Print.id f) + ~name:"Repeated field in a record construction" () + +let missing_record_field = + Report.Error.mk ~code ~mnemonic:"missing-field" + ~message:(fun fmt f -> + Format.fprintf fmt + "The field %a is missing from this record construction" + Dolmen.Std.Expr.Print.id f) + ~name:"Missing field in a record construction" () + +let mismatch_record_type = + Report.Error.mk ~code ~mnemonic:"mismatch-field" + ~message:(fun fmt (f, r) -> + Format.fprintf fmt + "The field %a does not belong to record type %a" + Dolmen.Std.Expr.Print.id f Dolmen.Std.Expr.Print.id r) + ~name:"Field of another record type in a record construction" () + +let ty_var_application = + Report.Error.mk ~code ~mnemonic:"type-var-app" + ~message:(fun fmt v -> + Format.fprintf fmt + "Cannot apply arguments to type variable@ %a" Dolmen.Std.Expr.Print.id v) + ~name:"Application of a type variable" () + +let var_application = + Report.Error.mk ~code ~mnemonic:"term-var-app" + ~hints:[fo_hint] + ~message:(fun fmt v -> + Format.fprintf fmt + "Cannot apply arguments to term variable@ %a" Dolmen.Std.Expr.Print.id v) + ~name:"Application of a term variable" () + +let type_mismatch = + Report.Error.mk ~code ~mnemonic:"type-mismatch" + ~message:(fun fmt (t, expected) -> + Format.fprintf fmt "The term:@ %a@ has type@ %a@ but was expected to be of type@ %a" + (pp_wrap Dolmen.Std.Expr.Term.print) t + 
(pp_wrap Dolmen.Std.Expr.Ty.print) (Dolmen.Std.Expr.Term.ty t) + (pp_wrap Dolmen.Std.Expr.Ty.print) expected) + ~name:"Incorrect argument type in an application" () + +let var_in_binding_pos_underspecified = + Report.Error.mk ~code ~mnemonic:"var-binding-infer" + ~message:(fun fmt () -> + Format.fprintf fmt "Cannot infer type for a variable in binding position") + ~name:"Inference of a variable in binding position's type" () + +let unhandled_builtin = + Report.Error.mk ~code:Code.bug ~mnemonic:"unhandled-builtin" + ~message:(fun fmt b -> + Format.fprintf fmt + "The following Dolmen builtin is currently not handled@ %a.@ Please report upstream" + (pp_wrap Dolmen.Std.Term.print_builtin) b) + ~name:"Unhandled builtin in typechecking" () + +let cannot_tag_tag = + Report.Error.mk ~code:Code.bug ~mnemonic:"tag-tag" + ~message:(fun fmt () -> + Format.fprintf fmt "Cannot apply a tag to another tag (only expressions)") + ~name:"Trying to tag a tag" () + +let cannot_tag_ttype = + Report.Error.mk ~code:Code.bug ~mnemonic:"tag-ttype" + ~message:(fun fmt () -> + Format.fprintf fmt "Cannot apply a tag to the Ttype constant") + ~name:"Tying to tag Ttype" () + +let unbound_identifier = + Report.Error.mk ~code ~mnemonic:"unbound-id" + ~message:(fun fmt (id, _, _) -> + Format.fprintf fmt "Unbound identifier:@ %a" + (pp_wrap Dolmen.Std.Id.print) id) + ~hints:[ + (fun (id, _, lit_hint) -> literal_hint lit_hint id); + (fun (_, msg, _) -> text_hint msg);] + ~name:"Unbound identifier" () + +let multiple_declarations = + Report.Error.mk ~code ~mnemonic:"redeclaration" + ~message:(fun fmt (id, old) -> + Format.fprintf fmt + "Duplicate declaration of %a, which was already %a" + (pp_wrap Dolmen.Std.Id.print) id + print_reason_opt (T.binding_reason old)) + ~name:"Multiple declarations of the same symbol" () + +let forbidden_quant = + Report.Error.mk ~code ~mnemonic:"forbidden-quant" + ~message:(fun fmt () -> + Format.fprintf fmt "Quantified expressions are forbidden by the logic.") + 
~name:"Forbidden quantifier" () + +let missing_destructor = + Report.Error.mk ~code:Code.bug ~mnemonic:"missing-destructor" + ~message:(fun fmt id -> + Format.fprintf fmt + "The destructor %a@ was not provided by the user implementation.@ Please report upstream." + (pp_wrap Dolmen.Std.Id.print) id) + ~name:"Missing destructor in implementation" () + +let higher_order_app = + Report.Error.mk ~code ~mnemonic:"ho-app" + ~message:(fun fmt () -> + Format.fprintf fmt "Higher-order applications are not handled by the Tff typechecker") + ~hints:[fo_hint] + ~name:"Higher-order application" () + +let higher_order_type = + Report.Error.mk ~code ~mnemonic:"ho-type" + ~message:(fun fmt () -> + Format.fprintf fmt "Higher-order types are not handled by the Tff typechecker") + ~hints:[fo_hint] + ~name:"Higher-order type" () + +let higher_order_env_in_tff_typer = + Report.Error.mk ~code:Code.bug ~mnemonic:"ho-env-in-tff" + ~message:(fun fmt () -> + Format.fprintf fmt + "Programmer error: trying to create a typing env for \ + higher-order with the first-order typechecker.") + ~name:"Higher order env in TFF type-checker" () + +let poly_arg = + Report.Error.mk ~code ~mnemonic:"poly-arg" + ~message:(fun fmt () -> + Format.fprintf fmt "Polymorphic terms cannot be given as argument of a function.") + ~hints:[poly_arg_hint] + ~name:"Polymorphic argument in an application" () + +let non_prenex_polymorphism = + Report.Error.mk ~code ~mnemonic:"poly-param" + ~message:(fun fmt ty -> + Format.fprintf fmt "The following polymorphic type occurs in a \ + non_prenex position: %a" + (pp_wrap Dolmen.Std.Expr.Ty.print) ty) + ~hints:[poly_param_hint] + ~name:"Polymorphic function parameter" () + +let inference_forbidden = + Report.Error.mk ~code ~mnemonic:"inference-forbidden" + ~message:(fun fmt (env, w_src, inferred_ty) -> + Format.fprintf fmt + "@[<v>@[<hov>The@ typechecker@ inferred@ %a.@]@ \ + @[<hov>That@ inference@ lead@ to@ infer@ %a@ to@ be@ %a.@]@ \ + @[<hov>However,@ the@ language@ 
specified@ inference@ \ + at@ that@ point@ was@ forbidden@]@ \ + @[<hov>%a@]\ + @]" + print_wildcard_origin w_src + (print_wildcard_path env) w_src + (pp_wrap Dolmen.Std.Expr.Ty.print) inferred_ty + (print_wildcard_loc env) w_src) + ~name:"Forbidden type inference" () + +let inference_conflict = + Report.Error.mk ~code ~mnemonic:"inference-conflict" + ~message:(fun fmt (env, w_src, inferred_ty, allowed_tys) -> + Format.fprintf fmt + "@[<v>@[<hov>The@ typechecker@ inferred@ %a.@]@ \ + @[<hov>That@ inference@ lead@ to@ infer@ %a@ to@ be@ %a.@]@ \ + @[<hov>However,@ the@ language@ specified@ that@ only@ the@ following@ \ + types@ should@ be@ allowed@ there:@ %a@]@ \ + @[<hov>%a@]\ + @]" + print_wildcard_origin w_src + (print_wildcard_path env) w_src + (pp_wrap Dolmen.Std.Expr.Ty.print) inferred_ty + (Format.pp_print_list (pp_wrap Dolmen.Std.Expr.Ty.print) + ~pp_sep:(fun fmt () -> Format.fprintf fmt ",@ ")) allowed_tys + (print_wildcard_loc env) w_src) + ~name:"Conflict in type inference" () + +let inference_scope_escape = + Report.Error.mk ~code ~mnemonic:"inference-scope-escape" + ~message:(fun fmt (env, w_src, escaping_var, var_reason) -> + Format.fprintf fmt + "@[<v>@[<hov>The@ typechecker@ inferred@ %a.@]@ \ + @[<hov>That@ inference@ lead@ to@ infer@ %a@ to@ contain@ \ + the@ variable@ %a@ which@ is@ not@ in@ the@ scope@ \ + of@ the@ inferred@ type.@]@ \ + @[<hov>%a@]@ \ + @[<hov>Variable %a is@ %a.@]\ + @]" + print_wildcard_origin w_src + (print_wildcard_path env) w_src + (pp_wrap Dolmen.Std.Expr.Ty.Var.print) escaping_var + (print_wildcard_loc env) w_src + (pp_wrap Dolmen.Std.Expr.Ty.Var.print) escaping_var + print_reason_opt var_reason) + ~name:"Scope escape from a type due to inference" () + +let unbound_type_wildcards = + Report.Error.mk ~code ~mnemonic:"inference-incomplete" + ~message:(fun fmt (env, l) -> + let pp_sep fmt () = Format.fprintf fmt "@ " in + let pp_src fmt src = + Format.fprintf fmt "@[<hov>%a@]" (print_wildcard_origin_loc env) src + in + let 
pp_wild fmt (w, srcs) = + Format.fprintf fmt "%a, @[<v>%a@]" + (pp_wrap Dolmen.Std.Expr.Print.id) w + (Format.pp_print_list ~pp_sep pp_src) srcs + in + Format.fprintf fmt + "@[<v 2>@[<hov>%a@]:@ %a@]" + Format.pp_print_text + "Top-level formulas should be closed, but the following type variables are free" + (Format.pp_print_list ~pp_sep pp_wild) l + ) + ~name:"Under-specified type inference" () + +let unhandled_ast : (T.env * Dolmen_std.Term.t T.fragment) Report.Error.t = + Report.Error.mk ~code ~mnemonic:"unhandled-ast" + ~message:(fun fmt (env, fragment) -> + Format.fprintf fmt + "The typechecker did not know what to do with the following term.@ \ + Please report upstream.@\n%a" + print_fragment (env, fragment)) + ~name:"Unhandled AST fragment" () + +let bad_farray_arity = + Report.Error.mk ~code ~mnemonic:"bad-farray-arity" + ~message:(fun fmt () -> + Format.fprintf fmt "Functional array types in Alt-Ergo expect either one or two type \ + parameters.") + ~name:"Bad functional array arity" () + +let expected_arith_type = + Report.Error.mk ~code ~mnemonic:"arith-type-expected" + ~message:(fun fmt (ty, _) -> + Format.fprintf fmt "Arithmetic type expected but got@ %a.@" + (pp_wrap Dolmen.Std.Expr.Ty.print) ty) + ~hints:[(fun (_, msg) -> text_hint msg)] + ~name:"Non-arithmetic use of overloaded arithmetic function" () + +let expected_specific_arith_type = + Report.Error.mk ~code ~mnemonic:"arith-type-specific" + ~message:(fun fmt ty -> + Format.fprintf fmt "Cannot apply the arithmetic operation to type@ %a" + (pp_wrap Dolmen.Std.Expr.Ty.print) ty) + ~name:"Incorrect use of overloaded arithmetic function" () + +let forbidden_array_sort = + Report.Error.mk ~code ~mnemonic:"forbidden-array-sort" + ~message:(fun fmt _ -> + Format.fprintf fmt "Forbidden array sort.") + ~hints:[text_hint] + ~name:"Forbidden array sort" () + +let non_linear_expression = + Report.Error.mk ~code ~mnemonic:"non-linear-expr" + ~message:(fun fmt _ -> + Format.fprintf fmt "Non-linear expressions 
are forbidden by the logic.") + ~hints:[text_hint] + ~name:"Non linear expression in linear arithmetic logic" () + +let invalid_bin_bitvector_char = + Report.Error.mk ~code ~mnemonic:"invalid-bv-bin-char" + ~message:(fun fmt c -> + Format.fprintf fmt + "The character '%c' is invalid inside a binary bitvector litteral" c) + ~name:"Invalid character in a binary bitvector literal" () + +let invalid_hex_bitvector_char = + Report.Error.mk ~code ~mnemonic:"invalid-bv-hex-char" + ~message:(fun fmt c -> + Format.fprintf fmt + "The character '%c' is invalid inside an hexadecimal bitvector litteral" c) + ~name:"Invalid character in an hexadecimal bitvector literal" () + +let invalid_dec_bitvector_char = + Report.Error.mk ~code ~mnemonic:"invalid-bv-dec-char" + ~message:(fun fmt c -> + Format.fprintf fmt + "The character '%c' is invalid inside a decimal bitvector litteral" c) + ~name:"Invalid character in a decimal bitvector literal" () + +let invalid_hex_string_char = + Report.Error.mk ~code ~mnemonic:"invalid-hex-string-char" + ~message:(fun fmt s -> + Format.fprintf fmt + "The following is not a valid hexadecimal character: '%s'" s) + ~name:"Invalid hexadecimal character in a string literal" () + +let invalid_string_char = + Report.Error.mk ~code ~mnemonic:"invalid-string-char" + ~message:(fun fmt c -> + Format.fprintf fmt + "The following character is not allowed in string literals: '%c'" c) + ~name:"Invalid character in a string literal" () + +let invalid_string_escape_sequence = + Report.Error.mk ~code ~mnemonic:"invalid-string-escape" + ~message:(fun fmt (s, i) -> + Format.fprintf fmt + "The escape sequence starting at index %d in the \ + following string is not allowed: '%s'" i s) + ~name:"Invalid escape sequence in a string literal" () + +let bad_tptp_kind = + Report.Error.mk ~code ~mnemonic:"bad-tptp-kind" + ~message:(fun fmt o -> + match o with + | None -> + Format.fprintf fmt "Missing kind for the tptp statement." 
+ | Some s -> + Format.fprintf fmt "Unknown kind for the tptp statement: '%s'." s) + ~name:"Invalid kind for a TPTP statement" () + +let missing_smtlib_logic = + Report.Error.mk ~code ~mnemonic:"missing-smt-logic" + ~message:(fun fmt () -> + Format.fprintf fmt "Missing logic (aka set-logic for smtlib2).") + ~name:"Missing set-logic in an SMTLIB file" () + +let illegal_decl = + Report.Error.mk ~code ~mnemonic:"illegal-decl" + ~message:(fun fmt () -> + Format.fprintf fmt "Illegal declaration.") + ~name:"Illegal declaration in a file" () + +let invalid_push = + Report.Error.mk ~code ~mnemonic:"invalid-push" + ~message:(fun fmt () -> + Format.fprintf fmt "Invalid push payload (payload must be positive)") + ~name:"Negative payload for a push statement" () + +let invalid_pop = + Report.Error.mk ~code ~mnemonic:"invalid-pop" + ~message:(fun fmt () -> + Format.fprintf fmt "Invalid pop payload (payload must be positive)") + ~name:"Negative payload for a pop statement" () + +let empty_pop = + Report.Error.mk ~code ~mnemonic:"empty-pop" + ~message:(fun fmt () -> + Format.fprintf fmt + "Pop instruction with an empty stack (likely a \ + result of a missing push or excessive pop)") + ~name:"Excessive use of pop leading to an empty stack" () + +let unknown_error = + Report.Error.mk ~code:Code.bug ~mnemonic:"unknown-typing-error" + ~message:(fun fmt cstr_name -> + Format.fprintf fmt + "@[<v>Unknown typing error:@ %s@ please report upstream, ^^@]" + cstr_name) + ~name:"Unknown typing error" () + (* Typing state *) (* ************************************************************************ *) @@ -71,6 +845,7 @@ module Zf_Core = type ty_state = { (* logic used *) logic : Dolmen_type.Logic.t; + logic_loc : Dolmen.Std.Loc.loc; (* current typechecker global state *) typer : T.state; (* typechecker state stack *) @@ -79,6 +854,7 @@ type ty_state = { let new_state () = { logic = Auto; + logic_loc = Dolmen.Std.Loc.dummy; typer = T.new_state (); stack = []; } @@ -95,440 +871,225 @@ module 
Make(S : State_intf.Typer with type ty_state := ty_state) = struct (* ************************************************************************ *) type _ T.err += - | Warning_as_error : T.warning -> _ T.err - | Missing_logic : Dolmen.Std.Loc.t T.err + | Bad_tptp_kind : string option -> Dolmen.Std.Loc.t T.err + | Missing_smtlib_logic : Dolmen.Std.Loc.t T.err | Illegal_decl : Dolmen.Std.Statement.decl T.err | Invalid_push_n : Dolmen.Std.Loc.t T.err | Invalid_pop_n : Dolmen.Std.Loc.t T.err | Pop_with_empty_stack : Dolmen.Std.Loc.t T.err - (* Hints for type errors *) - (* ************************************************************************ *) - - let poly_hint fmt (c, expected, actual) = - let n_ty, n_t = Dolmen.Std.Expr.Term.Const.arity c in - let total_arity = n_ty + n_t in - match expected with - | [x] when x = total_arity && actual = n_t -> - Format.fprintf fmt - "@ @[<hov>Hint: %a@]" Format.pp_print_text - "this is a polymorphic function, you probably forgot \ - the type arguments@]" - | [x] when x = n_t && n_ty <> 0 -> - Format.fprintf fmt "@ @[<hov>Hint: %a@]" Format.pp_print_text - "it looks like the language enforces implicit polymorphism, \ - i.e. no type arguments are to be provided to applications \ - (and instead type annotation/coercions should be used)." - | _ :: _ -> - Format.fprintf fmt "@ @[<hov>Hint: %a@]" Format.pp_print_text - "this is a polymorphic function, and multiple accepted arities \ - are possible because the language supports inference of all type \ - arguments when none are given in an application." 
- | _ -> () - - let pp_hint fmt = function - | "" -> () - | msg -> - Format.fprintf fmt "@ @[<hov 2>Hint: %a@]" - Format.pp_print_text msg - (* Report type warnings *) (* ************************************************************************ *) - let decl_loc d = - match (d : Dolmen.Std.Statement.decl) with - | Record { loc; _ } - | Abstract { loc; _ } - | Inductive { loc; _ } -> loc - - let print_reason fmt r = - match (r : T.reason) with - | Builtin -> - Format.fprintf fmt "defined by a builtin theory" - | Bound (file, ast) -> - Format.fprintf fmt "bound at %a" - Dolmen.Std.Loc.fmt_pos (Dolmen.Std.Loc.loc file ast.loc) - | Inferred (file, ast) -> - Format.fprintf fmt "inferred at %a" - Dolmen.Std.Loc.fmt_pos (Dolmen.Std.Loc.loc file ast.loc) - | Defined (file, d) -> - Format.fprintf fmt "defined at %a" - Dolmen.Std.Loc.fmt_pos (Dolmen.Std.Loc.loc file d.loc) - | Declared (file, d) -> - Format.fprintf fmt "declared at %a" - Dolmen.Std.Loc.fmt_pos (Dolmen.Std.Loc.loc file (decl_loc d)) - - let print_reason_opt fmt = function - | Some r -> print_reason fmt r - | None -> Format.fprintf fmt "<location missing>" - - let report_warning (T.Warning (_env, _fragment, warn)) = + let smtlib2_6_shadow_rules st = + match (S.input_lang st : Logic.language option) with + | Some Smtlib2 (`Latest | `V2_6 | `Poly) -> true + | _ -> false + + let report_warning st (T.Warning (env, fragment, warn)) = + let loc = T.fragment_loc env fragment in let aux: type a. 
a T.warn -> _ = fun warn -> match warn with - | T.Unused_type_variable v -> Some (fun fmt () -> - Format.fprintf fmt - "Quantified type variable `%a` is unused" - Dolmen.Std.Expr.Print.ty_var v - ) - | T.Unused_term_variable v -> Some (fun fmt () -> - Format.fprintf fmt - "Quantified term variable `%a` is unused" - Dolmen.Std.Expr.Print.term_var v - ) - | T.Error_in_attribute exn -> Some (fun fmt () -> - Format.fprintf fmt - "Exception while typing attribute:@ %s" (Printexc.to_string exn) - ) - | T.Superfluous_destructor _ -> Some (fun fmt () -> - Format.fprintf fmt "Internal warning, please report upstream, ^^" - ) - - | T.Shadowing (id, old, _cur) -> Some (fun fmt () -> - Format.fprintf fmt - "Shadowing: %a was already %a" - Dolmen.Std.Id.print id - print_reason_opt (T.binding_reason old) - ) - - | Smtlib2_Ints.Restriction msg - -> Some (fun fmt () -> - Format.fprintf fmt - "This is a non-linear expression according to the smtlib spec.%a" - pp_hint msg - ) - - | Smtlib2_Reals.Restriction msg - -> Some (fun fmt () -> - Format.fprintf fmt - "This is a non-linear expression according to the smtlib spec.%a" - pp_hint msg - ) - - | Smtlib2_Reals_Ints.Restriction msg - -> Some (fun fmt () -> - Format.fprintf fmt - "This is a non-linear expression according to the smtlib spec.%a" - pp_hint msg - ) - - | Smtlib2_Float.Real_lit -> Some (fun fmt () -> - Format.fprintf fmt - "Real literals are not part of the Floats specification." - ) - | Smtlib2_Float.Bitv_extended_lit -> Some (fun fmt () -> - Format.fprintf fmt - "Bitvector decimal literals are not part of the Floats specification." 
- ) - - - | _ -> Some (fun fmt () -> - Format.fprintf fmt - "Unknown warning, please report upstream, ^^" - ) + (* typer warnings that are actually errors given some languages spec *) + | T.Shadowing (id, ((`Builtin `Term | `Not_found) as old), `Variable _) + | T.Shadowing (id, ((`Constant _ | `Builtin _ | `Not_found) as old), `Constant _) + when smtlib2_6_shadow_rules st -> + S.error ~loc st multiple_declarations (id, old) + + (* warnings *) + | T.Unused_type_variable (kind, v) -> + S.warn ~loc st unused_type_variable (kind, v) + | T.Unused_term_variable (kind, v) -> + S.warn ~loc st unused_term_variable (kind, v) + | T.Error_in_attribute exn -> + S.warn ~loc st error_in_attribute exn + | T.Superfluous_destructor _ -> + S.warn ~loc st superfluous_destructor () + | T.Shadowing (id, old, _cur) -> + S.warn ~loc st shadowing (id, old) + | Smtlib2_Ints.Restriction msg -> + S.warn ~loc st almost_linear msg + | Smtlib2_Reals.Restriction msg -> + S.warn ~loc st almost_linear msg + | Smtlib2_Reals_Ints.Restriction msg -> + S.warn ~loc st almost_linear msg + | _ -> + S.warn ~loc st unknown_warning + (Obj.Extension_constructor.(name (of_val warn))) in aux warn (* Report type errors *) (* ************************************************************************ *) - let print_res fmt res = - match (res : T.res) with - | T.Ttype -> Format.fprintf fmt "Type" - | T.Ty ty -> Format.fprintf fmt "the type@ %a" Dolmen.Std.Expr.Ty.print ty - | T.Term t -> Format.fprintf fmt "the term@ %a" Dolmen.Std.Expr.Term.print t - | T.Tags _ -> Format.fprintf fmt "some tags" - - let print_opt pp fmt = function - | None -> Format.fprintf fmt "<none>" - | Some x -> pp fmt x - - let rec print_expected fmt = function - | [] -> assert false - | x :: [] -> Format.fprintf fmt "%d" x - | x :: r -> Format.fprintf fmt "%d or %a" x print_expected r - - let print_fragment (type a) fmt (env, fragment : T.env * a T.fragment) = - match fragment with - | T.Ast ast -> Dolmen.Std.Term.print fmt ast - | T.Def d -> 
Dolmen.Std.Statement.print_def fmt d - | T.Decl d -> Dolmen.Std.Statement.print_decl fmt d - | T.Defs d -> - Dolmen.Std.Statement.print_group Dolmen.Std.Statement.print_def fmt d - | T.Decls d -> - Dolmen.Std.Statement.print_group Dolmen.Std.Statement.print_decl fmt d - | T.Located _ -> - let full = T.fragment_loc env fragment in - let loc = Dolmen.Std.Loc.full_loc full in - Format.fprintf fmt "<located at %a>" Dolmen.Std.Loc.fmt loc - - let print_bt fmt bt = - if Printexc.backtrace_status () then begin - let s = Printexc.raw_backtrace_to_string bt in - Format.fprintf fmt "@ @[<h>%a@]" Format.pp_print_text s - end - - - let report_error fmt (T.Error (env, fragment, err)) = + let report_error st (T.Error (env, fragment, err)) = + let loc = T.fragment_loc env fragment in match err with - (* Datatype definition not well founded *) | T.Not_well_founded_datatypes _ -> - Format.fprintf fmt "Not well founded datatype declaration" - - (* Inference of the type of a bound variable *) - | T.Infer_type_variable -> - Format.fprintf fmt "Cannot infer the type of a variable" - + S.error ~loc st not_well_founded_datatype () (* Generic error for when something was expected but not there *) | T.Expected (expect, got) -> - Format.fprintf fmt "Expected %s but got %a" - expect (print_opt print_res) got - - (* Arity errors *) + S.error ~loc st expect_error (expect, got) + (* Arity errors *) | T.Bad_index_arity (s, expected, actual) -> - Format.fprintf fmt - "The indexed family of operators '%s' expects %d indexes, but was given %d" - s expected actual + S.error ~loc st bad_index_arity (s, expected, actual) | T.Bad_ty_arity (c, actual) -> - Format.fprintf fmt "Bad arity: got %d arguments for type constant@ %a" - actual Dolmen.Std.Expr.Print.ty_const c - | T.Bad_op_arity (s, expected, actual) -> - Format.fprintf fmt - "Bad arity for operator '%s':@ expected %a arguments but got %d" - s print_expected expected actual + S.error ~loc st bad_type_arity (c, actual) + | T.Bad_op_arity (symbol, 
expected, actual) -> + S.error ~loc st bad_op_arity (symbol, expected, actual) | T.Bad_cstr_arity (c, expected, actual) -> - Format.fprintf fmt - "Bad arity: expected %a arguments but got %d arguments for constructor@ %a%a" - print_expected expected actual Dolmen.Std.Expr.Print.term_const c - poly_hint (c, expected, actual) + S.error ~loc st bad_cstr_arity (c, expected, actual) | T.Bad_term_arity (c, expected, actual) -> - Format.fprintf fmt - "Bad arity: expected %a but got %d arguments for function@ %a%a" - print_expected expected actual Dolmen.Std.Expr.Print.term_const c - poly_hint (c, expected, actual) - + S.error ~loc st bad_term_arity (c, expected, actual) + | T.Bad_poly_arity (vars, args) -> + S.error ~loc st bad_poly_arity (vars, args) + | T.Over_application over_args -> + S.error ~loc st over_application over_args (* Record constuction errors *) | T.Repeated_record_field f -> - Format.fprintf fmt - "The field %a is used more than once in this record construction" - Dolmen.Std.Expr.Print.id f + S.error ~loc st repeated_record_field f | T.Missing_record_field f -> - Format.fprintf fmt - "The field %a is missing from this record construction" - Dolmen.Std.Expr.Print.id f + S.error ~loc st missing_record_field f | T.Mismatch_record_type (f, r) -> - Format.fprintf fmt - "The field %a does not belong to record type %a" - Dolmen.Std.Expr.Print.id f Dolmen.Std.Expr.Print.id r - + S.error ~loc st mismatch_record_type (f, r) (* Application of a variable *) | T.Var_application v -> - Format.fprintf fmt "Cannot apply arguments to term variable@ %a" Dolmen.Std.Expr.Print.id v + S.error ~loc st var_application v | T.Ty_var_application v -> - Format.fprintf fmt "Cannot apply arguments to type variable@ %a" Dolmen.Std.Expr.Print.id v - + S.error ~loc st ty_var_application v (* Wrong type *) | T.Type_mismatch (t, expected) -> - Format.fprintf fmt "The term:@ %a@ has type@ %a@ but was expected to be of type@ %a" - Dolmen.Std.Expr.Term.print t - Dolmen.Std.Expr.Ty.print 
(Dolmen.Std.Expr.Term.ty t) - Dolmen.Std.Expr.Ty.print expected - - | T.Quantified_var_inference -> - Format.fprintf fmt "Cannot infer type for a quantified variable" - + S.error ~loc st type_mismatch (t, expected) + | T.Var_in_binding_pos_underspecified -> + S.error ~loc st var_in_binding_pos_underspecified () | T.Unhandled_builtin b -> - Format.fprintf fmt - "The following Dolmen builtin is currently not handled@ %a.@ Please report upstream" - Dolmen.Std.Term.print_builtin b - + S.error ~loc st unhandled_builtin b | T.Cannot_tag_tag -> - Format.fprintf fmt "Cannot apply a tag to another tag (only expressions)" - + S.error ~loc st cannot_tag_tag () | T.Cannot_tag_ttype -> - Format.fprintf fmt "Cannot apply a tag to the Ttype constant" - - | T.Cannot_find id -> - Format.fprintf fmt "Unbound identifier:@ '%a'" Dolmen.Std.Id.print id - + S.error ~loc st cannot_tag_ttype () + | T.Cannot_find (id, msg) -> + let lit_hint = + match S.input_lang st with + | Some Smtlib2 _ -> true + | _ -> false + in + S.error ~loc st unbound_identifier (id, msg, lit_hint) | T.Forbidden_quantifier -> - Format.fprintf fmt "Quantified expressions are forbidden by the logic." - - | T.Type_var_in_type_constructor -> - Format.fprintf fmt "Type variables cannot appear in the signature of a type constant" - + S.error ~loc st forbidden_quant () | T.Missing_destructor id -> - Format.fprintf fmt - "The destructor '%a'@ was not provided by the user implementation.@ Please report upstream." 
- Dolmen.Std.Id.print id - + S.error ~loc st missing_destructor id | T.Higher_order_application -> - Format.fprintf fmt "Higher-order applications are not handled by the Tff typechecker" - + S.error ~loc st higher_order_app () | T.Higher_order_type -> - Format.fprintf fmt "Higher-order types are not handled by the Tff typechecker" - - | T.Unbound_variables (tys, [], _) -> - let pp_sep fmt () = Format.fprintf fmt ",@ " in - Format.fprintf fmt "The following variables are not bound:@ %a" - (Format.pp_print_list ~pp_sep Dolmen.Std.Expr.Print.id) tys - - | T.Unbound_variables ([], ts, _) -> - let pp_sep fmt () = Format.fprintf fmt ",@ " in - Format.fprintf fmt "The following variables are not bound:@ %a" - (Format.pp_print_list ~pp_sep Dolmen.Std.Expr.Print.id) ts - - | T.Unbound_variables (tys, ts, _) -> - let pp_sep fmt () = Format.fprintf fmt ",@ " in - Format.fprintf fmt "The following variables are not bound:@ %a,@ %a" - (Format.pp_print_list ~pp_sep Dolmen.Std.Expr.Print.id) tys - (Format.pp_print_list ~pp_sep Dolmen.Std.Expr.Print.id) ts - + S.error ~loc st higher_order_type () + | T.Higher_order_env_in_tff_typechecker -> + S.error ~loc st higher_order_env_in_tff_typer () + | T.Polymorphic_function_argument -> + S.error ~loc st poly_arg () + | T.Non_prenex_polymorphism ty -> + S.error ~loc st non_prenex_polymorphism ty + | T.Inference_forbidden (_, w_src, inferred_ty) -> + S.error ~loc st inference_forbidden (env, w_src, inferred_ty) + | T.Inference_conflict (_, w_src, inferred_ty, allowed_tys) -> + S.error ~loc st inference_conflict (env, w_src, inferred_ty, allowed_tys) + | T.Inference_scope_escape (_, w_src, escaping_var, var_reason) -> + S.error ~loc st inference_scope_escape (env, w_src, escaping_var, var_reason) + | T.Unbound_type_wildcards tys -> + S.error ~loc st unbound_type_wildcards (env, tys) | T.Unhandled_ast -> - Format.fprintf fmt - "The typechecker did not know what to do with the following term.@ \ - Please report upstream.@\n%a" - 
print_fragment (env, fragment) - + S.error ~loc st unhandled_ast (env, fragment) + (* Alt-Ergo Functional Array errors *) + | Ae_Arrays.Bad_farray_arity -> + S.error ~loc st bad_farray_arity () + (* Alt-Ergo Arithmetic errors *) + | Ae_Arith.Expected_arith_type ty -> + S.error ~loc st expected_arith_type (ty, "") (* Tptp Arithmetic errors *) | Tptp_Arith.Expected_arith_type ty -> - Format.fprintf fmt "Arithmetic type expected but got@ %a.@ %s" - Dolmen.Std.Expr.Ty.print ty - "Tptp arithmetic symbols are only polymorphic over the arithmetic types $int, $rat and $real." + S.error ~loc st expected_arith_type + (ty, "Tptp arithmetic symbols are only polymorphic over the arithmetic \ + types $int, $rat and $real.") | Tptp_Arith.Cannot_apply_to ty -> - Format.fprintf fmt "Cannot apply the arithmetic operation to type@ %a" - Dolmen.Std.Expr.Ty.print ty - - (* Smtlib Arrya errors *) + S.error ~loc st expected_specific_arith_type ty + (* Smtlib Array errors *) | Smtlib2_Arrays.Forbidden msg -> - Format.fprintf fmt "Forbidden array sort.%a" pp_hint msg - + S.error ~loc st forbidden_array_sort msg (* Smtlib Arithmetic errors *) | Smtlib2_Ints.Forbidden msg -> - Format.fprintf fmt "Non-linear expressions are forbidden by the logic.%a" pp_hint msg + S.error ~loc st non_linear_expression msg | Smtlib2_Reals.Forbidden msg -> - Format.fprintf fmt "Non-linear expressions are forbidden by the logic.%a" pp_hint msg + S.error ~loc st non_linear_expression msg | Smtlib2_Reals_Ints.Forbidden msg -> - Format.fprintf fmt "Non-linear expressions are forbidden by the logic.%a" pp_hint msg + S.error ~loc st non_linear_expression msg | Smtlib2_Reals_Ints.Expected_arith_type ty -> - Format.fprintf fmt "Arithmetic type expected but got@ %a.@ %s" - Dolmen.Std.Expr.Ty.print ty - "The stmlib Reals_Ints theory requires an arithmetic type in order to correctly desugar the expression." 
- + S.error ~loc st expected_arith_type + (ty, "The stmlib Reals_Ints theory requires an arithmetic type in order to \ + correctly desugar the expression.") (* Smtlib Bitvector errors *) - | Smtlib2_Bitv.Invalid_bin_char c -> - Format.fprintf fmt "The character '%c' is invalid inside a binary bitvector litteral" c - | Smtlib2_Bitv.Invalid_hex_char c -> - Format.fprintf fmt "The character '%c' is invalid inside a hexadecimal bitvector litteral" c - | Smtlib2_Bitv.Invalid_dec_char c -> - Format.fprintf fmt "The character '%c' is invalid inside a decimal bitvector litteral" c - - (* Smtlib Float errors *) + | Smtlib2_Bitv.Invalid_bin_char c | Smtlib2_Float.Invalid_bin_char c -> - Format.fprintf fmt "The character '%c' is invalid inside a binary bitvector litteral" c + S.error ~loc st invalid_bin_bitvector_char c + | Smtlib2_Bitv.Invalid_hex_char c | Smtlib2_Float.Invalid_hex_char c -> - Format.fprintf fmt "The character '%c' is invalid inside a hexadecimal bitvector litteral" c + S.error ~loc st invalid_hex_bitvector_char c + | Smtlib2_Bitv.Invalid_dec_char c | Smtlib2_Float.Invalid_dec_char c -> - Format.fprintf fmt "The character '%c' is invalid inside a decimal bitvector litteral" c - + S.error ~loc st invalid_dec_bitvector_char c (* Smtlib String errors *) | Smtlib2_String.Invalid_hexadecimal s -> - Format.fprintf fmt "The following is not a valid hexadecimal character: '%s'" s + S.error ~loc st invalid_hex_string_char s | Smtlib2_String.Invalid_string_char c -> - Format.fprintf fmt "The following character is not allowed in string literals: '%c'" c + S.error ~loc st invalid_string_char c | Smtlib2_String.Invalid_escape_sequence (s, i) -> - Format.fprintf fmt "The escape sequence starting at index %d in the \ - following string is not allowed: '%s'" i s - - (* Expression filters *) - | T.Uncaught_exn (Dolmen.Std.Expr.Filter_failed_ty (name, _ty, msg), _) -> - Format.fprintf fmt "Filter '%s' failed for the given type.%a" name pp_hint msg - | T.Uncaught_exn 
(Dolmen.Std.Expr.Filter_failed_term (name, _t, msg), _) -> - Format.fprintf fmt "Filter '%s' failed for the given term.%a" name pp_hint msg - + S.error ~loc st invalid_string_escape_sequence (s, i) (* Uncaught exception during type-checking *) + | T.Uncaught_exn ((Pipeline.Out_of_time | + Pipeline.Out_of_space | + Pipeline.Sigint) as exn, bt) -> + Printexc.raise_with_backtrace exn bt | T.Uncaught_exn (exn, bt) -> - Format.fprintf fmt - "@[<v 2>Uncaught exception: %s%a@]" - (Printexc.to_string exn) print_bt bt - - (* Warnings as errors *) - | Warning_as_error w -> - begin match report_warning w with - | Some pp -> pp fmt () - | None -> - Format.fprintf fmt "missing warning reporter, please report upstream, ^^" - end - - (* Missing logic *) - | Missing_logic -> - Format.fprintf fmt "Missing logic (aka set-logic for smtlib2)." - + S.error ~loc st Report.Error.uncaught_exn (exn, bt) + (* Bad tptp kind *) + | Bad_tptp_kind o -> + S.error ~loc st bad_tptp_kind o + (* Missing smtlib logic *) + | Missing_smtlib_logic -> + S.error ~loc st missing_smtlib_logic () (* Illegal declarations *) | Illegal_decl -> - Format.fprintf fmt "Illegal declaration. 
Hint: check your logic" - + S.error ~loc st illegal_decl () (* Push/Pop errors *) | Invalid_push_n -> - Format.fprintf fmt "Invalid push payload (payload must be positive)" + S.error ~loc st invalid_push () | Invalid_pop_n -> - Format.fprintf fmt "Invalid pop payload (payload must be positive)" + S.error ~loc st invalid_pop () | Pop_with_empty_stack -> - Format.fprintf fmt "Pop instruction with an empty stack (likely a \ - result of a missing push or excessive pop)" - + S.error ~loc st empty_pop () (* Catch-all *) | _ -> - Format.fprintf fmt "Unknown typing error,@ please report upstream, ^^" - - let () = - Printexc.register_printer (function - | T.Typing_error error -> - Some (Format.asprintf "Typing error:@ %a" report_error error) - | _ -> None - ) - - (* Warning reporting and wrappers *) - (* ************************************************************************ *) - - type warning_conf = { - strict_typing : bool; - smtlib2_6_shadow_rules : bool; - } - - (* Warning reporter, sent to the typechecker. 
- This is responsible for turning fatal warnings into errors *) - let warnings_aux report conf ((T.Warning (env, fragment, warn)) as w) = - match warn, fragment with - (* Warnings as errors *) - | T.Shadowing (_, (`Builtin `Term | `Not_found), `Variable _), fragment - | T.Shadowing (_, (`Constant _ | `Builtin _ | `Not_found), `Constant _), fragment - when conf.smtlib2_6_shadow_rules -> - T._error env fragment (Warning_as_error w) - - | Smtlib2_Ints.Restriction _, fragment - when conf.strict_typing -> - T._error env fragment (Warning_as_error w) - | Smtlib2_Reals.Restriction _, fragment - when conf.strict_typing -> - T._error env fragment (Warning_as_error w) - | Smtlib2_Reals_Ints.Restriction _, fragment - when conf.strict_typing -> - T._error env fragment (Warning_as_error w) - - | Smtlib2_Float.Real_lit, fragment - when conf.strict_typing -> - T._error env fragment (Warning_as_error w) - | Smtlib2_Float.Bitv_extended_lit, fragment - when conf.strict_typing -> - T._error env fragment (Warning_as_error w) - - (* general case *) - | _ -> report w + S.error ~loc st unknown_error + (Obj.Extension_constructor.(name (of_val err))) (* Generate typing env from state *) (* ************************************************************************ *) + let extract_tptp_kind = function + | { Dolmen.Std.Term.term = App ( + { term = Symbol id; _ }, + [{ term = Symbol { name = Simple s; _ }; _ }]); _ } + when Dolmen.Std.Id.(equal tptp_kind) id -> Some s + | _ -> None + + let rec tptp_kind_of_attrs = function + | [] -> None + | t :: r -> + begin match extract_tptp_kind t with + | None -> tptp_kind_of_attrs r + | (Some _) as res -> res + end + let builtins_of_smtlib2_logic v (l : Dolmen_type.Logic.Smtlib2.t) = List.fold_left (fun acc th -> match (th : Dolmen_type.Logic.Smtlib2.theory) with @@ -548,14 +1109,9 @@ module Make(S : State_intf.Typer with type ty_state := ty_state) = struct let additional_builtins = ref (fun _ _ -> `Not_found : T.builtin_symbols) - let typing_env ~loc 
warnings (st : S.t) = - + let typing_env ?(attrs=[]) ~loc warnings (st : S.t) = let file = S.input_file_loc st in - - let additional_builtins env args = - !additional_builtins env args - in - + let additional_builtins env args = !additional_builtins env args in (* Match the language to determine bultins and other options *) match (S.input_lang st : Logic.language option) with @@ -568,37 +1124,52 @@ module Make(S : State_intf.Typer with type ty_state := ty_state) = struct - there are no explicit declaration or definitions, hence no builtins *) | Some Dimacs | Some ICNF -> let poly = T.Flexible in - let expect = T.Typed Dolmen.Std.Expr.Ty.prop in - let infer_base = Some Dolmen.Std.Expr.Ty.prop in - let warnings = warnings { - strict_typing = S.strict_typing st; - smtlib2_6_shadow_rules = false; + let var_infer = T.{ + infer_unbound_vars = No_inference; + infer_type_vars_in_binding_pos = false; + infer_term_vars_in_binding_pos = No_inference; } in - let builtins = Dolmen_type.Base.noop in - T.empty_env + let sym_infer = T.{ + infer_type_csts = false; + infer_term_csts = Wildcard (Any_base { + allowed = [Dolmen.Std.Expr.Ty.prop]; + preferred = Dolmen.Std.Expr.Ty.prop; + }); + } in + let builtins = Dimacs.parse in + T.empty_env ~order:First_order ~st:(S.ty_state st).typer - ~expect ?infer_base ~poly + ~var_infer ~sym_infer ~poly ~warnings ~file builtins (* Alt-Ergo format *) | Some Alt_ergo -> let poly = T.Flexible in - let expect = T.Nothing in - let infer_base = None in - let warnings = warnings { - strict_typing = S.strict_typing st; - smtlib2_6_shadow_rules = false; + let free_wildcards = T.Implicitly_universally_quantified in + let var_infer = T.{ + infer_unbound_vars = Unification_type_variable; + infer_type_vars_in_binding_pos = true; + infer_term_vars_in_binding_pos = No_inference; + } in + let sym_infer = T.{ + infer_type_csts = false; + infer_term_csts = No_inference; } in let builtins = Dolmen_type.Base.merge [ + Decl.parse; + Subst.parse; + 
additional_builtins; Ae_Core.parse; - Decl.parse; Subst.parse; - additional_builtins + Ae_Arith.parse; + Ae_Arrays.parse; + Ae_Bitv.parse; ] in - T.empty_env + T.empty_env ~order:First_order ~st:(S.ty_state st).typer - ~expect ?infer_base ~poly - ~warnings ~file builtins + ~var_infer ~sym_infer ~poly + ~free_wildcards ~warnings ~file + builtins (* Zipperposition Format - no inference of constants @@ -606,21 +1177,25 @@ module Make(S : State_intf.Typer with type ty_state := ty_state) = struct *) | Some Zf -> let poly = T.Flexible in - let expect = T.Nothing in - let infer_base = None in - let warnings = warnings { - strict_typing = S.strict_typing st; - smtlib2_6_shadow_rules = false; + let var_infer = T.{ + infer_unbound_vars = No_inference; + infer_type_vars_in_binding_pos = true; + infer_term_vars_in_binding_pos = Wildcard Any_in_scope; + } in + let sym_infer = T.{ + infer_type_csts = false; + infer_term_csts = No_inference; } in let builtins = Dolmen_type.Base.merge [ Decl.parse; Subst.parse; additional_builtins; Zf_Core.parse; + Zf_arith.parse ] in - T.empty_env + T.empty_env ~order:Higher_order ~st:(S.ty_state st).typer - ~expect ?infer_base ~poly + ~var_infer ~sym_infer ~poly ~warnings ~file builtins (* TPTP @@ -630,23 +1205,74 @@ module Make(S : State_intf.Typer with type ty_state := ty_state) = struct *) | Some Tptp v -> let poly = T.Explicit in - let expect = T.Typed Dolmen.Std.Expr.Ty.prop in - let infer_base = Some Dolmen.Std.Expr.Ty.base in - let warnings = warnings { - strict_typing = S.strict_typing st; - smtlib2_6_shadow_rules = false; - } in - let builtins = Dolmen_type.Base.merge [ - Decl.parse; - Subst.parse; - additional_builtins; - Tptp_Core.parse v; - Tptp_Arith.parse v; - ] in - T.empty_env - ~st:(S.ty_state st).typer - ~expect ?infer_base ~poly - ~warnings ~file builtins + begin match tptp_kind_of_attrs attrs with + | Some "thf" -> + let var_infer = T.{ + infer_unbound_vars = No_inference; + infer_type_vars_in_binding_pos = true; + 
infer_term_vars_in_binding_pos = No_inference; + } in + let sym_infer = T.{ + infer_type_csts = false; + infer_term_csts = No_inference; + } in + let builtins = Dolmen_type.Base.merge [ + Decl.parse; + Subst.parse; + additional_builtins; + Tptp_Core_Ho.parse v; + Tptp_Arith.parse v; + ] in + T.empty_env ~order:Higher_order + ~st:(S.ty_state st).typer + ~var_infer ~sym_infer ~poly + ~warnings ~file builtins + | Some ("tff" | "tpi" | "fof" | "cnf") -> + let var_infer = T.{ + infer_unbound_vars = No_inference; + infer_type_vars_in_binding_pos = true; + infer_term_vars_in_binding_pos = + Wildcard (Any_base { + allowed = [Dolmen.Std.Expr.Ty.base]; + preferred = Dolmen.Std.Expr.Ty.base; + }); + } in + let sym_infer = T.{ + infer_type_csts = true; + infer_term_csts = Wildcard (Arrow { + arg_shape = Any_base { + allowed = [Dolmen.Std.Expr.Ty.base]; + preferred = Dolmen.Std.Expr.Ty.base; + }; + ret_shape = Any_base { + allowed = [ + Dolmen.Std.Expr.Ty.base; + Dolmen.Std.Expr.Ty.prop; + ]; + preferred = Dolmen.Std.Expr.Ty.base; + }; + }); + } in + let builtins = Dolmen_type.Base.merge [ + Decl.parse; + Subst.parse; + additional_builtins; + Tptp_Core.parse v; + Tptp_Arith.parse v; + ] in + T.empty_env ~order:First_order + ~st:(S.ty_state st).typer + ~var_infer ~sym_infer ~poly + ~warnings ~file builtins + | bad_kind -> + let builtins = Dolmen_type.Base.noop in + let env = + T.empty_env + ~st:(S.ty_state st).typer + ~poly ~warnings ~file builtins + in + T._error env (Located loc) (Bad_tptp_kind bad_kind) + end (* SMTLib v2 - no inference @@ -656,45 +1282,46 @@ module Make(S : State_intf.Typer with type ty_state := ty_state) = struct *) | Some Smtlib2 v -> let poly = T.Implicit in - let expect = T.Nothing in - let infer_base = None in - let warnings = warnings { - strict_typing = S.strict_typing st; - smtlib2_6_shadow_rules = match v with - | `Latest | `V2_6 -> true; + let var_infer = T.{ + infer_unbound_vars = No_inference; + infer_type_vars_in_binding_pos = true; + 
infer_term_vars_in_binding_pos = No_inference; + } in + let sym_infer = T.{ + infer_type_csts = false; + infer_term_csts = No_inference; } in begin match (S.ty_state st).logic with | Auto -> let builtins = Dolmen_type.Base.noop in let env = - T.empty_env + T.empty_env ~order:First_order ~st:(S.ty_state st).typer - ~poly ~expect ~warnings ~file builtins + ~var_infer ~sym_infer ~poly + ~warnings ~file builtins in - T._error env (Located loc) Missing_logic + T._error env (Located loc) Missing_smtlib_logic | Smtlib2 logic -> let builtins = Dolmen_type.Base.merge ( Decl.parse :: Subst.parse :: additional_builtins :: builtins_of_smtlib2_logic v logic ) in let quants = logic.features.quantifiers in - T.empty_env + T.empty_env ~order:First_order ~st:(S.ty_state st).typer - ~expect ?infer_base ~poly ~quants + ~var_infer ~sym_infer ~poly ~quants ~warnings ~file builtins end - let typing_wrap ?(loc=Dolmen.Std.Loc.no_loc) st ~f = + let typing_wrap ?attrs ?(loc=Dolmen.Std.Loc.no_loc) st ~f = let st = ref st in - let report (T.Warning (env, fg, _) as w) = - let loc = T.fragment_loc env fg in - match report_warning w with - | None -> () - | Some pp -> st := S.warn ~loc !st "%a" pp () - in - let env = typing_env ~loc (warnings_aux report) !st in - let res = f env in - !st, res + let report warn = st := report_warning !st warn in + match f (typing_env ?attrs ~loc report !st) with + | res -> !st, res + | exception T.Typing_error err -> + let st = report_error !st err in + raise (S.Error st) + (* Push&Pop *) (* ************************************************************************ *) @@ -702,12 +1329,23 @@ module Make(S : State_intf.Typer with type ty_state := ty_state) = struct let reset st ?loc:_ () = S.set_ty_state st (new_state ()) + let reset_assertions st ?loc:_ () = + let state = S.ty_state st in + S.set_ty_state st { + logic = state.logic; + logic_loc = state.logic_loc; + typer = T.new_state (); + stack = []; + } + + let rec push st ?(loc=Dolmen.Std.Loc.no_loc) = function | 
0 -> st | i -> - if i <= 0 then - let env = typing_env ~loc (fun _ _ -> ()) st in - T._error env (Located loc) Invalid_push_n + if i < 0 then + fst @@ typing_wrap ~loc st ~f:(fun env -> + T._error env (Located loc) Invalid_push_n + ) else begin let t = S.ty_state st in let st' = T.copy_state t.typer in @@ -719,15 +1357,17 @@ module Make(S : State_intf.Typer with type ty_state := ty_state) = struct let rec pop st ?(loc=Dolmen.Std.Loc.no_loc) = function | 0 -> st | i -> - if i <= 0 then - let env = typing_env ~loc (fun _ _ -> ()) st in - T._error env (Located loc) Invalid_pop_n + if i < 0 then + fst @@ typing_wrap ~loc st ~f:(fun env -> + T._error env (Located loc) Invalid_pop_n + ) else begin let t = S.ty_state st in match t.stack with | [] -> - let env = typing_env ~loc (fun _ _ -> ()) st in - T._error env (Located loc) Pop_with_empty_stack + fst @@ typing_wrap ~loc st ~f:(fun env -> + T._error env (Located loc) Pop_with_empty_stack + ) | ty :: r -> let t' = { t with typer = ty; stack = r; } in let st' = S.set_ty_state st t' in @@ -738,6 +1378,19 @@ module Make(S : State_intf.Typer with type ty_state := ty_state) = struct (* Setting the logic *) (* ************************************************************************ *) + let set_logic_aux ~loc st new_logic = + let ty_st = S.ty_state st in + let st = + match ty_st.logic with + | Auto -> st + | Smtlib2 _ -> S.warn ~loc st logic_reset ty_st.logic_loc + in + S.set_ty_state st { + ty_st with + logic = new_logic; + logic_loc = Dolmen.Std.Loc.full_loc loc; + } + let set_logic (st : S.t) ?(loc=Dolmen.Std.Loc.no_loc) s = let file = S.input_file_loc st in let loc : Dolmen.Std.Loc.full = { file; loc; } in @@ -749,13 +1402,12 @@ module Make(S : State_intf.Typer with type ty_state := ty_state) = struct match Dolmen_type.Logic.Smtlib2.parse s with | Some l -> st, l | None -> - let st = S.warn ~loc st "Unknown logic %s" s in + let st = S.warn ~loc st unknown_logic s in st, Dolmen_type.Logic.Smtlib2.all in - S.set_ty_state st 
{ (S.ty_state st) with logic = Smtlib2 l; } + set_logic_aux ~loc st (Smtlib2 l) | _ -> - S.warn ~loc st - "Set logic is not supported for the current language" + S.warn ~loc st set_logic_not_supported () (* Declarations *) @@ -777,7 +1429,7 @@ module Make(S : State_intf.Typer with type ty_state := ty_state) = struct | Auto -> true let check_decl st env d = function - | `Type_decl (c : Dolmen.Std.Expr.ty_const) -> + | `Type_decl (c : Dolmen.Std.Expr.ty_cst) -> begin match Dolmen.Std.Expr.Ty.definition c with | None | Some Abstract -> if not (allow_abstract_type_decl st) then @@ -786,17 +1438,20 @@ module Make(S : State_intf.Typer with type ty_state := ty_state) = struct if not (allow_data_type_decl st) then T._error env (Decl d) Illegal_decl end - | `Term_decl (c : Dolmen.Std.Expr.term_const) -> - let is_function = c.ty.fun_vars <> [] || c.ty.fun_args <> [] in + | `Term_decl (c : Dolmen.Std.Expr.term_cst) -> + let is_function = + let vars, args, _ = Dolmen.Std.Expr.Ty.poly_sig c.id_ty in + vars <> [] || args <> [] + in if is_function && not (allow_function_decl st) then T._error env (Decl d) Illegal_decl let check_decls st env l decls = List.iter2 (check_decl st env) l decls - let decls (st : S.t) ?loc ?attr d = - typing_wrap ?loc st ~f:(fun env -> - let decls = T.decls env ?attr d in + let decls (st : S.t) ?loc ?attrs d = + typing_wrap ?attrs ?loc st ~f:(fun env -> + let decls = T.decls env ?attrs d in let () = check_decls st env d.contents decls in decls ) @@ -805,15 +1460,19 @@ module Make(S : State_intf.Typer with type ty_state := ty_state) = struct (* Definitions *) (* ************************************************************************ *) - let defs st ?loc ?attr d = - typing_wrap ?loc st ~f:(fun env -> - let l = T.defs env ?attr d in + let defs st ?loc ?attrs d = + typing_wrap ?attrs ?loc st ~f:(fun env -> + let l = T.defs env ?attrs d in let l = List.map (function - | `Type_def (id, _, vars, body) -> - let () = if not d.recursive then Subst.define_ty id 
vars body in - `Type_def (id, vars, body) + | `Type_def (id, c, vars, body) -> + (* are recursive defs interesting to expand ? *) + let () = + if not d.recursive then Dolmen.Std.Expr.Ty.alias_to c vars body + in + let () = Decl.add_definition env id (`Ty c) in + `Type_def (id, c, vars, body) | `Term_def (id, f, vars, args, body) -> - let () = Decl.add_definition id (`Term f) in + let () = Decl.add_definition env id (`Term f) in `Term_def (id, f, vars, args, body) ) l in @@ -825,20 +1484,24 @@ module Make(S : State_intf.Typer with type ty_state := ty_state) = struct let typecheck = S.typecheck - let terms st ?loc ?attr:_ l = - typing_wrap ?loc st ~f:(fun env -> - List.map (T.parse_term env) l - ) + let terms st ?loc ?attrs = function + | [] -> st, [] + | l -> + typing_wrap ?attrs ?loc st ~f:(fun env -> + List.map (T.parse_term env) l + ) - let formula st ?loc ?attr:_ ~goal:_ (t : Dolmen.Std.Term.t) = - typing_wrap ?loc st ~f:(fun env -> + let formula st ?loc ?attrs ~goal:_ (t : Dolmen.Std.Term.t) = + typing_wrap ?attrs ?loc st ~f:(fun env -> T.parse env t ) - let formulas st ?loc ?attr:_ l = - typing_wrap ?loc st ~f:(fun env -> - List.map (T.parse env) l - ) + let formulas st ?loc ?attrs = function + | [] -> st, [] + | l -> + typing_wrap ?attrs ?loc st ~f:(fun env -> + List.map (T.parse env) l + ) end @@ -851,15 +1514,23 @@ module type Pipe_res = Typer_intf.Pipe_res module Pipe (Expr : Expr_intf.S) + (Print : Expr_intf.Print + with type ty := Expr.ty + and type ty_var := Expr.ty_var + and type ty_cst := Expr.ty_cst + and type term := Expr.term + and type term_var := Expr.term_var + and type term_cst := Expr.term_cst + and type formula := Expr.formula) (State : State_intf.Typer_pipe) (Typer : Typer_intf.Pipe_arg with type state := State.t and type ty := Expr.ty and type ty_var := Expr.ty_var - and type ty_const := Expr.ty_const + and type ty_cst := Expr.ty_cst and type term := Expr.term and type term_var := Expr.term_var - and type term_const := Expr.term_const + and 
type term_cst := Expr.term_cst and type formula := Expr.formula) = struct @@ -876,8 +1547,8 @@ module Pipe } type def = [ - | `Type_def of Dolmen.Std.Id.t * Expr.ty_var list * Expr.ty - | `Term_def of Dolmen.Std.Id.t * Expr.term_const * Expr.ty_var list * Expr.term_var list * Expr.term + | `Type_def of Dolmen.Std.Id.t * Expr.ty_cst * Expr.ty_var list * Expr.ty + | `Term_def of Dolmen.Std.Id.t * Expr.term_cst * Expr.ty_var list * Expr.term_var list * Expr.term ] type defs = [ @@ -885,8 +1556,8 @@ module Pipe ] type decl = [ - | `Type_decl of Expr.ty_const - | `Term_decl of Expr.term_const + | `Type_decl of Expr.ty_cst + | `Term_decl of Expr.term_cst ] type decls = [ @@ -938,6 +1609,50 @@ module Pipe (* let tr implicit contents = { implicit; contents; } *) let simple id loc (contents: typechecked) = { id; loc; contents; } + let print_def fmt = function + | `Type_def (id, c, vars, body) -> + Format.fprintf fmt "@[<hov 2>type-def:@ %a: %a(%a) ->@ %a@]" + Dolmen.Std.Id.print id Print.ty_cst c + (Format.pp_print_list Print.ty_var) vars Print.ty body + | `Term_def (id, c, vars, args, body) -> + let pp_sep fmt () = Format.fprintf fmt ",@ " in + Format.fprintf fmt + "@[<hv 2>term-def{%a}:@ @[<hv>%a@] =@ @[<hov 2>fun (%a;@ %a) ->@ %a@]@]" + Dolmen.Std.Id.print id Print.term_cst c + (Format.pp_print_list ~pp_sep Print.ty_var) vars + (Format.pp_print_list ~pp_sep Print.term_var) args + Print.term body + + let print_decl fmt = function + | `Type_decl c -> + Format.fprintf fmt "@[<hov 2>type-decl:@ %a@]" Print.ty_cst c + | `Term_decl c -> + Format.fprintf fmt "@[<hov 2>term-decl:@ %a@]" Print.term_cst c + + let print_typechecked fmt t = + match (t : typechecked) with + | `Defs l -> + Format.fprintf fmt "@[<v 2>defs:@ %a@]" + (Format.pp_print_list print_def) l + | `Decls l -> + Format.fprintf fmt "@[<v 2>decls:@ %a@]" + (Format.pp_print_list print_decl) l + | `Hyp f -> + Format.fprintf fmt "@[<hov 2>hyp:@ %a@]" Print.formula f + | `Goal f -> + Format.fprintf fmt "@[<hov 2>goal:@ 
%a@]" Print.formula f + | `Clause l -> + Format.fprintf fmt "@[<v 2>clause:@ %a@]" + (Format.pp_print_list Print.formula) l + | `Solve l -> + Format.fprintf fmt "@[<hov 2>solve-assuming: %a@]" + (Format.pp_print_list Print.formula) l + | _ -> + Format.fprintf fmt "TODO" + + let print fmt ({ id; loc = _; contents; } : typechecked stmt) = + Format.fprintf fmt "%a:@ %a" + Dolmen.Std.Id.print id print_typechecked contents (* Typechecking *) (* ************************************************************************ *) @@ -946,11 +1661,11 @@ module Pipe let counter = ref 0 in (fun c -> match c.Dolmen.Std.Statement.id with - | { Dolmen.Std.Id.ns = Dolmen.Std.Id.Decl; name = "" } -> + | None -> let () = incr counter in let name = Format.sprintf "%s_%d" ref_name !counter in Dolmen.Std.Id.mk Dolmen.Std.Id.decl name - | id -> id) + | Some id -> id) let def_id = stmt_id "def" let decl_id = stmt_id "decl" @@ -972,7 +1687,7 @@ module Pipe ) vars in Dolmen.Std.Term.forall ~loc vars f - let normalize st c = + let normalize _st c = match c with (* Clauses without free variables can be typechecked as is without worry, but if there are free variables, these must @@ -980,7 +1695,6 @@ module Pipe | { S.descr = S.Clause l; _ } -> begin match fv_list l with | [] -> c - | free_vars -> let loc = c.S.loc in let f = match l with @@ -991,29 +1705,6 @@ module Pipe let f = quantify ~loc (fun _ -> None) free_vars f in { c with descr = S.Antecedent f; } end - (* Axioms and goals in alt-ergo have their type variables - implicitly quantified. 
*) - | { S.descr = S.Antecedent t; _ } - when State.input_lang st = Some Logic.Alt_ergo -> - begin match fv_list [t] with - | [] -> c - | free_vars -> - let loc = c.S.loc in - let var_ttype _ = Some (Dolmen.Std.Term.tType ~loc ()) in - let f = quantify ~loc var_ttype free_vars t in - { c with descr = S.Antecedent f; } - end - | { S.descr = S.Consequent t; _ } - when State.input_lang st = Some Logic.Alt_ergo -> - begin match fv_list [t] with - | [] -> c - | free_vars -> - let loc = c.S.loc in - let var_ttype _ = Some (Dolmen.Std.Term.tType ~loc ()) in - let f = quantify ~loc var_ttype free_vars t in - { c with descr = S.Consequent f; } - end - (* catch all *) | _ -> c @@ -1024,98 +1715,99 @@ module Pipe st, `Done () else match normalize st c with - (* Pack and includes. - These should have been filtered out before this point. - TODO: emit some kind of warning ? *) - | { S.descr = S.Pack _; _ } -> st, `Done () - | { S.descr = S.Include _; _ } -> st, `Done () - - (* Assertion stack Management *) - | { S.descr = S.Pop i; _ } -> - let st = Typer.pop st ~loc:c.S.loc i in - st, `Continue (simple (other_id c) c.S.loc (`Pop i)) - | { S.descr = S.Push i; _ } -> - let st = Typer.push st ~loc:c.S.loc i in - st, `Continue (simple (other_id c) c.S.loc (`Push i)) - | { S.descr = S.Reset_assertions; _ } -> - let st = Typer.reset st ~loc:c.S.loc () in - st, `Continue (simple (other_id c) c.S.loc `Reset_assertions) - - (* Plain statements - TODO: allow the `plain` function to return a meaningful value *) - | { S.descr = S.Plain t; _ } -> - st, `Continue (simple (other_id c) c.S.loc (`Plain t)) - - (* Hypotheses and goal statements *) - | { S.descr = S.Prove l; _ } -> - let st, l = Typer.formulas st ~loc:c.S.loc ?attr:c.S.attr l in - st, `Continue (simple (prove_id c) c.S.loc (`Solve l)) - - (* Hypotheses & Goals *) - | { S.descr = S.Clause l; _ } -> - let st, res = Typer.formulas st ~loc:c.S.loc ?attr:c.S.attr l in - let stmt : typechecked stmt = simple (hyp_id c) c.S.loc (`Clause 
res) in - st, `Continue stmt - | { S.descr = S.Antecedent t; _ } -> - let st, ret = Typer.formula st ~loc:c.S.loc ?attr:c.S.attr ~goal:false t in - let stmt : typechecked stmt = simple (hyp_id c) c.S.loc (`Hyp ret) in - st, `Continue stmt - | { S.descr = S.Consequent t; _ } -> - let st, ret = Typer.formula st ~loc:c.S.loc ?attr:c.S.attr ~goal:true t in - let stmt : typechecked stmt = simple (goal_id c) c.S.loc (`Goal ret) in - st, `Continue stmt - - (* Other set_logics should check whether corresponding plugins are activated ? *) - | { S.descr = S.Set_logic s; _ } -> - let st = Typer.set_logic st ~loc:c.S.loc s in - st, `Continue (simple (other_id c) c.S.loc (`Set_logic s)) - - (* Set/Get info *) - | { S.descr = S.Get_info s; _ } -> - st, `Continue (simple (other_id c) c.S.loc (`Get_info s)) - | { S.descr = S.Set_info t; _ } -> - st, `Continue (simple (other_id c) c.S.loc (`Set_info t)) - - (* Set/Get options *) - | { S.descr = S.Get_option s; _ } -> - st, `Continue (simple (other_id c) c.S.loc (`Get_option s)) - | { S.descr = S.Set_option t; _ } -> - st, `Continue (simple (other_id c) c.S.loc (`Set_option t)) - - (* Declarations and definitions *) - | { S.descr = S.Defs d; _ } -> - let st, l = Typer.defs st ~loc:c.S.loc ?attr:c.S.attr d in - let res : typechecked stmt = simple (def_id c) c.S.loc (`Defs l) in - st, `Continue (res) - | { S.descr = S.Decls l; _ } -> - let st, l = Typer.decls st ~loc:c.S.loc ?attr:c.S.attr l in - let res : typechecked stmt = simple (decl_id c) c.S.loc (`Decls l) in - st, `Continue (res) - - (* Smtlib's proof/model instructions *) - | { S.descr = S.Get_proof; _ } -> - st, `Continue (simple (other_id c) c.S.loc `Get_proof) - | { S.descr = S.Get_unsat_core; _ } -> - st, `Continue (simple (other_id c) c.S.loc `Get_unsat_core) - | { S.descr = S.Get_unsat_assumptions; _ } -> - st, `Continue (simple (other_id c) c.S.loc `Get_unsat_assumptions) - | { S.descr = S.Get_model; _ } -> - st, `Continue (simple (other_id c) c.S.loc `Get_model) - | { 
S.descr = S.Get_value l; _ } -> - let st, l = Typer.terms st ~loc:c.S.loc ?attr:c.S.attr l in - st, `Continue (simple (other_id c) c.S.loc (`Get_value l)) - | { S.descr = S.Get_assignment; _ } -> - st, `Continue (simple (other_id c) c.S.loc `Get_assignment) - (* Assertions *) - | { S.descr = S.Get_assertions; _ } -> - st, `Continue (simple (other_id c) c.S.loc `Get_assertions) - (* Misc *) - | { S.descr = S.Echo s; _ } -> - st, `Continue (simple (other_id c) c.S.loc (`Echo s)) - | { S.descr = S.Reset; _ } -> - st, `Continue (simple (other_id c) c.S.loc `Reset) - | { S.descr = S.Exit; _ } -> - st, `Continue (simple (other_id c) c.S.loc `Exit) + (* Pack and includes. + These should have been filtered out before this point. + TODO: emit some kind of warning ? *) + | { S.descr = S.Pack _; _ } -> st, `Done () + | { S.descr = S.Include _; _ } -> st, `Done () + + (* State&Assertion stack management *) + | { S.descr = S.Reset; _ } -> + let st = Typer.reset st ~loc:c.S.loc () in + st, `Continue (simple (other_id c) c.S.loc `Reset) + | { S.descr = S.Pop i; _ } -> + let st = Typer.pop st ~loc:c.S.loc i in + st, `Continue (simple (other_id c) c.S.loc (`Pop i)) + | { S.descr = S.Push i; _ } -> + let st = Typer.push st ~loc:c.S.loc i in + st, `Continue (simple (other_id c) c.S.loc (`Push i)) + | { S.descr = S.Reset_assertions; _ } -> + let st = Typer.reset_assertions st ~loc:c.S.loc () in + st, `Continue (simple (other_id c) c.S.loc `Reset_assertions) + + (* Plain statements + TODO: allow the `plain` function to return a meaningful value *) + | { S.descr = S.Plain t; _ } -> + st, `Continue (simple (other_id c) c.S.loc (`Plain t)) + + (* Hypotheses and goal statements *) + | { S.descr = S.Prove l; _ } -> + let st, l = Typer.formulas st ~loc:c.S.loc ~attrs:c.S.attrs l in + st, `Continue (simple (prove_id c) c.S.loc (`Solve l)) + + (* Hypotheses & Goals *) + | { S.descr = S.Clause l; _ } -> + let st, res = Typer.formulas st ~loc:c.S.loc ~attrs:c.S.attrs l in + let stmt : 
typechecked stmt = simple (hyp_id c) c.S.loc (`Clause res) in + st, `Continue stmt + | { S.descr = S.Antecedent t; _ } -> + let st, ret = Typer.formula st ~loc:c.S.loc ~attrs:c.S.attrs ~goal:false t in + let stmt : typechecked stmt = simple (hyp_id c) c.S.loc (`Hyp ret) in + st, `Continue stmt + | { S.descr = S.Consequent t; _ } -> + let st, ret = Typer.formula st ~loc:c.S.loc ~attrs:c.S.attrs ~goal:true t in + let stmt : typechecked stmt = simple (goal_id c) c.S.loc (`Goal ret) in + st, `Continue stmt + + (* Other set_logics should check whether corresponding plugins are activated ? *) + | { S.descr = S.Set_logic s; _ } -> + let st = Typer.set_logic st ~loc:c.S.loc s in + st, `Continue (simple (other_id c) c.S.loc (`Set_logic s)) + + (* Set/Get info *) + | { S.descr = S.Get_info s; _ } -> + st, `Continue (simple (other_id c) c.S.loc (`Get_info s)) + | { S.descr = S.Set_info t; _ } -> + st, `Continue (simple (other_id c) c.S.loc (`Set_info t)) + + (* Set/Get options *) + | { S.descr = S.Get_option s; _ } -> + st, `Continue (simple (other_id c) c.S.loc (`Get_option s)) + | { S.descr = S.Set_option t; _ } -> + st, `Continue (simple (other_id c) c.S.loc (`Set_option t)) + + (* Declarations and definitions *) + | { S.descr = S.Defs d; _ } -> + let st, l = Typer.defs st ~loc:c.S.loc ~attrs:c.S.attrs d in + let res : typechecked stmt = simple (def_id c) c.S.loc (`Defs l) in + st, `Continue (res) + | { S.descr = S.Decls l; _ } -> + let st, l = Typer.decls st ~loc:c.S.loc ~attrs:c.S.attrs l in + let res : typechecked stmt = simple (decl_id c) c.S.loc (`Decls l) in + st, `Continue (res) + + (* Smtlib's proof/model instructions *) + | { S.descr = S.Get_proof; _ } -> + st, `Continue (simple (other_id c) c.S.loc `Get_proof) + | { S.descr = S.Get_unsat_core; _ } -> + st, `Continue (simple (other_id c) c.S.loc `Get_unsat_core) + | { S.descr = S.Get_unsat_assumptions; _ } -> + st, `Continue (simple (other_id c) c.S.loc `Get_unsat_assumptions) + | { S.descr = S.Get_model; _ } -> + 
st, `Continue (simple (other_id c) c.S.loc `Get_model) + | { S.descr = S.Get_value l; _ } -> + let st, l = Typer.terms st ~loc:c.S.loc ~attrs:c.S.attrs l in + st, `Continue (simple (other_id c) c.S.loc (`Get_value l)) + | { S.descr = S.Get_assignment; _ } -> + st, `Continue (simple (other_id c) c.S.loc `Get_assignment) + (* Assertions *) + | { S.descr = S.Get_assertions; _ } -> + st, `Continue (simple (other_id c) c.S.loc `Get_assertions) + (* Misc *) + | { S.descr = S.Echo s; _ } -> + st, `Continue (simple (other_id c) c.S.loc (`Echo s)) + | { S.descr = S.Exit; _ } -> + st, `Continue (simple (other_id c) c.S.loc `Exit) in res diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/typer.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/typer.mli index a594f23a50c4e691696c7a11b00e91e79ea8b842..2e91b4cdf4f77f8050c29c138b620dcaa07831f2 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/typer.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/typer.mli @@ -15,11 +15,11 @@ module T : Dolmen_type.Tff.S with type 'a Tag.t = 'a Dolmen.Std.Tag.t and type Ty.t = Dolmen.Std.Expr.ty and type Ty.Var.t = Dolmen.Std.Expr.ty_var - and type Ty.Const.t = Dolmen.Std.Expr.ty_const + and type Ty.Const.t = Dolmen.Std.Expr.ty_cst and type T.t = Dolmen.Std.Expr.term and type T.Var.t = Dolmen.Std.Expr.term_var - and type T.Const.t = Dolmen.Std.Expr.term_const - and type T.Cstr.t = Dolmen.Std.Expr.term_const + and type T.Const.t = Dolmen.Std.Expr.term_cst + and type T.Cstr.t = Dolmen.Std.Expr.term_cst (** The raw type-checker module. 
*) @@ -44,24 +44,37 @@ module type Pipe_res = Typer_intf.Pipe_res module Pipe (Expr : Expr_intf.S) + (Print : Expr_intf.Print + with type ty := Expr.ty + and type ty_var := Expr.ty_var + and type ty_cst := Expr.ty_cst + and type term := Expr.term + and type term_var := Expr.term_var + and type term_cst := Expr.term_cst + and type formula := Expr.formula) (State : State_intf.Typer_pipe) (Typer : Pipe_arg with type state := State.t and type ty := Expr.ty and type ty_var := Expr.ty_var - and type ty_const := Expr.ty_const + and type ty_cst := Expr.ty_cst and type term := Expr.term and type term_var := Expr.term_var - and type term_const := Expr.term_const + and type term_cst := Expr.term_cst and type formula := Expr.formula) : Pipe_res with type state := State.t and type ty := Expr.ty and type ty_var := Expr.ty_var - and type ty_const := Expr.ty_const + and type ty_cst := Expr.ty_cst and type term := Expr.term and type term_var := Expr.term_var - and type term_const := Expr.term_const + and type term_cst := Expr.term_cst and type formula := Expr.formula +(* Warnings *) + +val almost_linear : string Report.Warning.t +(** Almost linear warning. 
*) + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/typer_intf.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/typer_intf.ml index 8bd1c054df5483ffa52bedc97b4123880f0140d6..aef524b11b3e1ae19caec3ac546878b5c1809aa0 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/typer_intf.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/loop/typer_intf.ml @@ -8,11 +8,11 @@ module type Pipe_types = sig type ty type ty_var - type ty_const + type ty_cst type term type term_var - type term_const + type term_cst type formula @@ -30,6 +30,9 @@ module type Pipe_arg = sig val reset : state -> ?loc:Dolmen.Std.Loc.t -> unit -> state + val reset_assertions : + state -> ?loc:Dolmen.Std.Loc.t -> unit -> state + val push : state -> ?loc:Dolmen.Std.Loc.t -> int -> state @@ -41,33 +44,33 @@ module type Pipe_arg = sig val defs : state -> ?loc:Dolmen.Std.Loc.t -> - ?attr:Dolmen.Std.Term.t -> Dolmen.Std.Statement.defs -> + ?attrs:Dolmen.Std.Term.t list -> Dolmen.Std.Statement.defs -> state * [ - | `Type_def of Dolmen.Std.Id.t * ty_var list * ty - | `Term_def of Dolmen.Std.Id.t * term_const * ty_var list * term_var list * term + | `Type_def of Dolmen.Std.Id.t * ty_cst * ty_var list * ty + | `Term_def of Dolmen.Std.Id.t * term_cst * ty_var list * term_var list * term ] list val decls : state -> ?loc:Dolmen.Std.Loc.t -> - ?attr:Dolmen.Std.Term.t -> Dolmen.Std.Statement.decls -> + ?attrs:Dolmen.Std.Term.t list -> Dolmen.Std.Statement.decls -> state * [ - | `Type_decl of ty_const - | `Term_decl of term_const + | `Type_decl of ty_cst + | `Term_decl of term_cst ] list val terms : state -> ?loc:Dolmen.Std.Loc.t -> - ?attr:Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> + ?attrs:Dolmen.Std.Term.t list -> Dolmen.Std.Term.t list -> state * term list val formula : state -> ?loc:Dolmen.Std.Loc.t -> - ?attr:Dolmen.Std.Term.t -> goal:bool -> Dolmen.Std.Term.t -> + ?attrs:Dolmen.Std.Term.t list -> goal:bool -> Dolmen.Std.Term.t -> state * formula val formulas : state -> ?loc:Dolmen.Std.Loc.t -> - 
?attr:Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> + ?attrs:Dolmen.Std.Term.t list -> Dolmen.Std.Term.t list -> state * formula list end @@ -94,30 +97,25 @@ module type S = sig (** The type of type-checking warnings. *) type builtin_symbols - (** The type of builint symbols for the type-checker. *) + (** The type of builin symbols for the type-checker. *) include Pipe_arg with type state := state and type ty := Dolmen.Std.Expr.ty and type ty_var := Dolmen.Std.Expr.ty_var - and type ty_const := Dolmen.Std.Expr.ty_const + and type ty_cst := Dolmen.Std.Expr.ty_cst and type term := Dolmen.Std.Expr.term and type term_var := Dolmen.Std.Expr.term_var - and type term_const := Dolmen.Std.Expr.term_const + and type term_cst := Dolmen.Std.Expr.term_cst and type formula := Dolmen.Std.Expr.formula (** This signature includes the requirements to instantiate the {Pipes.Make: functor*) - val print_fragment : Format.formatter -> env * 'a fragment -> unit - (** Print a code fragment *) + val report_error : state -> error -> state + (** Report a typing error by calling the appropriate state function. *) - val report_error : Format.formatter -> error -> unit - (** Report a typing error on the given formatter. *) - - val report_warning : warning -> - (Format.formatter -> unit -> unit) option - (** Return a reporter for the given warning, if the warning should be - reported. *) + val report_warning : state -> warning -> state + (** Return a typing warning by calling the appropriate state function. *) val additional_builtins : builtin_symbols ref (** This reference can be modified to parse new builtin symbols. By default no @@ -141,8 +139,8 @@ module type Pipe_res = sig (** Wrapper around statements. It records implicit type declarations. *) type decl = [ - | `Type_decl of ty_const - | `Term_decl of term_const + | `Type_decl of ty_cst + | `Term_decl of term_cst ] (** The type of top-level type declarations. 
*) @@ -152,8 +150,8 @@ module type Pipe_res = sig (** A list of type declarations. *) type def = [ - | `Type_def of Dolmen.Std.Id.t * ty_var list * ty - | `Term_def of Dolmen.Std.Id.t * term_const * ty_var list * term_var list * term + | `Type_def of Dolmen.Std.Id.t * ty_cst * ty_var list * ty + | `Term_def of Dolmen.Std.Id.t * term_cst * ty_var list * term_var list * term ] (** The type of top-level type definitions. Type definitions are inlined and so can be ignored. *) @@ -208,6 +206,9 @@ module type Pipe_res = sig type typechecked = [ defs | decls | assume | solve | get_info | set_info | stack_control ] (** The type of statements after typechecking *) + val print : Format.formatter -> typechecked stmt -> unit + (** Printing funciton for typechecked statements. *) + val typecheck : state -> Dolmen.Std.Statement.t -> state * [ `Continue of typechecked stmt | `Done of unit ] (** Typechecks a statement. *) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/diagnostic.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/diagnostic.ml index 9a2dc5079d8d0466aaf8d0fbeda7189623145892..0b877adcda48c6c8a777f6b5252ea648ba126c2a 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/diagnostic.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/diagnostic.ml @@ -1,13 +1,13 @@ (* This file is free software, part of dolmen. 
See file "LICENSE" for more information *) -type t = Lsp.Protocol.PublishDiagnostics.diagnostic +type t = Lsp.Types.Diagnostic.t let lsp_pos line character = - Lsp.Protocol.Position.{ line; character; } + Lsp.Types.Position.create ~line ~character -let lsp_range start_ end_ = - Lsp.Protocol.Range.{ start_; end_; } +let lsp_range start end_ = + Lsp.Types.Range.create ~start ~end_ let start_pos = lsp_pos 1 1 let start_range = lsp_range start_pos start_pos @@ -19,26 +19,18 @@ let range_of_loc = function (lsp_pos (l.start_line - 1) l.start_column) (lsp_pos (l.stop_line - 1) l.stop_column) -let warn ?loc msg = - Lsp.Protocol.PublishDiagnostics.{ - range = range_of_loc loc; - severity = Some Warning; - code = NoCode; - source = Some "dolmenls"; - message = msg; - relatedInformation = []; - tags = []; - } - -let error ?loc msg = - Lsp.Protocol.PublishDiagnostics.{ - range = range_of_loc loc; - severity = Some Error; - code = NoCode; - source = Some "dolmenls"; - message = msg; - relatedInformation = []; - tags = []; - } +let warn ?loc message = + Lsp.Types.Diagnostic.create () + ~range:(range_of_loc loc) + ~severity:Warning + ~source:"dolmenls" + ~message + +let error ?loc message = + Lsp.Types.Diagnostic.create () + ~range:(range_of_loc loc) + ~severity:Error + ~source:"dolmenls" + ~message diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/dune b/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/dune index 0f4ac6f0582d3fce36468adfc96a54fd881365b3..ef0a9d04be95291b6fc4595e5fc2050f6c7a977b 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/dune +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/dune @@ -5,8 +5,10 @@ (libraries ; ocaml deps threads.posix - ; external deps - lsp gen + ; lwp deps + lsp linol linol-lwt + ; other external deps + gen logs logs.fmt ; dolmen deps dolmen dolmen.intf dolmen.std dolmen_type dolmen_loop diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/loop.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/loop.ml index 
60fc28cdf47b9ce5de0fe478cf90f536c88ffe1e..e97d067f3a841716f3199aa5bcb684e96df72833 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/loop.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/loop.ml @@ -8,68 +8,45 @@ module Header = Dolmen_loop.Headers.Pipe(State) module Typer = struct module T = Dolmen_loop.Typer.Make(State) include T - include Dolmen_loop.Typer.Pipe(Dolmen.Std.Expr)(State)(T) + include Dolmen_loop.Typer.Pipe(Dolmen.Std.Expr)(Dolmen.Std.Expr.Print)(State)(T) end exception Finished of (State.t, string) result let handle_exn st = function - (* Simple error cases *) - | Pipeline.Sigint -> Error "user interrupt" - | Pipeline.Out_of_time -> Error "timeout" - | Pipeline.Out_of_space -> Error "memoryout" - (* Exn during parsing *) - | Dolmen.Std.Loc.Uncaught (loc, exn) -> - let file = State.input_file_loc st in - let loc = Dolmen.Std.Loc.loc file loc in - Error (Format.asprintf "%a: %s" - Dolmen.Std.Loc.fmt loc (Printexc.to_string exn)) - - (* lexing error *) - | Dolmen.Std.Loc.Lexing_error (loc, msg) -> - let file = State.input_file_loc st in - let loc = { Dolmen.Std.Loc.file; loc; } in - Ok (State.error ~loc st "Lexing error: %s" msg) - (* Parsing error *) - | Dolmen.Std.Loc.Syntax_error (loc, msg) -> - let file = State.input_file_loc st in - let loc = { Dolmen.Std.Loc.file; loc; } in - Ok (State.error ~loc st "Syntax error %t" msg) - (* Typing error *) - | Dolmen_loop.Typer.T.Typing_error ( - Dolmen_loop.Typer.T.Error (env, fragment, _err) as error) -> - let loc = Dolmen_loop.Typer.T.fragment_loc env fragment in - Ok (State.error ~loc st "Typing error: %a" Typer.report_error error) - - (* File not found *) - | State.File_not_found (loc, dir, f) -> - Ok (State.error ~loc st "File not found: '%s' in directory '%s'" f dir) - (* Input lang changed *) - | State.Input_lang_changed _ -> - Ok (State.error st "Language changed because of an include") + (* Internal exception used for jumping *) + | State.Error st -> Ok st + (* Simple error cases *) + | 
Dolmen_loop.Pipeline.Sigint -> Error "user interrupt" + | Dolmen_loop.Pipeline.Out_of_time -> Error "timeout" + | Dolmen_loop.Pipeline.Out_of_space -> Error "memoryout" (* Fallback *) | exn -> + let bt = Printexc.get_raw_backtrace () in Ok (State.error st - "Internal error, please report upstream: %s" - (Printexc.to_string exn)) + Dolmen_loop.Report.Error.uncaught_exn (exn, bt)) let finally st e = match e with | None -> st - | Some exn -> + | Some (bt,exn) -> + (* Print the backtrace if requested *) + if Printexc.backtrace_status () then + Printexc.print_raw_backtrace stdout bt; let res = handle_exn st exn in raise (Finished res) let process path opt_contents = let dir = Filename.dirname path in let file = Filename.basename path in + let reports = Dolmen_loop.Report.Conf.mk ~default:Enabled in let st = State.{ debug = false; + reports; loc_style = `Short; max_warn = max_int; cur_warn = 0; - context = false; time_limit = 0.; (* disable the timer *) size_limit = max_float; input_dir = dir; @@ -80,13 +57,12 @@ let process path opt_contents = | Some contents -> `Raw (file, contents) end; input_file_loc = Dolmen.Std.Loc.mk_file ""; - header_check = true; + header_check = false; header_licenses = []; header_lang_version = None; header_state = Dolmen_loop.Headers.empty; type_state = Dolmen_loop.Typer.new_state (); type_check = true; - type_strict = true; solve_state = []; export_lang = []; } in diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/main.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/main.ml index db9ef745ee4a60910305e75ddf4bb9f2454aa22e..8d6fc760bfd42b6f16bdfe00d5488290f6b89d0d 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/main.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/main.ml @@ -1,54 +1,17 @@ (* This file is free software, part of dolmen. 
See file "LICENSE" for more information *) -type listen = - | Stdio - | Server of string * int +let run () = + let s = new Server.dolmen_lsp_server in + let server = Linol_lwt.Jsonrpc2.create_stdio s in + let task = Linol_lwt.Jsonrpc2.run server in + match Linol_lwt.run task with + | () -> () + | exception e -> + let e = Printexc.to_string e in + Printf.eprintf "error: %s\n%!" e; + exit 1 -let run ic oc = - let section = Printf.sprintf "%s[%d]" Handler.section @@ Thread.id (Thread.self ()) in - Lsp.Logger.log ~section ~title:Debug "start lsp"; - Lsp.Rpc.start Handler.empty Handler.handler ic oc; - Lsp.Logger.log ~section ~title:Debug "stop lsp"; - () - -let main_tcp addr port = - let sock = Unix.socket ~cloexec:true Unix.PF_INET Unix.SOCK_STREAM 0 in - Unix.setsockopt_optint sock Unix.SO_LINGER None; - Unix.setsockopt sock Unix.SO_REUSEADDR true; - let inet_addr = Unix.inet_addr_of_string addr in - Unix.bind sock (Unix.ADDR_INET (inet_addr, port)); - Unix.listen sock 10; - while true do - let client_sock, _ = Unix.accept sock in - let ic = Unix.in_channel_of_descr client_sock in - let oc = Unix.out_channel_of_descr client_sock in - let _th = Thread.create (fun () -> run ic oc) () in - () - done; - () - -let main ~listen () : unit = - begin match listen with - | Stdio -> run stdin stdout - | Server (addr,port) -> - main_tcp addr port - end - -let () = - let stdio = ref true in - let host = ref "0.0.0.0" in - let port = ref 8854 in - let logfile = ref "" in - let opts = [ - "--host", Arg.Set_string host, " address to listen in"; - "--port", Arg.Set_int port, " port to listen on"; - "--stdio", Arg.Set stdio, " connection on stdio"; - "--tcp", Arg.Clear stdio, " connections on TCP"; - "--log", Arg.Set_string logfile, " log file"; - ] |> Arg.align in - Arg.parse opts (fun _ -> raise (Arg.Bad "no such arg")) "dolmenls [option*]"; - if !logfile = "" then logfile := Filename.temp_file "dolmenls-" ".log"; - let listen = if !stdio then Stdio else Server (!host, !port) in - 
Lsp.Logger.with_log_file (Some !logfile) (main ~listen) +(* Finally, we actually run the server *) +let () = run () diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/server.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/server.ml new file mode 100644 index 0000000000000000000000000000000000000000..df48111512b287978486ef8f1cfee73d6ed84c74 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/server.ml @@ -0,0 +1,41 @@ +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + +(* Lsp server class *) +(* ************************************************************************ *) + +class dolmen_lsp_server = + object(self) + inherit Linol_lwt.Jsonrpc2.server + + (* one env per document *) + val buffers: (Lsp.Types.DocumentUri.t, State.t) Hashtbl.t = Hashtbl.create 32 + + method private _on_doc + ~(notify_back:Linol_lwt.Jsonrpc2.notify_back) + (uri:Lsp.Types.DocumentUri.t) (contents:string) = + (* TODO: unescape uri/translate it to a correct path ? 
*) + match Loop.process uri (Some contents) with + | Ok state -> + let diags = state.solve_state in + Hashtbl.replace buffers uri state; + notify_back#send_diagnostic diags + | Error msg -> + Linol_lwt.Jsonrpc2.IO.failwith ( + Format.asprintf "Internal dolmen error: %s" msg + ) + + method on_notif_doc_did_open ~notify_back d ~content = + self#_on_doc ~notify_back d.uri content + + method on_notif_doc_did_change ~notify_back d _c ~old_content:_old ~new_content = + self#_on_doc ~notify_back d.uri new_content + + method on_notif_doc_did_close ~notify_back:_ d = + Hashtbl.remove buffers d.uri; + Linol_lwt.Jsonrpc2.IO.return () + + end + + + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/state.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/state.ml index 536c6154ff7e0d97ce90095979adb356f683be3f..f87ed407bf64c1832a7a108000e37eb9850b9453 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/state.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/lsp/state.ml @@ -8,6 +8,7 @@ include Dolmen_loop.State type solve_state = Diagnostic.t list type t = solve_state state +exception Error of t (* Warnings *) (* ************************************************************************* *) @@ -22,14 +23,28 @@ let full_loc t = function let file = input_file_loc t in Dolmen.Std.Loc.loc file Dolmen.Std.Loc.no_loc -let warn ?loc t format = +let warn ?loc t warn payload = let loc = full_loc t loc in - Format.kasprintf (fun msg -> + (* Flush the str formatter to clear any unflushed leftover *) + let _ = Format.flush_str_formatter () in + (* Set the str formatter out functions to not emit newline characters *) + let str_out_functions = + Format.pp_get_formatter_out_functions Format.str_formatter () + in + let () = + Format.pp_set_formatter_out_functions Format.str_formatter { + str_out_functions with + out_newline = (fun () -> str_out_functions.out_spaces 1); + out_indent = (fun _ -> ()); + } + in + Format.kfprintf (fun _ -> + let msg = Format.flush_str_formatter () in let d = 
Diagnostic.warn ~loc msg in - add_diag d t) format - + add_diag d t) Format.str_formatter "%a" + Dolmen_loop.Report.Warning.print (warn, payload) -let error ?loc t format = +let error ?loc t err payload = let loc = full_loc t loc in (* Flush the str formatter to clear any unflushed leftover *) let _ = Format.flush_str_formatter () in @@ -48,6 +63,6 @@ let error ?loc t format = Format.kfprintf (fun _ -> let msg = Format.flush_str_formatter () in let d = Diagnostic.error ~loc msg in - add_diag d t - ) Format.str_formatter ("@[<h>" ^^ format ^^ "@]") + add_diag d t) Format.str_formatter "%a" + Dolmen_loop.Report.Error.print (err, payload) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/builtin.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/builtin.ml new file mode 100644 index 0000000000000000000000000000000000000000..55d1c14635593609c9598bafd6fdf34ba9726112 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/builtin.ml @@ -0,0 +1,192 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + +(* Main Typedef *) +(* ************************************************************************* *) + +type 'a t = .. +constraint 'a = < .. > +(* Extensible variant type for builtin operations. + Parameterized over the type of variables, constants, and terms. *) + +(* Base builtins *) +(* ************************************************************************* *) + +type _ t += Base + +type _ t += + | Wildcard : { ty : 'ty option ref; } -> < ty : 'ty ; .. 
> t + +type _ t += Kind | Type | Prop + +type _ t += | Unit | Univ + +type _ t += Coercion + +(* Boolean builtins *) +(* ************************************************************************* *) + +type _ t += + | True | False + | Equal | Distinct + | Neg | And | Or + | Nand | Nor | Xor + | Imply | Implied | Equiv + +type _ t += Ite + +type _ t += Pi | Sigma + +(* Algebraic datatype builtins *) +(* ************************************************************************* *) + +type _ t += + | Tester : + { adt: 'ty_cst; case: int; cstr : 'term_cst; } -> + < ty_cst : 'ty_cst ; term_cst : 'term_cst; .. > t + | Constructor : + { adt : 'ty_cst; case : int; } -> + < ty_cst : 'ty_cst ; .. > t + | Destructor : + { adt : 'ty_cst; case : int; cstr : 'term_cst; field: int; } -> + < ty_cst : 'ty_cst ; term_cst : 'term_cst; .. > t + + +(* Arithmetic builtins *) +(* ************************************************************************* *) + +type _ t += + | Int | Integer of string + | Rat | Rational of string + | Real | Decimal of string + | Lt | Leq | Gt | Geq + | Minus | Add | Sub | Mul | Pow + | Div + | Div_e | Modulo_e + | Div_t | Modulo_t + | Div_f | Modulo_f + | Abs | Divisible + | Is_int | Is_rat + | Floor | Floor_to_int + | Ceiling | Truncate | Round + +(* arrays *) +type _ t += + | Array | Store | Select + +(* Bitvectors *) +type _ t += + | Bitv of int + | Bitvec of string + | Bitv_concat + | Bitv_extract of int * int + | Bitv_repeat + | Bitv_zero_extend + | Bitv_sign_extend + | Bitv_rotate_right of int + | Bitv_rotate_left of int + | Bitv_not | Bitv_and | Bitv_or + | Bitv_nand | Bitv_nor + | Bitv_xor | Bitv_xnor + | Bitv_comp + | Bitv_neg | Bitv_add | Bitv_sub | Bitv_mul + | Bitv_udiv | Bitv_urem + | Bitv_sdiv | Bitv_srem | Bitv_smod + | Bitv_shl | Bitv_lshr | Bitv_ashr + | Bitv_ult | Bitv_ule + | Bitv_ugt | Bitv_uge + | Bitv_slt | Bitv_sle + | Bitv_sgt | Bitv_sge + +(* Floats *) +type _ t += + | Float of int * int + | RoundingMode + | Fp of int * int + | 
RoundNearestTiesToEven + | RoundNearestTiesToAway + | RoundTowardPositive + | RoundTowardNegative + | RoundTowardZero + | Plus_infinity of int * int + | Minus_infinity of int * int + | Plus_zero of int * int + | Minus_zero of int * int + | NaN of int * int + | Fp_abs of int * int + | Fp_neg of int * int + | Fp_add of int * int + | Fp_sub of int * int + | Fp_mul of int * int + | Fp_div of int * int + | Fp_fma of int * int + | Fp_sqrt of int * int + | Fp_rem of int * int + | Fp_roundToIntegral of int * int + | Fp_min of int * int + | Fp_max of int * int + | Fp_leq of int * int + | Fp_lt of int * int + | Fp_geq of int * int + | Fp_gt of int * int + | Fp_eq of int * int + | Fp_isNormal of int * int + | Fp_isSubnormal of int * int + | Fp_isZero of int * int + | Fp_isInfinite of int * int + | Fp_isNaN of int * int + | Fp_isNegative of int * int + | Fp_isPositive of int * int + | Ieee_format_to_fp of int * int + | Fp_to_fp of int * int * int * int + | Real_to_fp of int * int + | Sbv_to_fp of int * int * int + | Ubv_to_fp of int * int * int + | To_ubv of int * int * int + | To_sbv of int * int * int + | To_real of int * int + +(* Strings *) +type _ t += + | String + | Str of string + | Str_length + | Str_at + | Str_to_code + | Str_of_code + | Str_is_digit + | Str_to_int + | Str_of_int + | Str_concat + | Str_sub + | Str_index_of + | Str_replace + | Str_replace_all + | Str_replace_re + | Str_replace_re_all + | Str_is_prefix + | Str_is_suffix + | Str_contains + | Str_lexicographic_strict + | Str_lexicographic_large + | Str_in_re + +(* String Regular languages *) +type _ t += + | String_RegLan + | Re_empty + | Re_all + | Re_allchar + | Re_of_string + | Re_range + | Re_concat + | Re_union + | Re_inter + | Re_star + | Re_cross + | Re_complement + | Re_diff + | Re_option + | Re_power of int + | Re_loop of int * int + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/builtin.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/builtin.mli new file mode 100644 index 
0000000000000000000000000000000000000000..328e49b28119fb854832a6ae6577653548bdafde --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/builtin.mli @@ -0,0 +1,696 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + +(** This module defines the builtins that are defined by Dolmen. + + Builtins are particularly used in typed expressions see {!Dolmen.Std.Expr}, + in order to give more information about constants which have builtin + semantics. + + Users are encouraged to match builtins rather than specific symbols when + inspecting typed expressions, as this basically allows to match on the + semantics of an identifier rather than matching on the syntaxic value of an + identifier. For instance, equality can take an arbitrary number of + arguments, and thus in order to have well-typed terms, each arity of + equality gives rise to a different symbol (because the symbol's type + depends on the arity desired), but all these symbols have the [Equal] + builtin. + + In the following we will use pseudo-code to describe the arity and actual + type associated to builtins. These will follow ocaml's notation for types + with an additional syntax using dots for arbitrary arity. Some examples: + - [ttype] is a type constant + - [ttype -> ttype] is a type constructor (e.g. [list]) + - [int] is a constant of type [int] + - [float -> int] is a unary function + - ['a. 'a -> 'a] is a polymorphic unary function + - ['a. 'a -> ... -> Prop] describes a family of functions that take + a type and then an arbitrary number of arguments of that type, and + return a proposition (this is for instance the type of equality). + + Additionally, due to some languages having overloaded operators, and in + order to not have too verbose names, some of these builtins may have + overloaded signtures, such as comparisons on numbers which can operate on + integers, rationals, or reals. 
Note that arbitrary arity operators (well + family of operators) can be also be seen as overloaded operators. + Overloaded types (particularly for numbers) are written: + - [{a=(Int|Rational|Real)} a -> a -> Prop], where the notable difference + with polymorphic functions is that functions of this type does not + take a type argument. +*) + + +(** {2 Type definition} *) +(* ************************************************************************* *) + +type 'a t = .. +constraint 'a = < .. > +(* Extensible variant type for builtin operations. + Parameterized over an object type that will record the type used for + various constants and/or terms. *) + + +(** {2 Base Builtins} *) +(* ************************************************************************* *) + +type _ t += + | Base + (** The base builtin; it is the default builtin for identifiers. *) + +type _ t += + | Wildcard : { ty : 'ty option ref; } -> < ty : 'ty ; .. > t + (** Wildcards, currently used internally to represent implicit type + variables during type-checking. *) + +type _ t += + | Kind + (** Used for the type of [Type]. + It is an error to try and access the type of kind. *) + | Type + (** Builtin used to represent the type of types. *) + | Prop + (** [Prop: ttype]: the builtin type constant for the type of + propositions / booleans. *) + +type _ t += + | Unit + (** The unit type, which has only one element (named void). *) + | Univ + (** [Univ: ttype]: a builtin type constant used for languages + with a default type for elements (such as tptp's `$i`). *) + +type _ t += + | Coercion + (** [Coercion: 'a 'b. 'a -> 'b]: + Coercion/cast operator, i.e. allows to cast values of some type to + another type. This is a polymorphic operator that takes two type + arguments [a] and [b], a value of type [a], and returns a value of + type [b]. + The interpretation/semantics of this cast can remain + up to the user. 
This operator is currently mainly used to cast + numeric types when this transormation is exact (i.e. an integer + casted into a rational, which is always possible and exact, + or the cast of a rational into an integer, as long as the cast is + guarded by a clause verifying the rational is an integer). *) + + +(** {2 Boolean Builtins} *) +(* ************************************************************************* *) + +type _ t += + | True (** [True: Prop]: the [true] proposition. *) + | False (** [False: Prop]: the [false] proposition. *) + | Equal (** [Equal: 'a. 'a -> ... -> Prop]: equality beetween values. *) + | Distinct (** [Distinct: 'a. 'a -> ... -> Prop]: pairwise dis-equality beetween arguments. *) + | Neg (** [Neg: Prop -> Prop]: propositional negation. *) + | And (** [And: Prop -> Prop]: propositional conjunction. *) + | Or (** [Or: Prop -> ... -> Prop]: propositional disjunction. *) + | Nand (** [Nand: Prop -> Prop -> Prop]: propositional negated conjunction. *) + | Nor (** [Nor: Prop -> Prop -> Prop]: propositional negated disjunction. *) + | Xor (** [Xor: Prop -> Prop -> Prop]: ppropositional exclusive disjunction. *) + | Imply (** [Imply: Prop -> Prop -> Prop]: propositional implication. *) + | Implied (** [Implied: Prop -> Prop -> Prop]: reverse propositional implication. *) + | Equiv (** [Equiv: Prop -> Prop -> Prop]: propositional Equivalence. *) + +type _ t += + | Ite + (** [Ite: 'a. Prop -> 'a -> 'a -> 'a]: branching operator. *) + +type _ t += + | Pi + (** [Pi: 'a. ('a -> Prop) -> Prop]: higher-order encoding of universal quantification. *) + | Sigma + (** [Sigma: 'a. ('a -> Prop) -> Prop]: higher-order encoding of existencial quantification. *) + + +(** {2 Algebraic datatype Builtins} *) +(* ************************************************************************* *) + +type _ t += + | Tester : + { adt: 'ty_cst; case: int; cstr : 'term_cst; } -> + < ty_cst : 'ty_cst ; term_cst : 'term_cst; .. 
> t + (** [Tester { adt; cstr; case; }] is the tester of the case-th constructor + of type [adt] which should be [cstr]. *) + | Constructor : + { adt : 'ty_cst; case : int; } -> + < ty_cst : 'ty_cst ; .. > t + (** [Constructor { adt; case}] is the case-th constructor of the algebraic + datatype defined by [adt]. *) + | Destructor : + { adt : 'ty_cst; case : int; cstr : 'term_cst; field: int; } -> + < ty_cst : 'ty_cst ; term_cst : 'term_cst; .. > t + (** [Destructor { adt; cstr; case; field; }] is the destructor returning the + field-th argument of the case-th constructor of type [adt] which should + be [cstr]. *) + + +(** {2 Arithmetic Builtins} *) +(* ************************************************************************* *) + +type _ t += + | Int + (** [Int: ttype] the type for signed integers of arbitrary precision. *) + | Integer of string + (** [Integer s: Int]: integer litteral. The string [s] should be the + decimal representation of an integer with arbitrary precision (hence + the use of strings rather than the limited precision [int]). *) + | Rat + (** [Rat: ttype] the type for signed rationals. *) + | Rational of string + (** [Rational s: Rational]: rational litteral. The string [s] should be + the decimal representation of a rational (see the various languages + spec for more information). *) + | Real + (** [Real: ttype] the type for signed reals. *) + | Decimal of string + (** [Decimal s: Real]: real litterals. The string [s] should be a + floating point representatoin of a real. Not however that reals + here means the mathematical abstract notion of real numbers, including + irrational, non-algebric numbers, and is thus not restricted to + floating point numbers, although these are the only litterals + supported. *) + | Lt + (** [Lt: {a=(Int|Rational|Real)} a -> a -> Prop]: + strict comparison (less than) on numbers + (whether integers, rationals, or reals). 
*) + | Leq + (** [Leq:{a=(Int|Rational|Real)} a -> a -> Prop]: + large comparison (less or equal than) on numbers + (whether integers, rationals, or reals). *) + | Gt + (** [Gt:{a=(Int|Rational|Real)} a -> a -> Prop]: + strict comparison (greater than) on numbers + (whether integers, rationals, or reals). *) + | Geq + (** [Geq:{a=(Int|Rational|Real)} a -> a -> Prop]: + large comparison (greater or equal than) on numbers + (whether integers, rationals, or reals). *) + | Minus + (** [Minus:{a=(Int|Rational|Real)} a -> a]: + arithmetic unary negation/minus on numbers + (whether integers, rationals, or reals). *) + | Add + (** [Add:{a=(Int|Rational|Real)} a -> a -> a]: + arithmetic addition on numbers + (whether integers, rationals, or reals). *) + | Sub + (** [Sub:{a=(Int|Rational|Real)} a -> a -> a]: + arithmetic substraction on numbers + (whether integers, rationals, or reals). *) + | Mul + (** [Mul:{a=(Int|Rational|Real)} a -> a -> a]: + arithmetic multiplication on numbers + (whether integers, rationals, or reals). *) + | Pow + (** [Pow:{a=(Int|Rational|Real)} a -> a -> a]: + arithmetic exponentiation on numbers + (whether integers, rationals, or reals). *) + | Div + (** [Div:{a=(Rational|Real)} a -> a -> a]: + arithmetic exact division on numbers + (rationals, or reals, but **not** integers). *) + | Div_e + (** [Div_e:{a=(Int|Rational|Real)} a -> a -> a]: + arithmetic integer euclidian quotient + (whether integers, rationals, or reals). + If D is positive then [Div_e (N,D)] is the floor + (in the type of N and D) of the real division [N/D], + and if D is negative then [Div_e (N,D)] is the ceiling + of [N/D]. *) + | Div_t + (** [Div_t:{a=(Int|Rational|Real)} a -> a -> a]: + arithmetic integer truncated quotient + (whether integers, rationals, or reals). + [Div_t (N,D)] is the truncation of the real + division [N/D]. *) + | Div_f + (** [Div_f:{a=(Int|Rational|Real)} a -> a -> a]: + arithmetic integer floor quotient + (whether integers, rationals, or reals). 
+ [Div_t (N,D)] is the floor of the real + division [N/D]. *) + | Modulo_e + (** [Modulo_e:{a=(Int|Rational|Real)} a -> a -> a]: + arithmetic integer euclidian remainder + (whether integers, rationals, or reals). + It is defined by the following equation: + [Div_e (N, D) * D + Modulo(N, D) = N]. *) + | Modulo_t + (** [Modulo_t:{a=(Int|Rational|Real)} a -> a -> a]: + arithmetic integer truncated remainder + (whether integers, rationals, or reals). + It is defined by the following equation: + [Div_t (N, D) * D + Modulo_t(N, D) = N]. *) + | Modulo_f + (** [Modulo_f:{a=(Int|Rational|Real)} a -> a -> a]: + arithmetic integer floor remainder + (whether integers, rationals, or reals). + It is defined by the following equation: + [Div_f (N, D) * D + Modulo_f(N, D) = N]. *) + | Abs + (** [Abs: Int -> Int]: + absolute value on integers. *) + | Divisible + (** [Divisible: Int -> Int -> Prop]: + divisibility predicate on integers. Smtlib restricts + applications of this predicate to have a litteral integer + for the divisor/second argument. *) + | Is_int + (** [Is_int:{a=(Int|Rational|Real)} a -> Prop]: + integer predicate for numbers: is the given number + an integer. *) + | Is_rat + (** [Is_rat:{a=(Int|Rational|Real)} a -> Prop]: + rational predicate for numbers: is the given number + an rational. *) + | Floor + (** [Floor:{a=(Int|Rational|Real)} a -> a]: + floor function on numbers, defined in tptp as + the largest integer not greater than the argument. *) + | Floor_to_int + (** [Floor_to_int:{a=(Rational|Real)} a -> Int]: + floor and conversion to integers in a single funciton. + Should return the greatest integer [i] such that the + rational or real intepretation of [i] is less than, or + equal to, the argument. *) + | Ceiling + (** [Ceiling:{a=(Int|Rational|Real)} a -> a]: + ceiling function on numbers, defined in tptp as + the smallest integer not less than the argument. 
*) + | Truncate + (** [Truncate:{a=(Int|Rational|Real)} a -> a]: + ceiling function on numbers, defined in tptp as + the nearest integer value with magnitude not greater + than the absolute value of the argument. *) + | Round + (** [Round:{a=(Int|Rational|Real)} a -> a]: + rounding function on numbers, defined in tptp as + the nearest intger to the argument; when the argument + is halfway between two integers, the nearest even integer + to the argument. *) + + +(** {2 Arrays Builtins} *) +(* ************************************************************************* *) + +type _ t += + | Array + (** [Array: ttype -> ttype -> ttype]: the type constructor for + polymorphic functional arrays. An [(src, dst) Array] is an array + from expressions of type [src] to expressions of type [dst]. + Typically, such arrays are immutables. *) + | Store + (** [Store: 'a 'b. ('a, 'b) Array -> 'a -> 'b -> ('a, 'b) Array]: + store operation on arrays. Returns a new array with the key bound + to the given value (shadowing the previous value associated to + the key). *) + | Select + (** [Select: 'a 'b. ('a, 'b) Array -> 'a -> 'b]: + select operation on arrays. Returns the value associated to the + given key. Typically, functional arrays are complete, i.e. all + keys are mapped to a value. *) + + +(** {2 Bitvectors Builtins} *) +(* ************************************************************************* *) + +type _ t += + | Bitv of int + (** [Bitv n: ttype]: type constructor for bitvectors of length [n]. *) + | Bitvec of string + (** [Bitvec s: Bitv]: bitvector litteral. The string [s] should + be a binary representation of bitvectors using characters + ['0'], and ['1'] (lsb last) *) + | Bitv_concat + (** [Bitv_concat: Bitv(n) -> Bitv(m) -> Bitv(n+m)]: + concatenation operator on bitvectors. *) + | Bitv_extract of int * int + (** [Bitv_extract(i, j): Bitv(n) -> Bitv(i - j + 1)]: + bitvector extraction, from index [j] up to [i] (both included). 
*) + | Bitv_repeat + (** [Bitv_repeat: Bitv(n) -> Bitv(n*k)]: + bitvector repeatition. NOTE: inlcude [k] in the builtin ? *) + | Bitv_zero_extend + (** [Bitv_zero_extend: Bitv(n) -> Bitv(n + k)]: + zero extension for bitvectors (produces a representation of the + same unsigned integer). *) + | Bitv_sign_extend + (** [Bitv_sign_extend: Bitv(n) -> Bitv(n + k)]: + sign extension for bitvectors ((produces a representation of the + same signed integer). *) + | Bitv_rotate_right of int + (** [Bitv_rotate_right(i): Bitv(n) -> Bitv(n)]: + logical rotate right for bitvectors by [i]. *) + | Bitv_rotate_left of int + (** [Bitv_rotate_left(i): Bitv(n) -> Bitv(n)]: + logical rotate left for bitvectors by [i]. *) + | Bitv_not + (** [Bitv_not: Bitv(n) -> Bitv(n)]: + bitwise negation for bitvectors. *) + | Bitv_and + (** [Bitv_and: Bitv(n) -> Bitv(n) -> Bitv(n)]: + bitwise conjunction for bitvectors. *) + | Bitv_or + (** [bitv_or: Bitv(n) -> Bitv(n) -> Bitv(n)]: + bitwise disjunction for bitvectors. *) + | Bitv_nand + (** [Bitv_nand: Bitv(n) -> Bitv(n) -> Bitv(n)]: + bitwise negated conjunction for bitvectors. + [Bitv_nand s t] abbreviates [Bitv_not (Bitv_and s t))]. *) + | Bitv_nor + (** [Bitv_nor: Bitv(n) -> Bitv(n) -> Bitv(n)]: + bitwise negated disjunction for bitvectors. + [Bitv_nor s t] abbreviates [Bitv_not (Bitv_or s t))]. *) + | Bitv_xor + (** [Bitv_xor: Bitv(n) -> Bitv(n) -> Bitv(n)]: + bitwise exclusive disjunction for bitvectors. + [Bitv_xor s t] abbreviates + [Bitv_or (Bitv_and s (Bitv_not t)) + (Bitv_and (Bitv_not s) t) ]. *) + | Bitv_xnor + (** [Bitv_xnor: Bitv(n) -> Bitv(n) -> Bitv(n)]: + bitwise negated exclusive disjunction for bitvectors. + [Bitv_xnor s t] abbreviates + [Bitv_or (Bitv_and s t) + (Bitv_and (Bitv_not s) (Bitv_not t))]. *) + | Bitv_comp + (** [Bitv_comp: Bitv(n) -> Bitv(n) -> Bitv(1)]: + Returns the constant bitvector ["1"] is all bits are equal, + and the bitvector ["0"] if not. 
*) + | Bitv_neg + (** [Bitv_neg: Bitv(n) -> Bitv(n)]: + 2's complement unary minus. *) + | Bitv_add + (** [Bitv_add: Bitv(n) -> Bitv(n) -> Bitv(n)]: + addition modulo 2^n. *) + | Bitv_sub + (** [Bitv_sub: Bitv(n) -> Bitv(n) -> Bitv(n)]: + 2's complement subtraction modulo 2^n. *) + | Bitv_mul + (** [Bitv_mul: Bitv(n) -> Bitv(n) -> Bitv(n)]: + multiplication modulo 2^n. *) + | Bitv_udiv + (** [Bitv_udiv: Bitv(n) -> Bitv(n) -> Bitv(n)]: + unsigned division, truncating towards 0. *) + | Bitv_urem + (** [Bitv_urem: Bitv(n) -> Bitv(n) -> Bitv(n)]: + unsigned remainder from truncating division. *) + | Bitv_sdiv + (** [Bitv_sdiv: Bitv(n) -> Bitv(n) -> Bitv(n)]: + 2's complement signed division. *) + | Bitv_srem + (** [Bitv_srem: Bitv(n) -> Bitv(n) -> Bitv(n)]: + 2's complement signed remainder (sign follows dividend). *) + | Bitv_smod + (** [Bitv_smod: Bitv(n) -> Bitv(n) -> Bitv(n)]: + 2's complement signed remainder (sign follows divisor). *) + | Bitv_shl + (** [Bitv_shl: Bitv(n) -> Bitv(n) -> Bitv(n)]: + shift left (equivalent to multiplication by 2^x where x + is the value of the second argument). *) + | Bitv_lshr + (** [Bitv_lshr: Bitv(n) -> Bitv(n) -> Bitv(n)]: + logical shift right (equivalent to unsigned division by 2^x, + where x is the value of the second argument). *) + | Bitv_ashr + (** [Bitv_ashr: Bitv(n) -> Bitv(n) -> Bitv(n)]: + Arithmetic shift right, like logical shift right except that + the most significant bits of the result always copy the most + significant bit of the first argument. *) + | Bitv_ult + (** [Bitv_ult: Bitv(n) -> Bitv(n) -> Prop]: + binary predicate for unsigned less-than. *) + | Bitv_ule + (** [Bitv_ule: Bitv(n) -> Bitv(n) -> Prop]: + binary predicate for unsigned less than or equal. *) + | Bitv_ugt + (** [Bitv_ugt: Bitv(n) -> Bitv(n) -> Prop]: + binary predicate for unsigned greater-than. *) + | Bitv_uge + (** [Bitv_uge: Bitv(n) -> Bitv(n) -> Prop]: + binary predicate for unsigned greater than or equal. 
*) + | Bitv_slt + (** [Bitv_slt: Bitv(n) -> Bitv(n) -> Prop]: + binary predicate for signed less-than. *) + | Bitv_sle + (** [Bitv_sle: Bitv(n) -> Bitv(n) -> Prop]: + binary predicate for signed less than or equal. *) + | Bitv_sgt + (** [Bitv_sgt: Bitv(n) -> Bitv(n) -> Prop]: + binary predicate for signed greater-than. *) + | Bitv_sge + (** [Bitv_sge: Bitv(n) -> Bitv(n) -> Prop]: + binary predicate for signed greater than or equal. *) + + +(** {2 Floats Builtins} *) +(* ************************************************************************* *) + +type _ t += + | RoundingMode + (** [RoundingMode: ttype]: type for enumerated type of rounding modes. *) + | RoundNearestTiesToEven + (** [RoundNearestTiesToEven : RoundingMode]: *) + | RoundNearestTiesToAway + (** [RoundNearestTiesToAway : RoundingMode]: *) + | RoundTowardPositive + (** [RoundTowardPositive : RoundingMode]: *) + | RoundTowardNegative + (** [RoundTowardNegative : RoundingMode]: *) + | RoundTowardZero + (** [RoundTowardZero : RoundingMode]: *) + | Float of int * int + (** [Float(e,s): ttype]: type constructor for floating point of exponent of + size [e] and significand of size [s] (hidden bit included). Those sizes are + greater than 1 *) + | Fp of int * int + (** [Fp(e, s): Bitv(1) -> Bitv(e) -> Bitv(s-1) -> Fp(e,s)]: bitvector literal. + The IEEE-format is used for the conversion [sb^se^ss]. + All the NaN are converted to the same value.
*) + | Plus_infinity of int * int + (** [Plus_infinity(s,e) : Fp(s,e)] *) + | Minus_infinity of int * int + (** [Minus_infinity(s,e) : Fp(s,e)] *) + | Plus_zero of int * int + (** [Plus_zero(s,e) : Fp(s,e)] *) + | Minus_zero of int * int + (** [Minus_zero(s,e) : Fp(s,e)] *) + | NaN of int * int + (** [NaN(s,e) : Fp(s,e)] *) + | Fp_abs of int * int + (** [Fp_abs(s,e): Fp(s,e) -> Fp(s,e)]: absolute value *) + | Fp_neg of int * int + (** [Fp_neg(s,e): Fp(s,e) -> Fp(s,e)]: negation *) + | Fp_add of int * int + (** [Fp_add(s,e): RoundingMode -> Fp(s,e) -> Fp(s,e) -> Fp(s,e)]: addition *) + | Fp_sub of int * int + (** [Fp_sub(s,e): RoundingMode -> Fp(s,e) -> Fp(s,e) -> Fp(s,e)]: subtraction *) + | Fp_mul of int * int + (** [Fp_mul(s,e): RoundingMode -> Fp(s,e) -> Fp(s,e) -> Fp(s,e)]: multiplication *) + | Fp_div of int * int + (** [Fp_div(s,e): RoundingMode -> Fp(s,e) -> Fp(s,e) -> Fp(s,e)]: division *) + | Fp_fma of int * int + (** [Fp_fma(s,e): RoundingMode -> Fp(s,e) -> Fp(s,e)]: fuse multiply add *) + | Fp_sqrt of int * int + (** [Fp_sqrt(s,e): RoundingMode -> Fp(s,e) -> Fp(s,e)]: square root *) + | Fp_rem of int * int + (** [Fp_rem(s,e): Fp(s,e) -> Fp(s,e) -> Fp(s,e)]: remainder *) + | Fp_roundToIntegral of int * int + (** [Fp_roundToIntegral(s,e): RoundingMode -> Fp(s,e) -> Fp(s,e)]: round to integral *) + | Fp_min of int * int + (** [Fp_min(s,e): Fp(s,e) -> Fp(s,e) -> Fp(s,e)]: minimum *) + | Fp_max of int * int + (** [Fp_max(s,e): Fp(s,e) -> Fp(s,e) -> Fp(s,e)]: maximum *) + | Fp_leq of int * int + (** [Fp_leq(s,e): Fp(s,e) -> Fp(s,e) -> Prop]: IEEE less or equal *) + | Fp_lt of int * int + (** [Fp_lt(s,e): Fp(s,e) -> Fp(s,e) -> Prop]: IEEE less than *) + | Fp_geq of int * int + (** [Fp_geq(s,e): Fp(s,e) -> Fp(s,e) -> Prop]: IEEE greater or equal *) + | Fp_gt of int * int + (** [Fp_gt(s,e): Fp(s,e) -> Fp(s,e) -> Prop]: IEEE greater than *) + | Fp_eq of int * int + (** [Fp_eq(s,e): Fp(s,e) -> Fp(s,e) -> Prop]: IEEE equality *) + | Fp_isNormal of int * int + (** 
[Fp_isNormal(s,e): Fp(s,e) -> Prop]: test if it is a normal floating point *) + | Fp_isSubnormal of int * int + (** [Fp_isSubnormal(s,e): Fp(s,e) -> Prop]: test if it is a subnormal floating point *) + | Fp_isZero of int * int + (** [Fp_isZero(s,e): Fp(s,e) -> Prop]: test if it is a zero *) + | Fp_isInfinite of int * int + (** [Fp_isInfinite(s,e): Fp(s,e) -> Prop]: test if it is an infinite *) + | Fp_isNaN of int * int + (** [Fp_isNaN(s,e): Fp(s,e) -> Prop]: test if it is Not a Number *) + | Fp_isNegative of int * int + (** [Fp_isNegative(s,e): Fp(s,e) -> Prop]: test if it is negative *) + | Fp_isPositive of int * int + (** [Fp_isPositive(s,e): Fp(s,e) -> Prop]: test if it is positive *) + | Ieee_format_to_fp of int * int + (** [Ieee_format_to_fp(s,e): Bv(s+e) -> Fp(s,e)]: Convert from IEEE interchange format *) + | Fp_to_fp of int * int * int * int + (** [Fp_to_fp(s1,e1,s2,e2): RoundingMode -> Fp(s1,e1) -> Fp(s2,e2)]: Convert from another floating point format *) + | Real_to_fp of int * int + (** [Real_to_fp(s,e): RoundingMode -> Real -> Fp(s,e)]: Convert from a real *) + | Sbv_to_fp of int * int * int + (** [Sbv_to_fp(m,s,e): RoundingMode -> Bitv(m) -> Fp(s,e)]: Convert from a signed integer *) + | Ubv_to_fp of int * int * int + (** [Ubv_to_fp(m,s,e): RoundingMode -> Bitv(m) -> Fp(s,e)]: Convert from a unsigned integer *) + | To_ubv of int * int * int + (** [To_ubv(s,e,m): RoundingMode -> Fp(s,e) -> Bitv(m)]: Convert to an unsigned integer *) + | To_sbv of int * int * int + (** [To_ubv(s,e,m): RoundingMode -> Fp(s,e) -> Bitv(m)]: Convert to an signed integer *) + | To_real of int * int + (** [To_real(s,e,m): RoundingMode -> Fp(s,e) -> Real]: Convert to real *) + + +(** {2 String and Regexp Builtins} *) +(* ************************************************************************* *) + +type _ t += + | String + (** [String: ttype]: type constructor for strings. *) + | Str of string + (** [Str s: String]: string literals. 
*) + | Str_length + (** [Str_length: String -> Int]: string length. *) + | Str_at + (** [Str_at: String -> Int -> String]: + Singleton string containing a character at given position + or empty string when position is out of range. + The leftmost position is 0. *) + | Str_to_code + (** [Str_to_code: String -> Int]: + [Str_to_code s] is the code point of the only character of s, + if s is a singleton string; otherwise, it is -1. *) + | Str_of_code + (** [Str_of_code: Int -> String]: + [Str_of_code n] is the singleton string whose only character is + code point n if n is in the range [0, 196607]; otherwise, it is the + empty string. *) + | Str_is_digit + (** [Str_is_digit: String -> Prop]: Digit check + [Str.is_digit s] is true iff s consists of a single character which is + a decimal digit, that is, a code point in the range 0x0030 ... 0x0039. *) + | Str_to_int + (** [Str_to_int: String -> Int]: Conversion to integers + [Str.to_int s] with s consisting of digits (in the sense of str.is_digit) + evaluates to the positive integer denoted by s when seen as a number in + base 10 (possibly with leading zeros). + It evaluates to -1 if s is empty or contains non-digits. *) + | Str_of_int + (** [Str_of_int : Int -> String]: Conversion from integers. + [Str.from_int n] with n non-negative is the corresponding string in + decimal notation, with no leading zeros. If n < 0, it is the empty string. *) + | Str_concat + (** [Str_concat: String -> String -> String]: string concatenation. *) + | Str_sub + (** [Str_sub: String -> Int -> Int -> String]: + [Str_sub s i n] evaluates to the longest (unscattered) substring + of s of length at most n starting at position i. + It evaluates to the empty string if n is negative or i is not in + the interval [0,l-1] where l is the length of s. *) + | Str_index_of + (** [Str_index_of: String -> String -> Int -> Int]: + Index of first occurrence of second string in first one starting at + the position specified by the third argument. 
+ [Str_index_of s t i], with 0 <= i <= |s| is the position of the first + occurrence of t in s at or after position i, if any. + Otherwise, it is -1. Note that the result is i whenever i is within + the range [0, |s|] and t is empty. *) + | Str_replace + (** [Str_replace: String -> String -> String -> String]: Replace + [Str_replace s t t'] is the string obtained by replacing the first + occurrence of t in s, if any, by t'. Note that if t is empty, the + result is to prepend t' to s; also, if t does not occur in s then + the result is s. *) + | Str_replace_all + (** [Str_replace_all: String -> String -> String -> String]: + [Str_replace_all s t tâ] is s if t is the empty string. Otherwise, it + is the string obtained from s by replacing all occurrences of t in s + by tâ, starting with the first occurrence and proceeding in + left-to-right order. *) + | Str_replace_re + (** [Str_replace_re: String -> String_RegLan -> String -> String]: + [Str_replace_re s r t] is the string obtained by replacing the + shortest leftmost non-empty match of r in s, if any, by t. + Note that if t is empty, the result is to prepend t to s. *) + | Str_replace_re_all + (** [Str_replace_re_all: String -> String_RegLan -> String -> String]: + [Str_replace_re_all s r t] is the string obtained by replacing, + left-to right, each shortest *non-empty* match of r in s by t. *) + | Str_is_prefix + (** [Str_is_prefix: String -> String -> Prop]: Prefix check + [Str_is_prefix s t] is true iff s is a prefix of t. *) + | Str_is_suffix + (** [Str_is_suffix: String -> String -> Prop]: Suffix check + [Str_is_suffix s t] is true iff s is a suffix of t. *) + | Str_contains + (** [Str_contains: String -> String -> Prop]: Inclusion check + [Str_contains s t] is true iff s contains t. *) + | Str_lexicographic_strict + (** [Str_lexicographic_strict: String -> String -> Prop]: + lexicographic ordering (strict). 
*) + | Str_lexicographic_large + (** [Str_lexicographic_large: String -> String -> Prop]: + reflexive closure of the lexicographic ordering. *) + | Str_in_re + (** [Str_in_re: String -> String_RegLan -> Prop]: set membership. *) + +(* String Regular languages *) +type _ t += + | String_RegLan + (** [String_RegLan: ttype]: + type constructor for Regular languages over strings. *) + | Re_empty + (** [Re_empty: String_RegLan]: + the empty language. *) + | Re_all + (** [Re_all: String_RegLan]: + the language of all strings. *) + | Re_allchar + (** [Re_allchar: String_RegLan]: + the language of all singleton strings. *) + | Re_of_string + (** [Re_of_string: String -> String_RegLan]: + the singleton language with a single string. *) + | Re_range + (** [Re_range: String -> String -> String_RegLan]: Language range + [Re_range s1 s2] is the set of all *singleton* strings [s] such that + [Str_lexicographic_large s1 s s2] provided [s1] and [s1] are singleton. + Otherwise, it is the empty language. *) + | Re_concat + (** [Re_concat: String_RegLan -> String_RegLan -> String_RegLan]: + language concatenation. *) + | Re_union + (** [Re_union: String_RegLan -> String_RegLan -> String_RegLan]: + language union. *) + | Re_inter + (** [Re_inter: String_RegLan -> String_RegLan -> String_RegLan]: + language intersection. *) + | Re_star + (** [Re_star: String_RegLan -> String_RegLan]: Kleen star. *) + | Re_cross + (** [Re_cross: String_RegLan -> String_RegLan]: Kleen cross. *) + | Re_complement + (** [Re_complement: String_RegLan -> String_RegLan]: language complement. *) + | Re_diff + (** [Re_diff: String_RegLan -> String_RegLan -> String_RegLan]: + language difference. *) + | Re_option + (** [Re_option: String_RegLan -> String_RegLan]: language option + [Re_option e] abbreviates [Re_union e (Str_to_re "")]. 
*) + | Re_power of int + (** [Re_power(n): String_RegLan -> String_RegLan]: + [Re_power(n) e] is the nth power of e: + - [Re_power(0) e] is [Str_to_re ""] + - [Re_power(n+1) e] is [Re_concat e (Re_power(n) e)] *) + | Re_loop of int * int + (** [Re_loop(n1,n2): String_RegLan -> String_RegLan]: + Defined as: + - [Re_loop(nâ, nâ) e] is [Re_empty] if nâ > nâ + - [Re_loop(nâ, nâ) e] is [Re_power(nâ) e] if nâ = nâ + - [Re_loop(nâ, nâ) e] is + [Re_union ((Re_power(nâ) e) ... (Re_power(nâ) e))] if nâ < nâ + *) + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/dune b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/dune index cfce679acb545f412914371eb53e3147cb949495..068fddf9a5a0347e201168991160ebdf1dac60f1 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/dune +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/dune @@ -1,13 +1,17 @@ (library (name dolmen_std) (public_name dolmen.std) - (libraries dolmen_intf dolmen_line fmt) + (libraries dolmen_intf dolmen_line unix fmt) + (instrumentation (backend bisect_ppx)) (flags :standard -warn-error -3) (modules + ; Maps &Utils % TODO: split this into a dedicated sub-library ? + Timer Stats Maps Maps_string ; Parsing structures - Loc Id Term Statement Normalize + Loc Name Namespace Id Term Statement Normalize ; Typing & Loop stuff - Tag Pretty Expr + Tag Pretty Path Builtin Expr ; Helpers Msg Tok Vec Misc Escape Transformer) ) + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/expr.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/expr.ml index f0abea8560d1cfb14a060c04976b2a1a34e3273b..5cbe8e4dd9a0e4ef31eed3b5d1254ccfa138b64d 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/expr.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/expr.ml @@ -10,248 +10,98 @@ type index = int type 'a tag = 'a Tag.t (* Extensible variant type for builtin operations *) -type builtin = .. 
- -(* Type for first order types *) -type ttype = Type - -(* Identifiers, parametrized by the kind of the type of the variable and - the lengths of the expected arguments lists *) -type 'ty id = { - ty : 'ty; - name : string; - index : index; (** unique *) +type builtin = < + ty : ty; + ty_var : ty_var; + ty_cst : ty_cst; + term : term; + term_var : term_var; + term_cst : term_cst; +> Builtin.t + +(* Identifiers *) +and 'ty id = { + id_ty : 'ty; + index : index; (** unique index *) + path : Path.t; builtin : builtin; mutable tags : Tag.map; } -(* The type of functions *) -type ('ttype, 'ty) function_type = { - fun_vars : 'ttype id list; (* prenex forall *) - fun_args : 'ty list; - fun_ret : 'ty; +and type_ = Type + +(* Type for first order types *) +and type_fun = { + arity : int; + mutable alias : type_alias; } -(* Representation of types *) -type ty_var = ttype id +and type_alias = + | No_alias + | Alias of { + alias_vars : ty_var list; + alias_body : ty; + } + +(* Representation of polymorphic types + Rank-1 polymorphism is enforced at runtime *) +and ty_var = type_ id -and ty_const = (unit, ttype) function_type id +and ty_cst = type_fun id and ty_descr = - | Var of ty_var - | App of ty_const * ty list + | TyVar of ty_var + | TyApp of ty_cst * ty list + | Arrow of ty list * ty + | Pi of ty_var list * ty and ty = { - as_ : ty_var option; - mutable descr : ty_descr; - mutable hash : hash; (* lazy hash *) - mutable tags : Tag.map; + mutable ty_hash : hash; (* lazy hash *) + mutable ty_tags : Tag.map; + mutable ty_descr : ty_descr; + mutable ty_head : ty; } (* Terms and formulas *) -type term_var = ty id +and term_var = ty id -and term_const = (ttype, ty) function_type id +and term_cst = ty id and pattern = term and term_descr = | Var of term_var - | App of term_const * ty list * term list + | Cst of term_cst + | App of term * ty list * term list | Binder of binder * term | Match of term * (pattern * term) list and binder = + | Let_seq of (term_var * term) list + | 
Let_par of (term_var * term) list + | Lambda of ty_var list * term_var list | Exists of ty_var list * term_var list | Forall of ty_var list * term_var list - | Letin of (term_var * term) list and term = { - ty : ty; - descr : term_descr; - mutable hash : hash; - mutable tags : Tag.map; + term_ty : ty; + term_descr : term_descr; + mutable term_hash : hash; + mutable term_tags : Tag.map; } (* Alias for dolmen_loop and others who allow to have different types for terms and formulas. *) -type formula = term - -(* Builtins *) -(* ************************************************************************* *) - -type builtin += Base | Wildcard -type builtin += Prop | Unit | Univ -type builtin += Coercion -type builtin += - | True | False - | Equal | Distinct - | Neg | And | Or - | Nand | Nor | Xor - | Imply | Equiv -type builtin += Ite -type builtin += - | Tester of { - cstr : term_const; - } - | Constructor of { - adt : ty_const; - case : int; - } - | Destructor of { - adt : ty_const; - cstr : term_const; - case : int; - field: int; - } - -(* arithmetic *) -type builtin += - | Int | Integer of string - | Rat | Rational of string - | Real | Decimal of string - | Lt | Leq | Gt | Geq - | Minus | Add | Sub | Mul - | Div - | Div_e | Modulo_e - | Div_t | Modulo_t - | Div_f | Modulo_f - | Abs | Divisible - | Is_int | Is_rat - | Floor | Ceiling | Truncate | Round - -(* arrays *) -type builtin += - | Array | Store | Select - -(* Bitvectors *) -type builtin += - | Bitv of int - | Bitvec of string - | Bitv_concat - | Bitv_extract of int * int - | Bitv_repeat - | Bitv_zero_extend - | Bitv_sign_extend - | Bitv_rotate_right of int - | Bitv_rotate_left of int - | Bitv_not | Bitv_and | Bitv_or - | Bitv_nand | Bitv_nor - | Bitv_xor | Bitv_xnor - | Bitv_comp - | Bitv_neg | Bitv_add | Bitv_sub | Bitv_mul - | Bitv_udiv | Bitv_urem - | Bitv_sdiv | Bitv_srem | Bitv_smod - | Bitv_shl | Bitv_lshr | Bitv_ashr - | Bitv_ult | Bitv_ule - | Bitv_ugt | Bitv_uge - | Bitv_slt | Bitv_sle - | Bitv_sgt | 
Bitv_sge - -(* Floats *) -type builtin += - | Float of int * int - | RoundingMode - | Fp of int * int - | RoundNearestTiesToEven - | RoundNearestTiesToAway - | RoundTowardPositive - | RoundTowardNegative - | RoundTowardZero - | Plus_infinity of int * int - | Minus_infinity of int * int - | Plus_zero of int * int - | Minus_zero of int * int - | NaN of int * int - | Fp_abs of int * int - | Fp_neg of int * int - | Fp_add of int * int - | Fp_sub of int * int - | Fp_mul of int * int - | Fp_div of int * int - | Fp_fma of int * int - | Fp_sqrt of int * int - | Fp_rem of int * int - | Fp_roundToIntegral of int * int - | Fp_min of int * int - | Fp_max of int * int - | Fp_leq of int * int - | Fp_lt of int * int - | Fp_geq of int * int - | Fp_gt of int * int - | Fp_eq of int * int - | Fp_isNormal of int * int - | Fp_isSubnormal of int * int - | Fp_isZero of int * int - | Fp_isInfinite of int * int - | Fp_isNaN of int * int - | Fp_isNegative of int * int - | Fp_isPositive of int * int - | Ieee_format_to_fp of int * int - | Fp_to_fp of int * int * int * int - | Real_to_fp of int * int - | Sbv_to_fp of int * int * int - | Ubv_to_fp of int * int * int - | To_ubv of int * int * int - | To_sbv of int * int * int - | To_real of int * int - -(* Strings *) -type builtin += - | String - | Str of string - | Str_length - | Str_at - | Str_to_code - | Str_of_code - | Str_is_digit - | Str_to_int - | Str_of_int - | Str_concat - | Str_sub - | Str_index_of - | Str_replace - | Str_replace_all - | Str_replace_re - | Str_replace_re_all - | Str_is_prefix - | Str_is_suffix - | Str_contains - | Str_lexicographic_strict - | Str_lexicographic_large - | Str_in_re - -(* String Regular languages *) -type builtin += - | String_RegLan - | Re_empty - | Re_all - | Re_allchar - | Re_of_string - | Re_range - | Re_concat - | Re_union - | Re_inter - | Re_star - | Re_cross - | Re_complement - | Re_diff - | Re_option - | Re_power of int - | Re_loop of int * int +and formula = term (* Exceptions *) (* 
************************************************************************* *) -exception Bad_ty_arity of ty_const * ty list -exception Bad_term_arity of term_const * ty list * term list - -exception Filter_failed_ty of string * ty * string -exception Filter_failed_term of string * term * string - -exception Type_already_defined of ty_const -exception Record_type_expected of ty_const +exception Already_aliased of ty_cst +exception Type_already_defined of ty_cst +exception Record_type_expected of ty_cst +exception Wildcard_already_set of ty_var (* Tags *) @@ -266,6 +116,7 @@ module Tags = struct let pos = Tag.create () let name = Tag.create () let rwrt = Tag.create () + let ac = Tag.create () let exact s = Pretty.Exact s let infix = Pretty.Infix @@ -273,6 +124,7 @@ module Tags = struct let named = Tag.create () let triggers = Tag.create () + let filters = Tag.create () let bound = Tag.create () @@ -293,83 +145,90 @@ module Print = struct let return fmt_str out () = Format.fprintf out "%(%)" fmt_str - let ttype fmt Type = Format.fprintf fmt "Type" - let pp_index fmt (v : _ id) = Format.fprintf fmt "/%d" v.index + (* Id printing *) + + let pp_index fmt (v : _ id) = + let aux fmt v = Format.fprintf fmt "/%d" v.index in + Fmt.styled (`Fg (`Hi `Black)) aux fmt v let id fmt (v : _ id) = - match Tag.last v.tags name with - | Some (Pretty.Exact s | Pretty.Renamed s) -> Format.fprintf fmt "%s" s + match Tag.get v.tags name with + | Some (Pretty.Exact s | Pretty.Renamed s) -> + Format.fprintf fmt "%s" s | None -> if !print_index then - Format.fprintf fmt "%s%a" v.name (Fmt.styled (`Fg (`Hi `Black)) pp_index) v + Format.fprintf fmt "%a%a" Path.print v.path pp_index v else - Format.fprintf fmt "%s" v.name + Format.fprintf fmt "%a" Path.print v.path + + let id_pretty fmt (v : _ id) = + match Tag.get v.tags pos with + | None -> () + | Some Pretty.Infix -> Format.fprintf fmt "(%a)" id v + | Some Pretty.Prefix -> Format.fprintf fmt "[%a]" id v + + let id_type print fmt (v : _ id) = + 
Format.fprintf fmt "@[<hov 2>%a%a :@ %a@]" id v id_pretty v print v.id_ty + + + (* Type printing *) + + let type_ fmt Type = + Format.fprintf fmt "Type" + + let type_fun fmt { arity; alias = _; } = + let rec aux fmt = function + | 0 -> type_ fmt Type + | n -> Format.fprintf fmt "%a ->@ %a" type_ Type aux (n - 1) + in + aux fmt arity + + let ty_var fmt var = id_type type_ fmt var + let ty_cst fmt cst = id_type type_fun fmt cst let rec ty_descr fmt (descr : ty_descr) = match descr with - | Var v -> id fmt v - | App (f, []) -> id fmt f - | App (f, l) -> - begin match Tag.last f.tags pos with + | TyVar v -> id fmt v + | Arrow (args, ret) -> + Format.fprintf fmt "@[<hov 2>%a ->@ %a@]" + (Format.pp_print_list ~pp_sep:(return " ->@ ") subty) args subty ret + | TyApp (f, []) -> id fmt f + | TyApp (f, l) -> + begin match Tag.get f.tags pos with | Some Pretty.Prefix -> Format.fprintf fmt "@[<hov 2>%a %a@]" - id f (Format.pp_print_list ~pp_sep:(return "") ty) l + id f (Format.pp_print_list ~pp_sep:(return "") subty) l | Some Pretty.Infix when List.length l >= 2 -> let pp_sep fmt () = Format.fprintf fmt " %a@ " id f in - Format.fprintf fmt "@[<hov 2>%a@]" (Format.pp_print_list ~pp_sep ty) l + Format.fprintf fmt "@[<hov 2>%a@]" (Format.pp_print_list ~pp_sep subty) l | None | Some Pretty.Infix -> Format.fprintf fmt "@[<hov 2>%a(%a)@]" - id f (Format.pp_print_list ~pp_sep:(return ",@ ") ty) l + id f (Format.pp_print_list ~pp_sep:(return ",@ ") subty) l end + | Pi ([], body) -> ty fmt body + | Pi (poly_vars, body) -> + Format.fprintf fmt "@[<hov 2>â @[<hov>%a@] .@ %a@]" + (Format.pp_print_list ~pp_sep:(return ",@ ") ty_var) poly_vars ty body - and ty fmt (t: ty) = - match t.as_ with - | None -> ty_descr fmt t.descr - | Some v -> Format.fprintf fmt "(%a@ as@ %a)" ty_descr t.descr id v - - let params fmt = function - | [] -> () - | l -> Format.fprintf fmt "â @[<hov>%a@].@ " - (Format.pp_print_list ~pp_sep:(return ",@ ") id) l - - let signature print fmt f = - match f.fun_args with - 
| [] -> Format.fprintf fmt "@[<hov 2>%a%a@]" params f.fun_vars print f.fun_ret - | l -> Format.fprintf fmt "@[<hov 2>%a%a ->@ %a@]" params f.fun_vars - (Format.pp_print_list ~pp_sep:(return " ->@ ") print) l print f.fun_ret - - let fun_ty = signature ty - let fun_ttype = signature ttype - - let id_pretty fmt (v : _ id) = - match Tag.last v.tags pos with - | None -> () - | Some Pretty.Infix -> Format.fprintf fmt "(%a)" id v - | Some Pretty.Prefix -> Format.fprintf fmt "[%a]" id v - - let id_type print fmt (v : _ id) = - Format.fprintf fmt "@[<hov 2>%a%a :@ %a@]" id v id_pretty v print v.ty - - let ty_var fmt id = id_type ttype fmt id - let term_var fmt id = id_type ty fmt id + and subty fmt t = + match t.ty_descr with + | TyVar _ + | TyApp (_, []) -> ty fmt t + | _ -> Format.fprintf fmt "( %a )" ty t - let ty_const fmt id = id_type fun_ttype fmt id - let term_const fmt id = id_type fun_ty fmt id + and ty fmt t = + ty_descr fmt t.ty_descr - let binder_name fmt = function - | Exists _ -> Format.fprintf fmt "â" - | Forall _ -> Format.fprintf fmt "â" - | Letin _ -> Format.fprintf fmt "let" + let term_var fmt var = id_type ty fmt var + let term_cst fmt cst = id_type ty fmt cst - let binder_sep fmt = function - | Exists _ - | Forall _ -> Format.fprintf fmt "." 
- | Letin _ -> Format.fprintf fmt "in" + (* Term printing *) - let rec term fmt t = match t.descr with + let rec term_descr fmt = function | Var v -> id fmt v - | App (f, [], []) -> id fmt f + | Cst c -> id fmt c + | App (f, [], []) -> term fmt f | App (f, tys, args) -> term_app fmt f tys args | Binder (b, body) -> Format.fprintf fmt "@[<hv 2>%a%a@ %a@]" binder b binder_sep b term body @@ -378,55 +237,73 @@ module Print = struct term scrutinee (Format.pp_print_list ~pp_sep:(return "@ ") branch) branches - and term_app fmt (f : _ id) tys args = - match Tag.last f.tags pos with - | Some Pretty.Prefix -> - begin match args with - | [] -> id fmt f - | _ -> - Format.fprintf fmt "@[<hov>%a(%a)@]" - id f (Format.pp_print_list ~pp_sep:(return ",@ ") term) args - end - | Some Pretty.Infix when List.length args >= 2 -> - let pp_sep fmt () = Format.fprintf fmt " %a@ " id f in - Format.fprintf fmt "(@[<hov>%a@])" (Format.pp_print_list ~pp_sep term) args - | None | Some Pretty.Infix -> - begin match tys, args with - | _, [] -> - Format.fprintf fmt "%a(@[<hov>%a@])" - id f (Format.pp_print_list ~pp_sep:(return ",@ ") ty) tys - | [], _ -> - Format.fprintf fmt "%a(@[<hov>%a@])" - id f (Format.pp_print_list ~pp_sep:(return ",@ ") term) args - | _ -> - Format.fprintf fmt "%a(@[<hov>%a%a%a@])" id f - (Format.pp_print_list ~pp_sep:(return ",@ ") ty) tys - (return ";@ ") () - (Format.pp_print_list ~pp_sep:(return ",@ ") term) args + and term_app fmt f tys args = + match f.term_descr with + | Cst g -> + begin match Tag.get g.tags pos with + | Some Pretty.Prefix -> generic_app fmt f [] args + | Some Pretty.Infix when List.length args >= 2 -> + let pp_sep fmt () = Format.fprintf fmt " %a@ " id g in + Format.fprintf fmt "@[<hov>%a@]" (Format.pp_print_list ~pp_sep subterm) args + | None | Some Pretty.Infix -> generic_app fmt f tys args end + | _ -> generic_app fmt f tys args + + and generic_app fmt f tys args = + match tys, args with + | _, [] -> + Format.fprintf fmt "@[<hov>%a@ %a@]" + 
subterm f (Format.pp_print_list ~pp_sep:(return "@ ") ty) tys + | [], _ -> + Format.fprintf fmt "@[<hov>%a@ %a@]" + subterm f (Format.pp_print_list ~pp_sep:(return "@ ") subterm) args + | _ -> + Format.fprintf fmt "@[<hov>%a@ %a@ %a@]" subterm f + (Format.pp_print_list ~pp_sep:(return "@ ") subty) tys + (Format.pp_print_list ~pp_sep:(return "@ ") subterm) args + + and binder_sep fmt = function + | Lambda _ -> Format.fprintf fmt "=>" + | Let_seq _ + | Let_par _ -> Format.fprintf fmt "in" + | Exists _ + | Forall _ -> Format.fprintf fmt "." and binder fmt b = match b with - | Exists (l, []) + | Let_seq l -> + Format.fprintf fmt "let @[<hv>%a@]" + (Format.pp_print_list ~pp_sep:(return ",@ ") binding) l + | Let_par l -> + Format.fprintf fmt "let @[<hv>%a@]" + (Format.pp_print_list ~pp_sep:(return " and@ ") binding) l + + | Lambda (vars, args) -> + Format.fprintf fmt "Îģ @[<hov>%a .@ %a@]" + (Format.pp_print_list ~pp_sep:(return ",@ ") ty_var) vars + (Format.pp_print_list ~pp_sep:(return "@ ") term_var) args + + | Exists (l, []) -> + Format.fprintf fmt "â @[<hov>%a@]" + (Format.pp_print_list ~pp_sep:(return ",@ ") ty_var) l + | Exists ([], l) -> + Format.fprintf fmt "â @[<hov>%a@]" + (Format.pp_print_list ~pp_sep:(return ",@ ") term_var) l + | Exists (tys, ts) -> + Format.fprintf fmt "â @[<hov>%a,@ %a@]" + (Format.pp_print_list ~pp_sep:(return ",@ ") ty_var) tys + (Format.pp_print_list ~pp_sep:(return ",@ ") term_var) ts + | Forall (l, []) -> - Format.fprintf fmt "%a @[<hov>%a@]" - binder_name b + Format.fprintf fmt "â @[<hov>%a@]" (Format.pp_print_list ~pp_sep:(return ",@ ") ty_var) l - | Exists ([], l) | Forall ([], l) -> - Format.fprintf fmt "%a @[<hov>%a@]" - binder_name b + Format.fprintf fmt "â @[<hov>%a@]" (Format.pp_print_list ~pp_sep:(return ",@ ") term_var) l - | Exists (tys, ts) | Forall (tys, ts) -> - Format.fprintf fmt "%a @[<hov>%a,@ %a@]" - binder_name b + Format.fprintf fmt "â @[<hov>%a,@ %a@]" (Format.pp_print_list ~pp_sep:(return ",@ ") ty_var) tys 
(Format.pp_print_list ~pp_sep:(return ",@ ") term_var) ts - | Letin l -> - Format.fprintf fmt "%a @[<hv>%a@]" - binder_name b - (Format.pp_print_list ~pp_sep:(return ",@ ") binding) l and binding fmt (v, t) = Format.fprintf fmt "@[<hov 2>%a =@ %a@]" id v term t @@ -434,6 +311,17 @@ module Print = struct and branch fmt (pattern, body) = Format.fprintf fmt "@[<hov 2>| %a@ ->@ %a" term pattern term body + and subterm fmt t = + match t.term_descr with + | Var _ | Cst _ -> term fmt t + | App (t', [], []) -> subterm fmt t' + | _ -> Format.fprintf fmt "(%a)" term t + + and term fmt t = + term_descr fmt t.term_descr + + let formula = term + let iter ~sep pp fmt k = let first = ref true in k (fun x -> @@ -442,119 +330,18 @@ module Print = struct ) end -(* Views *) -(* ************************************************************************* *) - -module View = struct - - module Ty = struct - type t = [ - | `Int - | `Rat - | `Real - | `Array of ty * ty - | `Bitv of int - | `Float of int * int - | `String - | `String_reg_lang - (* Generic cases *) - | `Var of ty_var - | `App of [ - | `Generic of ty_const - | `Builtin of builtin - ] * ty list - ] - - let view (ty : ty) : t = - match ty.descr with - | Var v -> `Var v - | App (({ builtin; _ } as c), l) -> - begin match builtin with - | Int -> `Int - | Rat -> `Rat - | Real -> `Real - | Bitv i -> `Bitv i - | Float (e, s) -> `Float (e, s) - | Array -> begin match l with - | [src; dst] -> `Array (src, dst) - | _ -> assert false (* not possible *) - end - | String -> `String - | String_RegLan -> `String_reg_lang - | Base -> `App (`Generic c, l) - | _ -> `App (`Builtin builtin, l) - end - - end - -end - -(* Flags and filters *) -(* ************************************************************************* *) - -module Filter = struct - - type status = [ - | `Pass - | `Warn - | `Error of string - ] - type ty_filter = string * bool ref * (ty_const -> ty list -> status) - type term_filter = string * bool ref * (term_const -> ty list -> 
term list -> status) - - let ty : ty_filter tag = Tag.create () - let term : term_filter tag = Tag.create () - - module type S = sig - val name : string - val active : bool ref - val reset : unit -> unit - end - -end - (* Helpers *) (* ************************************************************************* *) (* Useful shorthand for chaining comparisons *) -let (<?>) i (cmp, x, y) = - match i with - | 0 -> cmp x y - | _ -> i - -(* hash helpers *) -let hash2 x y = Hashtbl.seeded_hash x y -let hash3 x y z = hash2 x (hash2 y z) -let hash4 x y z t = hash2 x (hash3 y z t) +let (<?>) = Misc.(<?>) +let lexicographic = Misc.lexicographic (* option iter *) -let option_iter f = function - | None -> () - | Some x -> f x - -(* list hash *) -let hash_list f l = - let rec aux acc = function - | [] -> acc - | x :: r -> aux (Hashtbl.seeded_hash acc (f x)) r - in - aux 0 l - -(* lexicographic comparison *) -let lexicographic cmp l l' = - let rec aux l l' = - match l, l' with - | [], [] -> 0 - | _ :: _, [] -> 1 - | [], _ :: _ -> -1 - | x :: r, x' :: r' -> - begin match cmp x x' with - | 0 -> aux r r' - | res -> res - end - in - aux l l' +let option_map f = function + | None -> None + | Some x -> Some (f x) (* List creation *) let init_list n f = @@ -580,6 +367,7 @@ let with_cache ~cache f x = Hashtbl.add cache x res; res + (* Ids *) (* ************************************************************************* *) @@ -587,6 +375,8 @@ module Id = struct type 'a t = 'a id + let print = Print.id + (* Usual functions *) let hash (v : _ t) = v.index @@ -594,64 +384,42 @@ module Id = struct let equal v v' = compare v v' = 0 - let print fmt id = Format.pp_print_string fmt id.name - (* Tags *) - let tag (id : _ id) k v = id.tags <- Tag.add id.tags k v - let get_tag (id : _ id) k = Tag.get id.tags k + let get_tag_last (id : _ id) k = Tag.get_last id.tags k + let get_tag_list (id : _ id) k = Tag.get_list id.tags k + + let set_tag (id : _ id) k v = id.tags <- Tag.set id.tags k v + let add_tag 
(id : _ id) k v = id.tags <- Tag.add id.tags k v + let add_tag_opt (id : _ id) k o = id.tags <- Tag.add_opt id.tags k o + let add_tag_list (id : _ id) k l = id.tags <- Tag.add_list id.tags k l - let get_tag_last (id : _ id) k = Tag.last id.tags k + let unset_tag (id : _ id) k = id.tags <- Tag.unset id.tags k (* Creating ids *) let id_counter = ref 0 - let mk ?(builtin=Base) ?(tags=Tag.empty) name ty = + let mk + ?pos ?name + ?(tags=Tag.empty) + ?(builtin=Builtin.Base) + id_path id_ty = incr id_counter; - { name; ty; builtin; tags; index = !id_counter; } - - let const - ?pos ?name ?builtin ?tags - ?(ty_filters=[]) ?(term_filters=[]) - cname fun_vars fun_args fun_ret = - let res = mk ?builtin ?tags cname { fun_vars; fun_args; fun_ret; } in - (* Add filter tags *) - List.iter (tag res Filter.ty) ty_filters; - List.iter (tag res Filter.term) term_filters; - (* Add pretty printing tags *) - option_iter (tag res Print.pos) pos; - option_iter (fun s -> tag res Print.name (Pretty.Exact s)) name; - (* Return the id *) - res - - let indexed - ?pos ?name ?builtin ?tags - cname fun_vars fun_arg fun_ret = - let h = Hashtbl.create 13 in - (fun i -> - match Hashtbl.find h i with - | res -> res - | exception Not_found -> - let fun_args = replicate i fun_arg in - let c = const - ?pos ?name ?builtin ?tags - cname fun_vars fun_args fun_ret - in - Hashtbl.add h i c; - c - ) + let tags = Tag.set_opt tags Print.pos pos in + let tags = Tag.set_opt tags Print.name + (option_map (fun s -> Pretty.Exact s) name) + in + { path = id_path; id_ty; builtin; tags; index = !id_counter; } end -(* Maps from pairs of integers *) +(* Maps from integers *) (* ************************************************************************* *) -module Mi = Map.Make(struct - type t = index - let compare (a : int) b = compare a b - end) +module M = Map.Make(Int) -(* Sets of ids *) + +(* Sets of variables *) (* ************************************************************************* *) module FV = struct @@ 
-660,25 +428,25 @@ module FV = struct | Ty of ty_var | Term of term_var - type t = elt Mi.t + type t = elt M.t let tok v = v.index let token = function | Ty v -> tok v | Term v -> tok v - (* let mem v s = Mi.mem (tok v) s *) - (* let get v s = Mi.find (tok v) s *) - let add x (s : t) = Mi.add (token x) x s - let del x (s : t) = Mi.remove (tok x) s + (* let mem v s = M.mem (tok v) s *) + (* let get v s = M.find (tok v) s *) + let add x (s : t) = M.add (token x) x s + let del x (s : t) = M.remove (tok x) s - let empty = Mi.empty + let empty = M.empty let remove s l = List.fold_left (fun acc v -> del v acc) s l let diff s s' = - Mi.merge (fun _ o o' -> + M.merge (fun _ o o' -> match o, o' with | None, None -> None | None, Some _ -> None @@ -687,7 +455,7 @@ module FV = struct ) s s' let merge s s' = - Mi.merge (fun _ o o' -> + M.merge (fun _ o o' -> match o, o' with | None, None -> None | _, ((Some _) as res) @@ -699,7 +467,7 @@ module FV = struct | Ty v -> (v :: tys, ts) | Term v -> (tys, v :: ts) in - Mi.fold aux s ([], []) + M.fold aux s ([], []) end @@ -708,18 +476,18 @@ end module Subst = struct - type ('a, 'b) t = ('a * 'b) Mi.t + type ('a, 'b) t = ('a * 'b) M.t (* Usual functions *) - let empty = Mi.empty + let empty = M.empty - let is_empty = Mi.is_empty + let is_empty = M.is_empty let wrap key = function | None -> None | Some x -> Some (key, x) - let merge f = Mi.merge (fun _ opt1 opt2 -> + let merge f = M.merge (fun _ opt1 opt2 -> match opt1, opt2 with | None, None -> assert false | Some (key, value), None -> @@ -730,22 +498,22 @@ module Subst = struct wrap key @@ f key (Some value1) (Some value2) ) - let iter f = Mi.iter (fun _ (key, value) -> f key value) + let iter f = M.iter (fun _ (key, value) -> f key value) - let map f = Mi.map (fun (key, value) -> (key, f value)) + let map f = M.map (fun (key, value) -> (key, f value)) - let fold f = Mi.fold (fun _ (key, value) acc -> f key value acc) + let fold f = M.fold (fun _ (key, value) acc -> f key value acc) - 
let bindings s = Mi.fold (fun _ (key, value) acc -> (key, value) :: acc) s [] + let bindings s = M.fold (fun _ (key, value) acc -> (key, value) :: acc) s [] - let filter p = Mi.filter (fun _ (key, value) -> p key value) + let filter p = M.filter (fun _ (key, value) -> p key value) (* Comparisons *) - let equal f = Mi.equal (fun (_, value1) (_, value2) -> f value1 value2) - let compare f = Mi.compare (fun (_, value1) (_, value2) -> f value1 value2) - let hash h s = Mi.fold (fun i (_, value) acc -> Hashtbl.hash (acc, i, h value)) s 1 + let equal f = M.equal (fun (_, value1) (_, value2) -> f value1 value2) + let compare f = M.compare (fun (_, value1) (_, value2) -> f value1 value2) + let hash h s = M.fold (fun i (_, value) acc -> Hashtbl.hash (acc, i, h value)) s 1 - let choose m = snd (Mi.choose m) + let choose m = snd (M.choose m) (* Iterators *) let exists pred s = @@ -767,7 +535,7 @@ module Subst = struct Format.fprintf fmt "@[<hov 2>%a âĻ@ %a@]" print_key key print_value value in Format.fprintf fmt "@[<hv>%a@]" - Print.(iter ~sep:(return ";@ ") aux) (fun k -> Mi.iter (fun x y -> k(x,y)) map) + Print.(iter ~sep:(return ";@ ") aux) (fun k -> M.iter (fun x y -> k(x,y)) map) let debug print_key print_value fmt map = let aux fmt (i, (key, value)) = @@ -775,7 +543,7 @@ module Subst = struct i print_key key print_value value in Format.fprintf fmt "@[<hv>%a@]" - Print.(iter ~sep:(return ";@ ") aux) (fun k -> Mi.iter (fun x y -> k(x,y)) map) + Print.(iter ~sep:(return ";@ ") aux) (fun k -> M.iter (fun x y -> k(x,y)) map) (* Specific substitutions signature *) module type S = sig @@ -790,107 +558,271 @@ module Subst = struct module Var = struct type 'a key = 'a id let tok v = v.index - let get v s = snd (Mi.find (tok v) s) - let mem v s = Mi.mem (tok v) s - let bind s v t = Mi.add (tok v) (v, t) s - let remove v s = Mi.remove (tok v) s + let get v s = snd (M.find (tok v) s) + let mem v s = M.mem (tok v) s + let bind s v t = M.add (tok v) (v, t) s + let remove v s = 
M.remove (tok v) s end end + (* Types *) (* ************************************************************************* *) module Ty = struct - (* Std type aliase *) + (* Std type aliases *) type t = ty - type view = View.Ty.t - type subst = (ty_var, ty) Subst.t type 'a tag = 'a Tag.t + exception Bad_arity of ty_cst * ty list + exception Prenex_polymorphism of ty + (* printing *) let print = Print.ty + (* Set a wildcard/hole to a concrete type + + Wildcard can be set to point to another type by mutating the + descr field of types (this is the only operation that mutates + this field). + In order to be correct, when we set a wildcard v to point at + another wildcard w, we must remember that, so that when we set + w to something, we also need to update v. *) + + let wildcard_refs = Tag.create () + let wildcard_hook = Tag.create () + + let wildcard_add_refs v l = + Id.add_tag_list v wildcard_refs l + + let set_wildcard v (t: t) = + match v.builtin with + | Builtin.Wildcard { ty; } -> + begin match !ty with + | Some _ -> + raise (Wildcard_already_set v) + | None -> + let set_descr (t : t) (s: t) = + s.ty_descr <- t.ty_descr; + s.ty_hash <- -1 + in + ty := Some t; + let l = Id.get_tag_list v wildcard_refs in + List.iter (set_descr t) l; + begin match t.ty_descr with + | TyVar ({ builtin = Builtin.Wildcard _; _ } as w) -> + wildcard_add_refs w l + | _ -> () + end; + List.iter (fun f -> f v t) + (Id.get_tag_list v wildcard_hook) + end + | _ -> () + + let wildcard_var () = + let path = Path.local "_" in + Id.mk ~builtin:(Builtin.Wildcard { ty = ref None; }) path Type + + + (* Prenex/Rank-1 polymorphism check *) + let rec check_prenex t = + match (expand_head t).ty_descr with + | Pi _ -> raise (Prenex_polymorphism t) + | _ -> () + + and check_prenex_list l = + List.iter check_prenex l + + and subst_bind subst v u = + check_prenex u; + Subst.Var.bind subst v u + + (* type creation *) + and dummy = { + ty_hash = 0; (* must be non-negative *) + ty_head = dummy; + ty_tags = 
Tag.empty; + ty_descr = Arrow ([], dummy); + } + + and mk ty_descr = { + ty_descr; + ty_hash = -1; + ty_tags = Tag.empty; + ty_head = dummy; + } + + and of_var v = + match v with + | { builtin = Builtin.Wildcard { ty; }; _ } -> + begin match !ty with + | None -> + let t = mk (TyVar v) in + wildcard_add_refs v [t]; + t + | Some t -> t + end + | _ -> mk (TyVar v) + + and apply f args = + if List.length args <> f.id_ty.arity then + raise (Bad_arity (f, args)) + else begin + check_prenex_list args; + mk (TyApp (f, args)) + end + + and arrow args ret = + check_prenex_list (ret :: args); + match args with + | [] -> ret + | _ -> mk (Arrow (args, ret)) + + and pi vars body = + check_prenex body; + match vars with + | [] -> body + | _ -> mk (Pi (vars, body)) + + + (* Substitutions *) + and subst_aux ~fix var_map (t : t) = + match t.ty_descr with + | TyVar v -> + begin match Subst.Var.get v var_map with + | exception Not_found -> t + | ty -> if fix then subst_aux ~fix var_map ty else ty + end + | TyApp (f, args) -> + let new_args = List.map (subst_aux ~fix var_map) args in + if List.for_all2 (==) args new_args then t + else apply f new_args + | Arrow (args, ret) -> + let new_args = List.map (subst_aux ~fix var_map) args in + let new_ret = subst_aux ~fix var_map ret in + if ret == new_ret && List.for_all2 (==) args new_args then t + else arrow new_args new_ret + | Pi (vars, body) -> + let var_map = + List.fold_left (fun map v -> + Subst.Var.remove v map + ) var_map vars + in + let new_body = subst_aux ~fix var_map body in + if body == new_body then t + else pi vars new_body + + and subst ?(fix=true) var_map t = + if Subst.is_empty var_map then t + else subst_aux ~fix var_map t + + (* type aliases *) + + and alias_to c alias_vars alias_body = + let cst_ty = c.id_ty in + match cst_ty.alias with + | No_alias -> cst_ty.alias <- Alias { alias_vars; alias_body; } + | Alias _ -> raise (Already_aliased c) + + and expand_head t = + if t.ty_head != dummy then t.ty_head + else match 
t.ty_descr with + | TyApp (f, args) -> + begin match f.id_ty.alias with + | No_alias -> t.ty_head <- t; t + | Alias { alias_vars; alias_body; } -> + assert (List.compare_lengths alias_vars args = 0); + let map = List.fold_left2 Subst.Var.bind Subst.empty alias_vars args in + let res = expand_head (subst map alias_body) in + t.ty_head <- res; + res + end + | _ -> t.ty_head <- t; t + (* hash function *) - let rec hash_aux (t : t) = match t.descr with - | Var v -> hash2 3 (Id.hash v) - | App (f, args) -> hash3 5 (Id.hash f) (hash_list hash args) + let rec hash_aux (t : t) = + match t.ty_descr with + | TyVar v -> + Misc.hash2 3 (Id.hash v) + | TyApp (f, args) -> + Misc.hash3 5 (Id.hash f) (Misc.hash_list hash args) + | Arrow (args, ret) -> + Misc.hash3 7 (Misc.hash_list hash args) (hash ret) + | Pi (vars, body) -> + Misc.hash3 11 (Misc.hash_list Id.hash vars) (hash body) and hash (t : t) = - if t.hash < 0 then t.hash <- hash_aux t; - t.hash + if t.ty_hash >= 0 then t.ty_hash + else begin + let t' = expand_head t in + let res = hash_aux t' in + t'.ty_hash <- res; + t.ty_hash <- res; + res + end (* comparison *) - let discr (t: t) = match t.descr with - | Var _ -> 1 - | App _ -> 2 + let discr (t: t) = + match (expand_head t).ty_descr with + | TyVar _ -> 1 + | TyApp _ -> 2 + | Arrow _ -> 3 + | Pi _ -> 4 let rec compare (u : t) (v : t) = - if u == v || u.descr == v.descr then 0 else begin + if u == v || u.ty_descr == v.ty_descr then 0 else begin let hu = hash u and hv = hash v in if hu <> hv then hu - hv (* safe since both are positive *) - else match u.descr, v.descr with - | Var v, Var v' -> Id.compare v v' - | App (f, args), App (f', args') -> + else match (expand_head u).ty_descr, (expand_head v).ty_descr with + | TyVar v, TyVar v' -> Id.compare v v' + | TyApp (f, args), TyApp (f', args') -> Id.compare f f' <?> (lexicographic compare, args, args') + | Arrow (args, ret), Arrow (args', ret') -> + lexicographic compare args args' + <?> (compare, ret, ret') + | Pi (vars, 
body), Pi (vars', body') -> + List.compare_lengths vars vars' + <?> (compare_bound, (vars, body), (vars', body')) | _, _ -> Stdlib.compare (discr u) (discr v) end - let equal u v = compare u v = 0 - - (* Set a wildcard/hole to a concrete type - - Wildcard can be set to point to another type by mutating the - descr field of types (this is the only operation that mutates - this field). - In order to be correct, when we set a wildcard v to point at - another wildcard w, we must remember that, so that when we set - w to something, we also need to update v. *) - - let wildcard_tbl = ref Subst.empty - - let wildcard_get v = - match Subst.Var.get v !wildcard_tbl with - | l -> l - | exception Not_found -> [] - - let wildcard_add v l = - let l' = wildcard_get v in - wildcard_tbl := Subst.Var.bind !wildcard_tbl v (List.rev_append l l') - - let set_wildcard v (t: t) = - let set_descr (t : t) (s: t) = s.descr <- t.descr in - let l = wildcard_get v in - List.iter (set_descr t) l; - match t.descr with - | Var ({ builtin = Wildcard; _ } as w) -> wildcard_add w l; - | _ -> () + and compare_bound (vars, body) (vars', body') = + (* Since we only have prenex/rank-1 polymorphism, this can only happen + once by comparison. 
*) + let map = + List.fold_left2 Subst.Var.bind Subst.empty + vars (List.map of_var vars') + in + let body = subst ~fix:false map body in + compare body body' + let equal u v = compare u v = 0 (* Types definitions *) type adt_case = { - cstr : term_const; - tester : term_const; - dstrs : term_const option array; + cstr : term_cst; + tester : term_cst; + dstrs : term_cst option array; } type def = | Abstract | Adt of { - ty : ty_const; + ty : ty_cst; record : bool; cases : adt_case array; } let definition_tag : def Tag.t = Tag.create () - let definition c = Id.get_tag_last c definition_tag + let definition c = Id.get_tag c definition_tag let is_record c = match definition c with @@ -899,93 +831,88 @@ module Ty = struct let define c d = match definition c with - | None -> Id.tag c definition_tag d + | None -> Id.set_tag c definition_tag d | Some _ -> raise (Type_already_defined c) - (* view *) - let view = View.Ty.view - (* Tags *) - let tag (t : t) k v = t.tags <- Tag.add t.tags k v + let get_tag (t : t) k = Tag.get t.ty_tags k + let get_tag_last (t : t) k = Tag.get_last t.ty_tags k + let get_tag_list (t : t) k = Tag.get_list t.ty_tags k - let get_tag (t : t) k = Tag.get t.tags k + let set_tag (t : t) k l = t.ty_tags <- Tag.set t.ty_tags k l + let add_tag (t : t) k v = t.ty_tags <- Tag.add t.ty_tags k v + let add_tag_opt (t : t) k o = t.ty_tags <- Tag.add_opt t.ty_tags k o + let add_tag_list (t : t) k l = t.ty_tags <- Tag.add_list t.ty_tags k l - let get_tag_last (t : t) k = Tag.last t.tags k + let unset_tag (t: t) k = t.ty_tags <- Tag.unset t.ty_tags k (* Module for namespacing *) module Var = struct type t = ty_var - let tag = Id.tag let hash = Id.hash + let print = Id.print let equal = Id.equal let compare = Id.compare let get_tag = Id.get_tag let get_tag_last = Id.get_tag_last - let mk name = Id.mk name Type + let get_tag_list = Id.get_tag_list + let set_tag = Id.set_tag + let add_tag = Id.add_tag + let add_tag_opt = Id.add_tag_opt + let add_tag_list = 
Id.add_tag_list + let unset_tag = Id.unset_tag + + let mk name = Id.mk (Path.local name) Type + let wildcard () = wildcard_var () + let is_wildcard = function + | { builtin = Builtin.Wildcard _; _ } -> true + | _ -> false end + let add_wildcard_hook ~hook v = + if Var.is_wildcard v then Var.add_tag v wildcard_hook hook + module Const = struct - type t = ty_const - let tag = Id.tag + type t = ty_cst let hash = Id.hash + let print = Id.print let equal = Id.equal let compare = Id.compare let get_tag = Id.get_tag let get_tag_last = Id.get_tag_last - let mk name n = Id.const name [] (replicate n Type) Type - let arity (c : t) = List.length c.ty.fun_args - - let prop = Id.const ~builtin:Prop "Prop" [] [] Type - let unit = Id.const ~builtin:Unit "unit" [] [] Type - let base = Id.const ~builtin:Univ "$i" [] [] Type - let int = Id.const ~builtin:Int "int" [] [] Type - let rat = Id.const ~builtin:Rat "rat" [] [] Type - let real = Id.const ~builtin:Real "real" [] [] Type - let string = Id.const ~builtin:String "string" [] [] Type - let string_reg_lang = Id.const ~builtin:String_RegLan "string_reglang" [] [] Type - let array = Id.const ~builtin:Array "array" [] [Type; Type] Type + let get_tag_list = Id.get_tag_list + let set_tag = Id.set_tag + let add_tag = Id.add_tag + let add_tag_opt = Id.add_tag_opt + let add_tag_list = Id.add_tag_list + let unset_tag = Id.unset_tag + + let arity (c : t) = c.id_ty.arity + let mk path n = + Id.mk path { arity = n; alias = No_alias; } + let mk' ~builtin name n = + Id.mk ~builtin (Path.global name) { arity = n; alias = No_alias; } + + let prop = mk' ~builtin:Builtin.Prop "Prop" 0 + let unit = mk' ~builtin:Builtin.Unit "unit" 0 + let base = mk' ~builtin:Builtin.Univ "$i" 0 + let int = mk' ~builtin:Builtin.Int "int" 0 + let rat = mk' ~builtin:Builtin.Rat "rat" 0 + let real = mk' ~builtin:Builtin.Real "real" 0 + let string = mk' ~builtin:Builtin.String "string" 0 + let string_reg_lang = mk' ~builtin:Builtin.String_RegLan "string_reglang" 0 + let 
array = mk' ~builtin:Builtin.Array "array" 2 let bitv = with_cache ~cache:(Hashtbl.create 13) (fun i -> - Id.const ~builtin:(Bitv i) (Format.asprintf "Bitv_%d" i) [] [] Type + mk' ~builtin:(Builtin.Bitv i) (Format.asprintf "Bitv_%d" i) 0 ) let float = with_cache ~cache:(Hashtbl.create 13) (fun (e,s) -> - Id.const ~builtin:(Float(e,s)) (Format.asprintf "FloatingPoint_%d_%d" e s) [] [] Type + mk' ~builtin:(Builtin.Float(e,s)) (Format.asprintf "FloatingPoint_%d_%d" e s) 0 ) - let roundingMode = Id.const ~builtin:RoundingMode "RoundingMode" [] [] Type + let roundingMode = mk' ~builtin:Builtin.RoundingMode "RoundingMode" 0 end - let mk descr = { as_ = None; descr; hash = -1; tags = Tag.empty; } - - let as_ t v = { t with as_ = Some v; } - - let of_var v = mk (Var v) - - let wildcard () = - let v = Id.mk ~builtin:Wildcard "_" Type in - let t = of_var v in - wildcard_add v [t]; - t - - let rec check_filters res f args = function - | [] -> res - | (name, active, check) :: r -> - if !active then match (check f args) with - | `Pass -> check_filters res f args r - | `Warn -> check_filters res f args r - | `Error msg -> raise (Filter_failed_ty (name, res, msg)) - else - check_filters res f args r - - let apply (f : Const.t) (args : ty list) = - assert (f.ty.fun_vars = []); - if List.length args <> List.length f.ty.fun_args then - raise (Bad_ty_arity (f, args)) - else begin - let res = mk (App (f, args)) in - check_filters res f args (Const.get_tag f Filter.ty) - end - (* Builtin types *) let prop = apply Const.prop [] let unit = apply Const.unit [] @@ -1004,104 +931,217 @@ module Ty = struct (* alias for alt-ergo *) let bool = prop + (* *) + let split_pi t = + let rec aux acc ty = + let ty' = expand_head ty in + match ty'.ty_descr with + | Pi (vars, body) -> aux (vars :: acc) body + | _ -> + let vars = List.concat (List.rev acc) in + vars, ty' + in + aux [] t + + let split_arrow t = + let rec aux acc t = + let t' = expand_head t in + match t'.ty_descr with + | Arrow (args, 
ret) -> aux (args :: acc) ret + | TyVar _ | TyApp _ -> + let args = List.concat (List.rev acc) in + args, t' + | Pi _ -> raise (Prenex_polymorphism t) + in + aux [] t + + let poly_sig t = + let vars, t = split_pi t in + let args, ret = split_arrow t in + vars, args, ret + + let pi_arity t = + let l, _ = split_pi t in + List.length l + (* Matching *) exception Impossible_matching of ty * ty let rec pmatch subst (pat : ty) (t : ty) = - match pat, t with - | { descr = Var v; _ }, _ -> + match (expand_head pat), (expand_head t) with + | { ty_descr = TyVar v; _ }, _ -> begin match Subst.Var.get v subst with | t' -> if equal t t' then subst else raise (Impossible_matching (pat, t)) | exception Not_found -> - Subst.Var.bind subst v t + subst_bind subst v t end - | { descr = App (f, f_args); _ }, - { descr = App (g, g_args); _ } -> + | { ty_descr = TyApp (f, f_args); _ }, + { ty_descr = TyApp (g, g_args); _ } -> if Id.equal f g then List.fold_left2 pmatch subst f_args g_args else raise (Impossible_matching (pat, t)) + | { ty_descr = Arrow (pat_args, pat_ret); _ }, + { ty_descr = Arrow (t_args, t_ret); _ } -> + pmatch (pmatch_list pat t subst pat_args t_args) pat_ret t_ret | _ -> raise (Impossible_matching (pat, t)) + and pmatch_list pat t subst pat_args t_args = + match pat_args, t_args with + | [], [] -> subst + | x :: l, y :: r -> pmatch_list pat t (pmatch subst x y) l r + | [], _ :: _ + | _ :: _, [] -> raise (Impossible_matching (pat, t)) + (* Unification *) exception Impossible_unification of t * t let rec follow subst (t : t) = match t with - | { descr = Var v; _ } -> + | { ty_descr = TyVar v; _ } -> begin match Subst.Var.get v subst with | t' -> follow subst t' | exception Not_found -> t end | t -> t - let rec occurs subst l (t : t) = - match t with - | { descr = Var v; _ } -> + let rec occurs u subst l (t : t) = + (* no need to call expand_head here, since robinson_bind, + and thus this function also, always receive types that + have already been expanded. 
*) + match t.ty_descr with + | TyVar v -> List.exists (Id.equal v) l || begin match Subst.Var.get v subst with | exception Not_found -> false - | e -> occurs subst (v :: l) e + | e -> occurs u subst (v :: l) e end - | { descr = App (_, tys); _ } -> - List.exists (occurs subst l) tys + | TyApp (_, tys) -> + List.exists (occurs u subst l) tys + | Arrow (args, ret) -> + List.exists (occurs u subst l) args || occurs u subst l ret + | Pi _ -> raise (Prenex_polymorphism t) let robinson_bind subst m v u = - if occurs subst [v] u then - raise (Impossible_unification (m, u)) - else - Subst.Var.bind subst v u - - let robinson_as subst s t = - match s.as_ with - | None -> subst - | Some v -> robinson_bind subst s v t + match u.ty_descr with + | Pi _ -> raise (Prenex_polymorphism u) + | _ -> + if occurs u subst [v] u then + raise (Impossible_unification (m, u)) + else + Subst.Var.bind subst v u let rec robinson subst s t = - let subst = robinson_as (robinson_as subst s t) t s in - let s = follow subst s in - let t = follow subst t in + let s = expand_head (follow subst s) in + let t = expand_head (follow subst t) in match s, t with - | ({ descr = Var ({ builtin = Wildcard; _ } as v); _ } as m), u - | u, ({ descr = Var ({ builtin = Wildcard; _ } as v); _ } as m) -> + | ({ ty_descr = TyVar ({ builtin = Builtin.Wildcard _; _ } as v); _ } as m), u + | u, ({ ty_descr = TyVar ({ builtin = Builtin.Wildcard _; _ } as v); _ } as m) -> if equal m u then subst else robinson_bind subst m v u - | ({ descr = Var v; _}, { descr = Var v'; _ }) -> + | ({ ty_descr = TyVar v; _}, { ty_descr = TyVar v'; _ }) -> if Id.equal v v' then subst else raise (Impossible_unification (s, t)) - | { descr = App (f, f_args); _ }, - { descr = App (g, g_args); _ } -> + | { ty_descr = TyApp (f, f_args); _ }, + { ty_descr = TyApp (g, g_args); _ } -> if Id.equal f g then List.fold_left2 robinson subst f_args g_args else raise (Impossible_unification (s, t)) - | _, _ -> - raise (Impossible_unification (s, t)) - + | 
{ ty_descr = Arrow (f_args, f_ret); _ }, + { ty_descr = Arrow (g_args, g_ret); _ } -> + robinson_arrow subst f_args f_ret g_args g_ret + | ({ ty_descr = Pi _; _ } as ty), _ + | _, ({ ty_descr = Pi _; _ } as ty) -> raise (Prenex_polymorphism ty) + | _, _ -> raise (Impossible_unification (s, t)) + + and robinson_arrow subst f_args f_ret g_args g_ret = + match f_args, g_args with + | [], [] -> robinson subst f_ret g_ret + | [], _ -> robinson subst f_ret (arrow g_args g_ret) + | _, [] -> robinson subst (arrow f_args f_ret) g_ret + | f :: f_r, g :: g_r -> + robinson_arrow (robinson subst f g) f_r f_ret g_r g_ret - (* Substitutions *) - let rec subst_aux ~fix var_map (t : t) = - match t.descr with - | Var v -> - begin match Subst.Var.get v var_map with - | exception Not_found -> t - | ty -> if fix then subst_aux ~fix var_map ty else ty - end - | App (f, args) -> - let new_args = List.map (subst_aux ~fix var_map) args in - if List.for_all2 (==) args new_args then t - else apply f new_args + (* typing annotations *) + let unify t t' = + match robinson Subst.empty t t' with + | s -> + Subst.iter set_wildcard s; + Some (subst s t) + | exception Impossible_unification _ -> + None - let subst ?(fix=true) var_map t = - if Subst.is_empty var_map then t - else subst_aux ~fix var_map t + (* free variables *) + let rec free_vars acc (t : t) = + match t.ty_descr with + | TyVar v -> FV.add (FV.Ty v) acc + | TyApp (_, l) -> List.fold_left free_vars acc l + | Arrow (args, ret) -> List.fold_left free_vars (free_vars acc ret) args + | Pi (vars, body) -> + let fv = free_vars FV.empty body in + let fv = FV.remove fv vars in + FV.merge fv acc + let fv t = + let s = free_vars FV.empty t in + let l, _ = FV.to_list s in + l + + (* Access to type descr *) + let descr (t: t) = (expand_head t).ty_descr + + (* View *) + type view = [ + | `Int + | `Rat + | `Real + | `Array of ty * ty + | `Bitv of int + | `Float of int * int + | `String + | `String_reg_lang + (* Generic cases *) + | `Var of ty_var + 
| `Wildcard of ty_var + | `App of [ + | `Generic of ty_cst + | `Builtin of builtin + ] * ty list + | `Arrow of ty list * ty + | `Pi of ty_var list * ty + ] - (* free variables *) - let rec free_vars acc (t : t) = match t.descr with - | Var v -> FV.add (FV.Ty v) acc - | App (_, l) -> List.fold_left free_vars acc l + let view (t : ty) : view = + match descr t with + | TyVar v -> + begin match v with + | { builtin = Builtin.Wildcard _;_ } -> `Wildcard v + | _ -> `Var v + end + | Pi _ -> + let vars, body = split_pi t in + `Pi (vars, body) + | Arrow _ -> + let args, ret = split_arrow t in + `Arrow (args, ret) + | TyApp (({ builtin; _ } as c), l) -> + begin match builtin with + | Builtin.Int -> `Int + | Builtin.Rat -> `Rat + | Builtin.Real -> `Real + | Builtin.Bitv i -> `Bitv i + | Builtin.Float (e, s) -> `Float (e, s) + | Builtin.Array -> begin match l with + | [src; dst] -> `Array (src, dst) + | _ -> assert false (* not possible *) + end + | Builtin.String -> `String + | Builtin.String_RegLan -> `String_reg_lang + | Builtin.Base -> `App (`Generic c, l) + | _ -> `App (`Builtin builtin, l) + end end @@ -1119,76 +1159,105 @@ module Term = struct type 'a tag = 'a Tag.t + (* Exceptions *) + exception Wrong_type of t * ty - exception Wrong_sum_type of term_const * ty - exception Wrong_record_type of term_const * ty_const + exception Wrong_sum_type of term_cst * ty + exception Wrong_record_type of term_cst * ty_cst - exception Field_repeated of term_const - exception Field_missing of term_const - exception Field_expected of term_const + exception Field_repeated of term_cst + exception Field_missing of term_cst + exception Field_expected of term_cst - exception Constructor_expected of term_const + exception Constructor_expected of term_cst + exception Over_application of t list + exception Bad_poly_arity of ty_var list * ty list + + (* *) + + (* Print *) let print = Print.term (* Tags *) - let tag (t : t) k v = t.tags <- Tag.add t.tags k v + let get_tag (t : t) k = Tag.get 
t.term_tags k + let get_tag_last (t : t) k = Tag.get_last t.term_tags k + let get_tag_list (t : t) k = Tag.get_list t.term_tags k - let get_tag (t : t) k = Tag.get t.tags k + let set_tag (t : t) k l = t.term_tags <- Tag.set t.term_tags k l + let add_tag (t : t) k v = t.term_tags <- Tag.add t.term_tags k v + let add_tag_opt (t : t) k o = t.term_tags <- Tag.add_opt t.term_tags k o + let add_tag_list (t : t) k l = t.term_tags <- Tag.add_list t.term_tags k l - let get_tag_last (t : t) k = Tag.last t.tags k + let unset_tag (t: t) k = t.term_tags <- Tag.unset t.term_tags k (* Hash *) let rec hash_aux t = - match t.descr with - | Var v -> hash2 3 (Id.hash v) + match t.term_descr with + | Var v -> + Misc.hash2 3 (Id.hash v) + | Cst c -> + Misc.hash2 5 (Id.hash c) | App (f, tys, args) -> - hash4 5 (Id.hash f) (hash_list Ty.hash tys) (hash_list hash args) + Misc.hash4 7 (hash f) (Misc.hash_list Ty.hash tys) (Misc.hash_list hash args) | Binder (b, body) -> - hash3 7 (hash_binder b) (hash body) + Misc.hash3 11 (hash_binder b) (hash body) | Match (scrutinee, branches) -> - hash3 11 (hash scrutinee) (hash_branches branches) + Misc.hash3 13 (hash scrutinee) (hash_branches branches) and hash t = - if t.hash <= 0 then t.hash <- hash_aux t; - t.hash + if t.term_hash >= 0 then t.term_hash + else begin + let res = hash_aux t in + t.term_hash <- res; + res + end and hash_binder = function + | Let_seq l -> + let aux (v, t) = Misc.hash2 (Id.hash v) (hash t) in + Misc.hash2 3 (Misc.hash_list aux l) + | Let_par l -> + let aux (v, t) = Misc.hash2 (Id.hash v) (hash t) in + Misc.hash2 5 (Misc.hash_list aux l) + | Lambda (tys, ts) -> + Misc.hash3 7 (Misc.hash_list Id.hash tys) (Misc.hash_list Id.hash ts) | Exists (tys, ts) -> - hash3 3 (hash_list Id.hash tys) (hash_list Id.hash ts) + Misc.hash3 11 (Misc.hash_list Id.hash tys) (Misc.hash_list Id.hash ts) | Forall (tys, ts) -> - hash3 5 (hash_list Id.hash tys) (hash_list Id.hash ts) - | Letin l -> - let aux (v, t) = hash2 (Id.hash v) (hash t) in 
- hash2 7 (hash_list aux l) + Misc.hash3 13 (Misc.hash_list Id.hash tys) (Misc.hash_list Id.hash ts) and hash_branch (pattern, body) = - hash2 (hash pattern) (hash body) + Misc.hash2 (hash pattern) (hash body) and hash_branches l = - hash_list hash_branch l + Misc.hash_list hash_branch l (* Comparison *) let discr t = - match t.descr with + match t.term_descr with | Var _ -> 1 - | App _ -> 2 - | Binder _ -> 3 - | Match _ -> 4 + | Cst _ -> 2 + | App _ -> 3 + | Binder _ -> 4 + | Match _ -> 5 let binder_discr = function - | Exists _ -> 1 - | Forall _ -> 2 - | Letin _ -> 3 + | Let_seq _ -> 1 + | Let_par _ -> 2 + | Lambda _ -> 3 + | Exists _ -> 4 + | Forall _ -> 5 let rec compare u v = if u == v then 0 else begin let hu = hash u and hv = hash v in if hu <> hv then hu - hv - else match u.descr, v.descr with + else match u.term_descr, v.term_descr with | Var v1, Var v2 -> Id.compare v1 v2 + | Cst c1, Cst c2 -> Id.compare c1 c2 | App (f1, tys1, args1), App (f2, tys2, args2) -> - Id.compare f1 f2 + compare f1 f2 <?> (lexicographic Ty.compare, tys1, tys2) <?> (lexicographic compare, args1, args2) | Binder (b, body), Binder (b', body') -> @@ -1202,15 +1271,21 @@ module Term = struct and compare_binder b b' = match b, b' with + | Let_seq l, Let_seq l' -> + let aux (v, t) (v', t') = Id.compare v v' <?> (compare, t, t') in + lexicographic aux l l' + | Let_par l, Let_par l' -> + let aux (v, t) (v', t') = Id.compare v v' <?> (compare, t, t') in + lexicographic aux l l' + | Lambda (tys, ts), Lambda (tys', ts') -> + lexicographic Id.compare tys tys' + <?> (lexicographic Id.compare, ts, ts') | Exists (tys, ts), Exists (tys', ts') -> lexicographic Id.compare tys tys' <?> (lexicographic Id.compare, ts, ts') | Forall (tys, ts), Forall (tys', ts') -> lexicographic Id.compare tys tys' <?> (lexicographic Id.compare, ts, ts') - | Letin l, Letin l' -> - let aux (v, t) (v', t') = Id.compare v v' <?> (compare, t, t') in - lexicographic aux l l' | _, _ -> (binder_discr b) - (binder_discr b') 
and compare_branch (p, b) (p', b') = @@ -1219,29 +1294,45 @@ module Term = struct let equal u v = compare u v = 0 (* Inspection *) - let ty { ty; _ } = ty + let ty { term_ty; _ } = term_ty (* free variables *) - let rec free_vars acc (t : t) = match t.descr with - | Var v -> FV.add (FV.Term v) (Ty.free_vars acc v.ty) - | App (_, tys, ts) -> + let rec free_vars acc (t : t) = + match t.term_descr with + | Var v -> FV.add (FV.Term v) (Ty.free_vars acc v.id_ty) + | Cst _ -> acc + | App (f, tys, ts) -> List.fold_left free_vars ( - List.fold_left Ty.free_vars acc tys + List.fold_left Ty.free_vars ( + free_vars acc f + ) tys ) ts - | Binder ((Exists (tys, ts) | Forall (tys, ts)), body) -> + | Binder ((Lambda (tys, ts) | Exists (tys, ts) | Forall (tys, ts)), body) -> let fv = free_vars FV.empty body in let fv = FV.remove fv tys in let fv = FV.remove fv ts in FV.merge fv acc - | Binder (Letin l, body) -> + | Binder (Let_seq l, body) -> let fv = free_vars FV.empty body in let fv = List.fold_right (fun (v, t) acc -> let acc = free_vars acc t in let acc = FV.del v acc in - let acc = Ty.free_vars acc v.ty in + let acc = Ty.free_vars acc v.id_ty in acc ) l fv in FV.merge fv acc + | Binder (Let_par l, body) -> + let fv = free_vars FV.empty body in + let fv = List.fold_right (fun (_, t) acc -> + free_vars acc t + ) l fv + in + let fv = List.fold_right (fun (v, _) acc -> + let acc = FV.del v acc in + Ty.free_vars acc v.id_ty + ) l fv + in + FV.merge fv acc | Match (scrutinee, branches) -> let acc = free_vars acc scrutinee in List.fold_left (fun fv (pat, body) -> @@ -1255,30 +1346,32 @@ module Term = struct FV.to_list s (* Helpers for adt definition *) - let mk_cstr ty_c name i vars args ret = - Id.const name vars args ret - ~builtin:(Constructor { adt = ty_c; case = i; }) + let mk_cstr ty_c path i vars args ret = + let ty = Ty.pi vars (Ty.arrow args ret) in + Id.mk path ty ~builtin:(Builtin.Constructor { adt = ty_c; case = i; }) - let mk_cstr_tester cstr = - let name = 
Format.asprintf "is:%a" Print.id cstr in - Id.const ~builtin:(Tester { cstr }) - name cstr.ty.fun_vars [cstr.ty.fun_ret] Ty.prop + let mk_cstr_tester ty_c cstr i = + let path = Path.rename (fun s -> "is:" ^ s) cstr.path in + let vars, _, ret = Ty.poly_sig cstr.id_ty in + let ty = Ty.pi vars (Ty.arrow [ret] Ty.prop) in + Id.mk ~builtin:(Builtin.Tester { adt = ty_c; cstr; case = i; }) path ty (* ADT definition *) let define_adt_aux ~record ty_const vars l = let ty = Ty.apply ty_const (List.map Ty.of_var vars) in let cases = ref [] in - let l' = List.mapi (fun i (cstr_name, args) -> + let l' = List.mapi (fun i (cstr_path, args) -> let args_ty = List.map fst args in - let cstr = mk_cstr ty_const cstr_name i vars args_ty ty in - let tester = mk_cstr_tester cstr in + let cstr = mk_cstr ty_const cstr_path i vars args_ty ty in + let tester = mk_cstr_tester ty_const cstr i in let dstrs = Array.make (List.length args) None in let l' = List.mapi (fun j -> function | (arg_ty, None) -> (arg_ty, None) | (arg_ty, Some name) -> + let dstr_ty = Ty.pi vars (Ty.arrow [ty] arg_ty) in let dstr = - Id.const name vars [ty] arg_ty - ~builtin:(Destructor { + Id.mk name dstr_ty + ~builtin:(Builtin.Destructor { adt = ty_const; cstr; case = i; field = j; }) in @@ -1296,11 +1389,11 @@ module Term = struct let define_adt = define_adt_aux ~record:false let define_record ty_const vars l = - let name = ty_const.name in + let path = ty_const.path in let cstr_args = List.map (fun (field_name, ty) -> ty, Some field_name ) l in - let l' = define_adt_aux ~record:true ty_const vars [name, cstr_args] in + let l' = define_adt_aux ~record:true ty_const vars [path, cstr_args] in match l' with | [ _, l'' ] -> List.map (function @@ -1316,116 +1409,148 @@ module Term = struct (* Variables *) module Var = struct type t = term_var - let tag = Id.tag let hash = Id.hash + let print = Id.print let equal = Id.equal let compare = Id.compare let get_tag = Id.get_tag let get_tag_last = Id.get_tag_last - let ty ({ ty; _ 
} : t) = ty - let mk name ty = Id.mk name ty + let get_tag_list = Id.get_tag_list + let set_tag = Id.set_tag + let add_tag = Id.add_tag + let add_tag_opt = Id.add_tag_opt + let add_tag_list = Id.add_tag_list + let unset_tag = Id.unset_tag + + let ty ({ id_ty; _ } : t) = id_ty + let create path ty = Id.mk path ty + let mk name ty = Id.mk (Path.local name) ty end (* Constants *) module Const = struct - type t = term_const - let tag = Id.tag + type t = term_cst let hash = Id.hash + let print = Id.print let equal = Id.equal let compare = Id.compare let get_tag = Id.get_tag let get_tag_last = Id.get_tag_last - - let mk name vars args ret = - let r = ref ret in - let b = ref true in - let args = List.map (fun ty -> - if !b || not (Ty.equal ty ret) then ty - else begin - let v = Ty.Var.mk "'ret" in - r := Ty.of_var v; - b := true; - Ty.as_ ty v - end - ) args - in - Id.const name vars args !r + let get_tag_list = Id.get_tag_list + let set_tag = Id.set_tag + let add_tag = Id.add_tag + let add_tag_opt = Id.add_tag_opt + let add_tag_list = Id.add_tag_list + let unset_tag = Id.unset_tag + + let mk path ty = + Id.mk path ty + + let mk' ?pos ?name ?builtin ?tags cname vars args ret = + let ty = Ty.pi vars (Ty.arrow args ret) in + Id.mk ?pos ?name ?builtin ?tags (Path.global cname) ty + + let indexed + ?pos ?name ?builtin ?tags + cname fun_vars fun_arg fun_ret = + with_cache ~cache:(Hashtbl.create 13) (fun i -> + let fun_args = replicate i fun_arg in + mk' ?pos ?name ?builtin ?tags cname fun_vars fun_args fun_ret + ) let arity (c : t) = - List.length c.ty.fun_vars, List.length c.ty.fun_args + let vars, args, _ = Ty.poly_sig c.id_ty in + List.length vars, List.length args (* Some constants *) let _true = - Id.const ~name:"â¤" ~builtin:True "True" [] [] Ty.prop + Id.mk ~name:"â¤" ~builtin:Builtin.True (Path.global "true") Ty.prop let _false = - Id.const ~name:"âĨ" ~builtin:False "False" [] [] Ty.prop - - let eq = - let a = Ty.Var.mk "alpha" in - let a_ty = Ty.of_var a in - 
Id.const - ~pos:Pretty.Infix ~name:"=" ~builtin:Equal - "Equal" [a] [a_ty; a_ty] Ty.prop + Id.mk ~name:"âĨ" ~builtin:Builtin.False (Path.global "false") Ty.prop let eqs = let a = Ty.Var.mk "alpha" in let a_ty = Ty.of_var a in - Id.indexed - ~pos:Pretty.Infix ~name:"=" ~builtin:Equal - "Equals" [a] a_ty Ty.prop + indexed + ~pos:Pretty.Infix ~name:"=" ~builtin:Builtin.Equal + "eqs" [a] a_ty Ty.prop + + let eq = eqs 2 let distinct = let a = Ty.Var.mk "alpha" in let a_ty = Ty.of_var a in - Id.indexed ~builtin:Distinct "Distinct" [a] a_ty Ty.prop + indexed ~builtin:Builtin.Distinct "Distinct" [a] a_ty Ty.prop + + let neq = distinct 2 - let neg = Id.const - ~pos:Pretty.Prefix ~name:"ÂŦ" ~builtin:Neg + let neg = mk' + ~pos:Pretty.Prefix ~name:"ÂŦ" ~builtin:Builtin.Neg "Neg" [] [Ty.prop] Ty.prop - let _and = Id.indexed - ~pos:Pretty.Infix ~name:"â§" ~builtin:And + let _and = indexed + ~pos:Pretty.Infix ~name:"â§" ~builtin:Builtin.And "And" [] Ty.prop Ty.prop - let _or = Id.indexed - ~pos:Pretty.Infix ~name:"â¨" ~builtin:Or + let and_ = _and 2 + + let _or = indexed + ~pos:Pretty.Infix ~name:"â¨" ~builtin:Builtin.Or "Or" [] Ty.prop Ty.prop - let nand = Id.const - ~pos:Pretty.Infix ~name:"âŧ" ~builtin:Nand + let or_ = _or 2 + + let nand = mk' + ~pos:Pretty.Infix ~name:"âŧ" ~builtin:Builtin.Nand "Nand" [] [Ty.prop; Ty.prop] Ty.prop - let nor = Id.const - ~pos:Pretty.Infix ~name:"V" ~builtin:Nor + let nor = mk' + ~pos:Pretty.Infix ~name:"V" ~builtin:Builtin.Nor "or" [] [Ty.prop; Ty.prop] Ty.prop - let xor = Id.const - ~pos:Pretty.Infix ~name:"âģ" ~builtin:Xor + let xor = mk' + ~pos:Pretty.Infix ~name:"âģ" ~builtin:Builtin.Xor "Xor" [] [Ty.prop; Ty.prop] Ty.prop - let imply = Id.const - ~pos:Pretty.Infix ~name:"â" ~builtin:Imply + let imply = mk' + ~pos:Pretty.Infix ~name:"â" ~builtin:Builtin.Imply "Imply" [] [Ty.prop; Ty.prop] Ty.prop - let equiv = Id.const - ~pos:Pretty.Infix ~name:"â" ~builtin:Equiv + let implied = mk' + ~pos:Pretty.Infix ~name:"â" ~builtin:Builtin.Implied + 
"Implied" [] [Ty.prop; Ty.prop] Ty.prop + + let equiv = mk' + ~pos:Pretty.Infix ~name:"â" ~builtin:Builtin.Equiv "Equiv" [] [Ty.prop; Ty.prop] Ty.prop let ite = let a = Ty.Var.mk "alpha" in let a_ty = Ty.of_var a in - Id.const - ~name:"ite" ~builtin:Ite + mk' + ~name:"ite" ~builtin:Builtin.Ite "Ite" [a] [Ty.prop; a_ty; a_ty] a_ty + let pi = + let a = Ty.Var.mk "alpha" in + let a_ty = Ty.of_var a in + mk' ~name:"Î " ~builtin:Builtin.Pi + "Pi" [a] [Ty.(arrow [a_ty] prop)] Ty.prop + + let sigma = + let a = Ty.Var.mk "alpha" in + let a_ty = Ty.of_var a in + mk' ~name:"ÎŖ" ~builtin:Builtin.Sigma + "Sigma" [a] [Ty.(arrow [a_ty] prop)] Ty.prop + let select = let a = Ty.Var.mk "alpha" in let a_ty = Ty.of_var a in let b = Ty.Var.mk "beta" in let b_ty = Ty.of_var b in - Id.const - ~name:"select" ~builtin:Select + mk' + ~name:"select" ~builtin:Builtin.Select "Select" [a; b] [Ty.array a_ty b_ty; a_ty] b_ty let store = @@ -1434,105 +1559,109 @@ module Term = struct let b = Ty.Var.mk "beta" in let b_ty = Ty.of_var b in let arr = Ty.array a_ty b_ty in - Id.const - ~name:"store" ~builtin:Store + mk' + ~name:"store" ~builtin:Builtin.Store "Store" [a; b] [arr; a_ty; b_ty] arr let coerce = let a = Ty.Var.mk "alpha" in let b = Ty.Var.mk "beta" in - Id.const ~builtin:Coercion "coerce" + mk' ~builtin:Builtin.Coercion "coerce" [a; b] [Ty.of_var a] (Ty.of_var b) module Int = struct let int = with_cache ~cache:(Hashtbl.create 113) (fun s -> - Id.const ~builtin:(Integer s) s [] [] Ty.int + mk' ~builtin:(Builtin.Integer s) s [] [] Ty.int ) - let minus = Id.const - ~pos:Pretty.Prefix ~name:"-" ~builtin:Minus + let minus = mk' + ~pos:Pretty.Prefix ~name:"-" ~builtin:Builtin.Minus "Minus" [] [Ty.int] Ty.int - let add = Id.const - ~pos:Pretty.Infix ~name:"+" ~builtin:Add + let add = mk' + ~pos:Pretty.Infix ~name:"+" ~builtin:Builtin.Add "Add" [] [Ty.int; Ty.int] Ty.int - let sub = Id.const - ~pos:Pretty.Infix ~name:"-" ~builtin:Sub + let sub = mk' + ~pos:Pretty.Infix ~name:"-" 
~builtin:Builtin.Sub "Sub" [] [Ty.int; Ty.int] Ty.int - let mul = Id.const - ~pos:Pretty.Infix ~name:"*" ~builtin:Mul + let mul = mk' + ~pos:Pretty.Infix ~name:"*" ~builtin:Builtin.Mul "Mul" [] [Ty.int; Ty.int] Ty.int - let div_e = Id.const - ~pos:Pretty.Infix ~name:"/" ~builtin:Div_e + let pow = mk' + ~pos:Pretty.Infix ~name:"**" ~builtin:Builtin.Pow + "Pow" [] [Ty.int; Ty.int] Ty.int + + let div_e = mk' + ~pos:Pretty.Infix ~name:"/" ~builtin:Builtin.Div_e "Div_e" [] [Ty.int; Ty.int] Ty.int - let div_t = Id.const - ~pos:Pretty.Infix ~name:"/t" ~builtin:Div_t + let div_t = mk' + ~pos:Pretty.Infix ~name:"/t" ~builtin:Builtin.Div_t "Div_t" [] [Ty.int; Ty.int] Ty.int - let div_f = Id.const - ~pos:Pretty.Infix ~name:"/f" ~builtin:Div_f + let div_f = mk' + ~pos:Pretty.Infix ~name:"/f" ~builtin:Builtin.Div_f "Div_f" [] [Ty.int; Ty.int] Ty.int - let rem_e = Id.const - ~pos:Pretty.Infix ~name:"%" ~builtin:Modulo_e + let rem_e = mk' + ~pos:Pretty.Infix ~name:"%" ~builtin:Builtin.Modulo_e "Modulo" [] [Ty.int; Ty.int] Ty.int - let rem_t = Id.const - ~pos:Pretty.Infix ~name:"%e" ~builtin:Modulo_t + let rem_t = mk' + ~pos:Pretty.Infix ~name:"%e" ~builtin:Builtin.Modulo_t "Modulo" [] [Ty.int; Ty.int] Ty.int - let rem_f = Id.const - ~pos:Pretty.Infix ~name:"%f" ~builtin:Modulo_f + let rem_f = mk' + ~pos:Pretty.Infix ~name:"%f" ~builtin:Builtin.Modulo_f "Modulo" [] [Ty.int; Ty.int] Ty.int - let abs = Id.const - ~name:"abs" ~builtin:Abs + let abs = mk' + ~name:"abs" ~builtin:Builtin.Abs "Abs" [] [Ty.int] Ty.int - let lt = Id.const - ~pos:Pretty.Infix ~name:"<" ~builtin:Lt + let lt = mk' + ~pos:Pretty.Infix ~name:"<" ~builtin:Builtin.Lt "LessThan" [] [Ty.int; Ty.int] Ty.prop - let le = Id.const - ~pos:Pretty.Infix ~name:"<=" ~builtin:Leq + let le = mk' + ~pos:Pretty.Infix ~name:"<=" ~builtin:Builtin.Leq "LessOrEqual" [] [Ty.int; Ty.int] Ty.prop - let gt = Id.const - ~pos:Pretty.Infix ~name:">" ~builtin:Gt + let gt = mk' + ~pos:Pretty.Infix ~name:">" ~builtin:Builtin.Gt "GreaterThan" 
[] [Ty.int; Ty.int] Ty.prop - let ge = Id.const - ~pos:Pretty.Infix ~name:">=" ~builtin:Geq + let ge = mk' + ~pos:Pretty.Infix ~name:">=" ~builtin:Builtin.Geq "GreaterOrEqual" [] [Ty.int; Ty.int] Ty.prop - let floor = Id.const - ~name:"floor" ~builtin:Floor + let floor = mk' + ~name:"floor" ~builtin:Builtin.Floor "Floor" [] [Ty.int] Ty.int - let ceiling = Id.const - ~name:"ceiling" ~builtin:Ceiling + let ceiling = mk' + ~name:"ceiling" ~builtin:Builtin.Ceiling "Ceiling" [] [Ty.int] Ty.int - let truncate = Id.const - ~name:"truncate" ~builtin:Truncate + let truncate = mk' + ~name:"truncate" ~builtin:Builtin.Truncate "Truncate" [] [Ty.int] Ty.int - let round = Id.const - ~name:"round" ~builtin:Round + let round = mk' + ~name:"round" ~builtin:Builtin.Round "Round" [] [Ty.int] Ty.int - let is_int = Id.const - ~name:"is_int" ~builtin:Is_int + let is_int = mk' + ~name:"is_int" ~builtin:Builtin.Is_int "Is_int" [] [Ty.int] Ty.prop - let is_rat = Id.const - ~name:"is_rat" ~builtin:Is_rat + let is_rat = mk' + ~name:"is_rat" ~builtin:Builtin.Is_rat "Is_rat" [] [Ty.int] Ty.prop - let divisible = Id.const - ~builtin:Divisible "Divisible" + let divisible = mk' + ~builtin:Builtin.Divisible "Divisible" [] [Ty.int; Ty.int] Ty.prop end @@ -1541,86 +1670,86 @@ module Term = struct let rat = with_cache ~cache:(Hashtbl.create 113) (fun s -> - Id.const ~builtin:(Rational s) s [] [] Ty.rat + mk' ~builtin:(Builtin.Rational s) s [] [] Ty.rat ) - let minus = Id.const - ~pos:Pretty.Prefix ~name:"-" ~builtin:Minus + let minus = mk' + ~pos:Pretty.Prefix ~name:"-" ~builtin:Builtin.Minus "Minus" [] [Ty.rat] Ty.rat - let add = Id.const - ~pos:Pretty.Infix ~name:"+" ~builtin:Add + let add = mk' + ~pos:Pretty.Infix ~name:"+" ~builtin:Builtin.Add "Add" [] [Ty.rat; Ty.rat] Ty.rat - let sub = Id.const - ~pos:Pretty.Infix ~name:"-" ~builtin:Sub + let sub = mk' + ~pos:Pretty.Infix ~name:"-" ~builtin:Builtin.Sub "Sub" [] [Ty.rat; Ty.rat] Ty.rat - let mul = Id.const - ~pos:Pretty.Infix ~name:"*" 
~builtin:Mul + let mul = mk' + ~pos:Pretty.Infix ~name:"*" ~builtin:Builtin.Mul "Mul" [] [Ty.rat; Ty.rat] Ty.rat - let div = Id.const - ~pos:Pretty.Infix ~name:"/" ~builtin:Div + let div = mk' + ~pos:Pretty.Infix ~name:"/" ~builtin:Builtin.Div "Div" [] [Ty.rat; Ty.rat] Ty.rat - let div_e = Id.const - ~pos:Pretty.Infix ~name:"/e" ~builtin:Div_e + let div_e = mk' + ~pos:Pretty.Infix ~name:"/e" ~builtin:Builtin.Div_e "Div_e" [] [Ty.rat; Ty.rat] Ty.rat - let div_t = Id.const - ~pos:Pretty.Infix ~name:"/t" ~builtin:Div_t + let div_t = mk' + ~pos:Pretty.Infix ~name:"/t" ~builtin:Builtin.Div_t "Div_t" [] [Ty.rat; Ty.rat] Ty.rat - let div_f = Id.const - ~pos:Pretty.Infix ~name:"/f" ~builtin:Div_f + let div_f = mk' + ~pos:Pretty.Infix ~name:"/f" ~builtin:Builtin.Div_f "Div_f" [] [Ty.rat; Ty.rat] Ty.rat - let rem_e = Id.const - ~pos:Pretty.Infix ~name:"%" ~builtin:Modulo_e + let rem_e = mk' + ~pos:Pretty.Infix ~name:"%" ~builtin:Builtin.Modulo_e "Modulo" [] [Ty.rat; Ty.rat] Ty.rat - let rem_t = Id.const - ~pos:Pretty.Infix ~name:"%" ~builtin:Modulo_t + let rem_t = mk' + ~pos:Pretty.Infix ~name:"%" ~builtin:Builtin.Modulo_t "Modulo" [] [Ty.rat; Ty.rat] Ty.rat - let rem_f = Id.const - ~pos:Pretty.Infix ~name:"%" ~builtin:Modulo_f + let rem_f = mk' + ~pos:Pretty.Infix ~name:"%" ~builtin:Builtin.Modulo_f "Modulo" [] [Ty.rat; Ty.rat] Ty.rat - let lt = Id.const - ~pos:Pretty.Infix ~name:"<" ~builtin:Lt + let lt = mk' + ~pos:Pretty.Infix ~name:"<" ~builtin:Builtin.Lt "LessThan" [] [Ty.rat; Ty.rat] Ty.prop - let le = Id.const - ~pos:Pretty.Infix ~name:"<=" ~builtin:Leq + let le = mk' + ~pos:Pretty.Infix ~name:"<=" ~builtin:Builtin.Leq "LessOrEqual" [] [Ty.rat; Ty.rat] Ty.prop - let gt = Id.const - ~pos:Pretty.Infix ~name:">" ~builtin:Gt + let gt = mk' + ~pos:Pretty.Infix ~name:">" ~builtin:Builtin.Gt "GreaterThan" [] [Ty.rat; Ty.rat] Ty.prop - let ge = Id.const - ~pos:Pretty.Infix ~name:">=" ~builtin:Geq + let ge = mk' + ~pos:Pretty.Infix ~name:">=" ~builtin:Builtin.Geq 
"GreaterOrEqual" [] [Ty.rat; Ty.rat] Ty.prop - let floor = Id.const - ~name:"floor" ~builtin:Floor + let floor = mk' + ~name:"floor" ~builtin:Builtin.Floor "Floor" [] [Ty.rat] Ty.rat - let ceiling = Id.const - ~name:"ceiling" ~builtin:Ceiling + let ceiling = mk' + ~name:"ceiling" ~builtin:Builtin.Ceiling "Ceiling" [] [Ty.rat] Ty.rat - let truncate = Id.const - ~name:"truncate" ~builtin:Truncate + let truncate = mk' + ~name:"truncate" ~builtin:Builtin.Truncate "Truncate" [] [Ty.rat] Ty.rat - let round = Id.const - ~name:"round" ~builtin:Round + let round = mk' + ~name:"round" ~builtin:Builtin.Round "Round" [] [Ty.rat] Ty.rat - let is_int = Id.const - ~name:"is_int" ~builtin:Is_int + let is_int = mk' + ~name:"is_int" ~builtin:Builtin.Is_int "Is_int" [] [Ty.rat] Ty.prop - let is_rat = Id.const - ~name:"is_rat" ~builtin:Is_rat + let is_rat = mk' + ~name:"is_rat" ~builtin:Builtin.Is_rat "Is_rat" [] [Ty.rat] Ty.prop end @@ -1628,284 +1757,292 @@ module Term = struct let real = with_cache ~cache:(Hashtbl.create 113) (fun s -> - Id.const ~builtin:(Decimal s) s [] [] Ty.real + mk' ~builtin:(Builtin.Decimal s) s [] [] Ty.real ) - let minus = Id.const - ~pos:Pretty.Prefix ~name:"-" ~builtin:Minus + let minus = mk' + ~pos:Pretty.Prefix ~name:"-" ~builtin:Builtin.Minus "Minus" [] [Ty.real] Ty.real - let add = Id.const - ~pos:Pretty.Infix ~name:"+" ~builtin:Add + let add = mk' + ~pos:Pretty.Infix ~name:"+" ~builtin:Builtin.Add "Add" [] [Ty.real; Ty.real] Ty.real - let sub = Id.const - ~pos:Pretty.Infix ~name:"-" ~builtin:Sub + let sub = mk' + ~pos:Pretty.Infix ~name:"-" ~builtin:Builtin.Sub "Sub" [] [Ty.real; Ty.real] Ty.real - let mul = Id.const - ~pos:Pretty.Infix ~name:"*" ~builtin:Mul + let mul = mk' + ~pos:Pretty.Infix ~name:"*" ~builtin:Builtin.Mul "Mul" [] [Ty.real; Ty.real] Ty.real - let div = Id.const - ~pos:Pretty.Infix ~name:"/" ~builtin:Div + let pow = mk' + ~pos:Pretty.Infix ~name:"**" ~builtin:Builtin.Pow + "Pow" [] [Ty.real; Ty.real] Ty.real + + let div = mk' + 
~pos:Pretty.Infix ~name:"/" ~builtin:Builtin.Div "Div" [] [Ty.real; Ty.real] Ty.real - let div_e = Id.const - ~pos:Pretty.Infix ~name:"/" ~builtin:Div_e + let div_e = mk' + ~pos:Pretty.Infix ~name:"/" ~builtin:Builtin.Div_e "Div_e" [] [Ty.real; Ty.real] Ty.real - let div_t = Id.const - ~pos:Pretty.Infix ~name:"/t" ~builtin:Div_t + let div_t = mk' + ~pos:Pretty.Infix ~name:"/t" ~builtin:Builtin.Div_t "Div_t" [] [Ty.real; Ty.real] Ty.real - let div_f = Id.const - ~pos:Pretty.Infix ~name:"/f" ~builtin:Div_f + let div_f = mk' + ~pos:Pretty.Infix ~name:"/f" ~builtin:Builtin.Div_f "Div_f" [] [Ty.real; Ty.real] Ty.real - let rem_e = Id.const - ~pos:Pretty.Infix ~name:"%" ~builtin:Modulo_e + let rem_e = mk' + ~pos:Pretty.Infix ~name:"%" ~builtin:Builtin.Modulo_e "Modulo" [] [Ty.real; Ty.real] Ty.real - let rem_t = Id.const - ~pos:Pretty.Infix ~name:"%" ~builtin:Modulo_t + let rem_t = mk' + ~pos:Pretty.Infix ~name:"%" ~builtin:Builtin.Modulo_t "Modulo" [] [Ty.real; Ty.real] Ty.real - let rem_f = Id.const - ~pos:Pretty.Infix ~name:"%" ~builtin:Modulo_f + let rem_f = mk' + ~pos:Pretty.Infix ~name:"%" ~builtin:Builtin.Modulo_f "Modulo" [] [Ty.real; Ty.real] Ty.real - let lt = Id.const - ~pos:Pretty.Infix ~name:"<" ~builtin:Lt + let lt = mk' + ~pos:Pretty.Infix ~name:"<" ~builtin:Builtin.Lt "LessThan" [] [Ty.real; Ty.real] Ty.prop - let le = Id.const - ~pos:Pretty.Infix ~name:"<=" ~builtin:Leq + let le = mk' + ~pos:Pretty.Infix ~name:"<=" ~builtin:Builtin.Leq "LessOrEqual" [] [Ty.real; Ty.real] Ty.prop - let gt = Id.const - ~pos:Pretty.Infix ~name:">" ~builtin:Gt + let gt = mk' + ~pos:Pretty.Infix ~name:">" ~builtin:Builtin.Gt "GreaterThan" [] [Ty.real; Ty.real] Ty.prop - let ge = Id.const - ~pos:Pretty.Infix ~name:">=" ~builtin:Geq + let ge = mk' + ~pos:Pretty.Infix ~name:">=" ~builtin:Builtin.Geq "GreaterOrEqual" [] [Ty.real; Ty.real] Ty.prop - let floor = Id.const - ~name:"floor" ~builtin:Floor + let floor = mk' + ~name:"floor" ~builtin:Builtin.Floor "Floor" [] [Ty.real] 
Ty.real - let ceiling = Id.const - ~name:"ceiling" ~builtin:Ceiling + let floor_to_int = mk' + ~name:"floor_to_int" ~builtin:Builtin.Floor_to_int + "Floor" [] [Ty.real] Ty.int + + let ceiling = mk' + ~name:"ceiling" ~builtin:Builtin.Ceiling "Ceiling" [] [Ty.real] Ty.real - let truncate = Id.const - ~name:"truncate" ~builtin:Truncate + let truncate = mk' + ~name:"truncate" ~builtin:Builtin.Truncate "Truncate" [] [Ty.real] Ty.real - let round = Id.const - ~name:"round" ~builtin:Round + let round = mk' + ~name:"round" ~builtin:Builtin.Round "Round" [] [Ty.real] Ty.real - let is_int = Id.const - ~name:"is_int" ~builtin:Is_int + let is_int = mk' + ~name:"is_int" ~builtin:Builtin.Is_int "Is_int" [] [Ty.real] Ty.prop - let is_rat = Id.const - ~name:"is_rat" ~builtin:Is_rat + let is_rat = mk' + ~name:"is_rat" ~builtin:Builtin.Is_rat "Is_rat" [] [Ty.real] Ty.prop end module Bitv = struct let bitv s = - Id.const ~builtin:(Bitvec s) + mk' ~builtin:(Builtin.Bitvec s) (Format.asprintf "bv#%s#" s) [] [] (Ty.bitv (String.length s)) let concat = with_cache ~cache:(Hashtbl.create 13) (fun (i, j) -> - Id.const ~builtin:Bitv_concat "bitv_concat" + mk' ~builtin:Builtin.Bitv_concat "bitv_concat" [] [Ty.bitv i; Ty.bitv j] (Ty.bitv (i + j)) ) let extract = with_cache ~cache:(Hashtbl.create 13) (fun (i, j, n) -> - Id.const ~builtin:(Bitv_extract (i, j)) + mk' ~builtin:(Builtin.Bitv_extract (i, j)) (Format.asprintf "bitv_extract_%d_%d" i j) [] [Ty.bitv n] (Ty.bitv (i - j + 1)) ) let repeat = with_cache ~cache:(Hashtbl.create 13) (fun (k, n) -> - Id.const ~builtin:Bitv_repeat (Format.asprintf "bitv_repeat_%d" k) + mk' ~builtin:Builtin.Bitv_repeat (Format.asprintf "bitv_repeat_%d" k) [] [Ty.bitv n] (Ty.bitv (n * k)) ) let zero_extend = with_cache ~cache:(Hashtbl.create 13) (fun (k, n) -> - Id.const ~builtin:Bitv_zero_extend (Format.asprintf "zero_extend_%d" k) + mk' ~builtin:Builtin.Bitv_zero_extend (Format.asprintf "zero_extend_%d" k) [] [Ty.bitv n] (Ty.bitv (n + k)) ) let sign_extend = 
with_cache ~cache:(Hashtbl.create 13) (fun (k, n) -> - Id.const ~builtin:Bitv_sign_extend (Format.asprintf "sign_extend_%d" k) + mk' ~builtin:Builtin.Bitv_sign_extend (Format.asprintf "sign_extend_%d" k) [] [Ty.bitv n] (Ty.bitv (n + k)) ) let rotate_right = with_cache ~cache:(Hashtbl.create 13) (fun (k, n) -> - Id.const ~builtin:(Bitv_rotate_right k) + mk' ~builtin:(Builtin.Bitv_rotate_right k) (Format.asprintf "rotate_right_%d" k) [] [Ty.bitv n] (Ty.bitv n) ) let rotate_left = with_cache ~cache:(Hashtbl.create 13) (fun (k, n) -> - Id.const ~builtin:(Bitv_rotate_left k) + mk' ~builtin:(Builtin.Bitv_rotate_left k) (Format.asprintf "rotate_left_%d" k) [] [Ty.bitv n] (Ty.bitv n) ) let not = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_not "bvnot" [] [Ty.bitv n] (Ty.bitv n) + mk' ~builtin:Builtin.Bitv_not "bvnot" [] [Ty.bitv n] (Ty.bitv n) ) let and_ = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_and "bvand" [] + mk' ~builtin:Builtin.Bitv_and "bvand" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let or_ = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_or "bvor" [] + mk' ~builtin:Builtin.Bitv_or "bvor" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let nand = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_nand "bvnand" [] + mk' ~builtin:Builtin.Bitv_nand "bvnand" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let nor = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_nor "bvnor" [] + mk' ~builtin:Builtin.Bitv_nor "bvnor" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let xor = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_xor "bvxor" [] + mk' ~builtin:Builtin.Bitv_xor "bvxor" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let xnor = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_xnor "bvxnor" [] + mk' ~builtin:Builtin.Bitv_xnor "bvxnor" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let comp = with_cache ~cache:(Hashtbl.create 
13) (fun n -> - Id.const ~builtin:Bitv_comp "bvcomp" [] + mk' ~builtin:Builtin.Bitv_comp "bvcomp" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv 1) ) let neg = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_neg "bvneg" [] [Ty.bitv n] (Ty.bitv n) + mk' ~builtin:Builtin.Bitv_neg "bvneg" [] [Ty.bitv n] (Ty.bitv n) ) let add = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_add "bvadd" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) + mk' ~builtin:Builtin.Bitv_add "bvadd" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let sub = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_sub "bvsub" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) + mk' ~builtin:Builtin.Bitv_sub "bvsub" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let mul = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_mul "bvmul" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) + mk' ~builtin:Builtin.Bitv_mul "bvmul" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let udiv = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_udiv "bvudiv" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) + mk' ~builtin:Builtin.Bitv_udiv "bvudiv" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let urem = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_urem "bvurem" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) + mk' ~builtin:Builtin.Bitv_urem "bvurem" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let sdiv = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_sdiv "bvsdiv" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) + mk' ~builtin:Builtin.Bitv_sdiv "bvsdiv" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let srem = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_srem "bvsrem" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) + mk' ~builtin:Builtin.Bitv_srem "bvsrem" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let smod = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_smod "bvsmod" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) + mk' 
~builtin:Builtin.Bitv_smod "bvsmod" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let shl = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_shl "bvshl" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) + mk' ~builtin:Builtin.Bitv_shl "bvshl" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let lshr = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_lshr "bvlshr" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) + mk' ~builtin:Builtin.Bitv_lshr "bvlshr" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let ashr = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_ashr "bvashr" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) + mk' ~builtin:Builtin.Bitv_ashr "bvashr" [] [Ty.bitv n; Ty.bitv n] (Ty.bitv n) ) let ult = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_ult "bvult" [] [Ty.bitv n; Ty.bitv n] Ty.prop + mk' ~builtin:Builtin.Bitv_ult "bvult" [] [Ty.bitv n; Ty.bitv n] Ty.prop ) let ule = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_ule "bvule" [] [Ty.bitv n; Ty.bitv n] Ty.prop + mk' ~builtin:Builtin.Bitv_ule "bvule" [] [Ty.bitv n; Ty.bitv n] Ty.prop ) let ugt = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_ugt "bvugt" [] [Ty.bitv n; Ty.bitv n] Ty.prop + mk' ~builtin:Builtin.Bitv_ugt "bvugt" [] [Ty.bitv n; Ty.bitv n] Ty.prop ) let uge = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_uge "bvuge" [] [Ty.bitv n; Ty.bitv n] Ty.prop + mk' ~builtin:Builtin.Bitv_uge "bvuge" [] [Ty.bitv n; Ty.bitv n] Ty.prop ) let slt = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_slt "bvslt" [] [Ty.bitv n; Ty.bitv n] Ty.prop + mk' ~builtin:Builtin.Bitv_slt "bvslt" [] [Ty.bitv n; Ty.bitv n] Ty.prop ) let sle = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_sle "bvsle" [] [Ty.bitv n; Ty.bitv n] Ty.prop + mk' ~builtin:Builtin.Bitv_sle "bvsle" [] [Ty.bitv n; Ty.bitv n] Ty.prop ) let sgt = with_cache ~cache:(Hashtbl.create 
13) (fun n -> - Id.const ~builtin:Bitv_sgt "bvsgt" [] [Ty.bitv n; Ty.bitv n] Ty.prop + mk' ~builtin:Builtin.Bitv_sgt "bvsgt" [] [Ty.bitv n; Ty.bitv n] Ty.prop ) let sge = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:Bitv_sge "bvsge" [] [Ty.bitv n; Ty.bitv n] Ty.prop + mk' ~builtin:Builtin.Bitv_sge "bvsge" [] [Ty.bitv n; Ty.bitv n] Ty.prop ) end @@ -1914,24 +2051,24 @@ module Term = struct let fp = with_cache ~cache:(Hashtbl.create 13) (fun (e, s) -> - Id.const ~builtin:(Fp(e, s)) "fp" [] + mk' ~builtin:(Builtin.Fp(e, s)) "fp" [] [Ty.bitv 1; Ty.bitv e; Ty.bitv (s-1)] (Ty.float e s) ) let roundNearestTiesToEven = - Id.const ~builtin:RoundNearestTiesToEven "RoundNearestTiesToEven" [] [] Ty.roundingMode + mk' ~builtin:Builtin.RoundNearestTiesToEven "RoundNearestTiesToEven" [] [] Ty.roundingMode let roundNearestTiesToAway = - Id.const ~builtin:RoundNearestTiesToAway "RoundNearestTiesToAway" [] [] Ty.roundingMode + mk' ~builtin:Builtin.RoundNearestTiesToAway "RoundNearestTiesToAway" [] [] Ty.roundingMode let roundTowardPositive = - Id.const ~builtin:RoundTowardPositive "RoundTowardPositive" [] [] Ty.roundingMode + mk' ~builtin:Builtin.RoundTowardPositive "RoundTowardPositive" [] [] Ty.roundingMode let roundTowardNegative = - Id.const ~builtin:RoundTowardNegative "RoundTowardNegative" [] [] Ty.roundingMode + mk' ~builtin:Builtin.RoundTowardNegative "RoundTowardNegative" [] [] Ty.roundingMode let roundTowardZero = - Id.const ~builtin:RoundTowardZero "RoundTowardZero" [] [] Ty.roundingMode + mk' ~builtin:Builtin.RoundTowardZero "RoundTowardZero" [] [] Ty.roundingMode (** Generic function for creating functions primarily on the same floating point format with optionally a rounding mode and a particular result @@ -1946,67 +2083,127 @@ module Term = struct | Some res -> res | None -> fp in - Id.const ~builtin:(builtin es) name [] args res + mk' ~builtin:(builtin es) name [] args res ) - let plus_infinity = fp_gen_fun ~args:0 "plus_infinity" (fun (e,s) -> 
Plus_infinity (e,s)) - let minus_infinity = fp_gen_fun ~args:0 "minus_infinity" (fun (e,s) -> Minus_infinity (e,s)) - let plus_zero = fp_gen_fun ~args:0 "plus_zero" (fun (e,s) -> Plus_zero (e,s)) - let minus_zero = fp_gen_fun ~args:0 "minus_zero" (fun (e,s) -> Minus_zero (e,s)) - let nan = fp_gen_fun ~args:0 "nan" (fun (e,s) -> NaN (e,s)) - let abs = fp_gen_fun ~args:1 "fp.abs" (fun (e,s) -> Fp_abs (e,s)) - let neg = fp_gen_fun ~args:1 "fp.neg" (fun (e,s) -> Fp_neg (e,s)) - let add = fp_gen_fun ~args:2 ~rm:() "fp.add" (fun (e,s) -> Fp_add (e,s)) - let sub = fp_gen_fun ~args:2 ~rm:() "fp.sub" (fun (e,s) -> Fp_sub (e,s)) - let mul = fp_gen_fun ~args:2 ~rm:() "fp.mul" (fun (e,s) -> Fp_mul (e,s)) - let div = fp_gen_fun ~args:2 ~rm:() "fp.div" (fun (e,s) -> Fp_div (e,s)) - let fma = fp_gen_fun ~args:3 ~rm:() "fp.fma" (fun (e,s) -> Fp_fma (e,s)) - let sqrt = fp_gen_fun ~args:1 ~rm:() "fp.sqrt" (fun (e,s) -> Fp_sqrt (e,s)) - let rem = fp_gen_fun ~args:2 "fp.rem" (fun (e,s) -> Fp_rem (e,s)) - let roundToIntegral = fp_gen_fun ~args:1 ~rm:() "fp.roundToIntegral" (fun (e,s) -> Fp_roundToIntegral (e,s)) - let min = fp_gen_fun ~args:2 "fp.min" (fun (e,s) -> Fp_min (e,s)) - let max = fp_gen_fun ~args:2 "fp.max" (fun (e,s) -> Fp_max (e,s)) - let leq = fp_gen_fun ~args:2 ~res:Ty.prop "fp.leq" (fun (e,s) -> Fp_leq (e,s)) - let lt = fp_gen_fun ~args:2 ~res:Ty.prop "fp.lt" (fun (e,s) -> Fp_lt (e,s)) - let geq = fp_gen_fun ~args:2 ~res:Ty.prop "fp.geq" (fun (e,s) -> Fp_geq (e,s)) - let gt = fp_gen_fun ~args:2 ~res:Ty.prop "fp.gt" (fun (e,s) -> Fp_gt (e,s)) - let eq = fp_gen_fun ~args:2 ~res:Ty.prop "fp.eq" (fun (e,s) -> Fp_eq (e,s)) - let isNormal = fp_gen_fun ~args:1 ~res:Ty.prop "fp.isnormal" (fun (e,s) -> Fp_isNormal (e,s)) - let isSubnormal = fp_gen_fun ~args:1 ~res:Ty.prop "fp.issubnormal" (fun (e,s) -> Fp_isSubnormal (e,s)) - let isZero = fp_gen_fun ~args:1 ~res:Ty.prop "fp.iszero" (fun (e,s) -> Fp_isZero (e,s)) - let isInfinite = fp_gen_fun ~args:1 ~res:Ty.prop "fp.isinfinite" 
(fun (e,s) -> Fp_isInfinite (e,s)) - let isNaN = fp_gen_fun ~args:1 ~res:Ty.prop "fp.isnan" (fun (e,s) -> Fp_isNaN (e,s)) - let isNegative = fp_gen_fun ~args:1 ~res:Ty.prop "fp.isnegative" (fun (e,s) -> Fp_isNegative (e,s)) - let isPositive = fp_gen_fun ~args:1 ~res:Ty.prop "fp.ispositive" (fun (e,s) -> Fp_isPositive (e,s)) - let to_real = fp_gen_fun ~args:1 ~res:Ty.real "fp.to_real" (fun (e,s) -> To_real (e,s)) + let plus_infinity = + fp_gen_fun ~args:0 "plus_infinity" + (fun (e,s) -> Builtin.Plus_infinity (e,s)) + let minus_infinity = + fp_gen_fun ~args:0 "minus_infinity" + (fun (e,s) -> Builtin.Minus_infinity (e,s)) + let plus_zero = + fp_gen_fun ~args:0 "plus_zero" + (fun (e,s) -> Builtin.Plus_zero (e,s)) + let minus_zero = + fp_gen_fun ~args:0 "minus_zero" + (fun (e,s) -> Builtin.Minus_zero (e,s)) + let nan = + fp_gen_fun ~args:0 "nan" + (fun (e,s) -> Builtin.NaN (e,s)) + let abs = + fp_gen_fun ~args:1 "fp.abs" + (fun (e,s) -> Builtin.Fp_abs (e,s)) + let neg = + fp_gen_fun ~args:1 "fp.neg" + (fun (e,s) -> Builtin.Fp_neg (e,s)) + let add = + fp_gen_fun ~args:2 ~rm:() "fp.add" + (fun (e,s) -> Builtin.Fp_add (e,s)) + let sub = + fp_gen_fun ~args:2 ~rm:() "fp.sub" + (fun (e,s) -> Builtin.Fp_sub (e,s)) + let mul = + fp_gen_fun ~args:2 ~rm:() "fp.mul" + (fun (e,s) -> Builtin.Fp_mul (e,s)) + let div = + fp_gen_fun ~args:2 ~rm:() "fp.div" + (fun (e,s) -> Builtin.Fp_div (e,s)) + let fma = + fp_gen_fun ~args:3 ~rm:() "fp.fma" + (fun (e,s) -> Builtin.Fp_fma (e,s)) + let sqrt = + fp_gen_fun ~args:1 ~rm:() "fp.sqrt" + (fun (e,s) -> Builtin.Fp_sqrt (e,s)) + let rem = + fp_gen_fun ~args:2 "fp.rem" + (fun (e,s) -> Builtin.Fp_rem (e,s)) + let roundToIntegral = + fp_gen_fun ~args:1 ~rm:() "fp.roundToIntegral" + (fun (e,s) -> Builtin.Fp_roundToIntegral (e,s)) + let min = + fp_gen_fun ~args:2 "fp.min" + (fun (e,s) -> Builtin.Fp_min (e,s)) + let max = + fp_gen_fun ~args:2 "fp.max" + (fun (e,s) -> Builtin.Fp_max (e,s)) + let leq = + fp_gen_fun ~args:2 ~res:Ty.prop "fp.leq" + (fun 
(e,s) -> Builtin.Fp_leq (e,s)) + let lt = + fp_gen_fun ~args:2 ~res:Ty.prop "fp.lt" + (fun (e,s) -> Builtin.Fp_lt (e,s)) + let geq = + fp_gen_fun ~args:2 ~res:Ty.prop "fp.geq" + (fun (e,s) -> Builtin.Fp_geq (e,s)) + let gt = + fp_gen_fun ~args:2 ~res:Ty.prop "fp.gt" + (fun (e,s) -> Builtin.Fp_gt (e,s)) + let eq = + fp_gen_fun ~args:2 ~res:Ty.prop "fp.eq" + (fun (e,s) -> Builtin.Fp_eq (e,s)) + let isNormal = + fp_gen_fun ~args:1 ~res:Ty.prop "fp.isnormal" + (fun (e,s) -> Builtin.Fp_isNormal (e,s)) + let isSubnormal = + fp_gen_fun ~args:1 ~res:Ty.prop "fp.issubnormal" + (fun (e,s) -> Builtin.Fp_isSubnormal (e,s)) + let isZero = + fp_gen_fun ~args:1 ~res:Ty.prop "fp.iszero" + (fun (e,s) -> Builtin.Fp_isZero (e,s)) + let isInfinite = + fp_gen_fun ~args:1 ~res:Ty.prop "fp.isinfinite" + (fun (e,s) -> Builtin.Fp_isInfinite (e,s)) + let isNaN = + fp_gen_fun ~args:1 ~res:Ty.prop "fp.isnan" + (fun (e,s) -> Builtin.Fp_isNaN (e,s)) + let isNegative = + fp_gen_fun ~args:1 ~res:Ty.prop "fp.isnegative" + (fun (e,s) -> Builtin.Fp_isNegative (e,s)) + let isPositive = + fp_gen_fun ~args:1 ~res:Ty.prop "fp.ispositive" + (fun (e,s) -> Builtin.Fp_isPositive (e,s)) + let to_real = + fp_gen_fun ~args:1 ~res:Ty.real "fp.to_real" + (fun (e,s) -> Builtin.To_real (e,s)) let ieee_format_to_fp = with_cache ~cache:(Hashtbl.create 13) (fun ((e,s) as es) -> - Id.const ~builtin:(Ieee_format_to_fp (e,s)) "to_fp" [] [Ty.bitv (e+s)] (Ty.float' es) + mk' ~builtin:(Builtin.Ieee_format_to_fp (e,s)) "to_fp" [] [Ty.bitv (e+s)] (Ty.float' es) ) let to_fp = with_cache ~cache:(Hashtbl.create 13) (fun (e1,s1,e2,s2) -> - Id.const ~builtin:(Fp_to_fp (e1,s1,e2,s2)) "to_fp" [] [Ty.roundingMode;Ty.float e1 s1] (Ty.float e2 s2) + mk' ~builtin:(Builtin.Fp_to_fp (e1,s1,e2,s2)) "to_fp" [] [Ty.roundingMode;Ty.float e1 s1] (Ty.float e2 s2) ) let real_to_fp = with_cache ~cache:(Hashtbl.create 13) (fun ((e,s) as es) -> - Id.const ~builtin:(Real_to_fp (e,s)) "to_fp" [] [Ty.roundingMode;Ty.real] (Ty.float' es) + mk' 
~builtin:(Builtin.Real_to_fp (e,s)) "to_fp" [] [Ty.roundingMode;Ty.real] (Ty.float' es) ) let sbv_to_fp = with_cache ~cache:(Hashtbl.create 13) (fun (bv,e,s) -> - Id.const ~builtin:(Sbv_to_fp (bv,e,s)) "to_fp" [] [Ty.roundingMode;Ty.bitv bv] (Ty.float e s) + mk' ~builtin:(Builtin.Sbv_to_fp (bv,e,s)) "to_fp" [] [Ty.roundingMode;Ty.bitv bv] (Ty.float e s) ) let ubv_to_fp = with_cache ~cache:(Hashtbl.create 13) (fun (bv,e,s) -> - Id.const ~builtin:(Ubv_to_fp (bv,e,s)) "to_fp" [] [Ty.roundingMode;Ty.bitv bv] (Ty.float e s) + mk' ~builtin:(Builtin.Ubv_to_fp (bv,e,s)) "to_fp" [] [Ty.roundingMode;Ty.bitv bv] (Ty.float e s) ) let to_ubv = with_cache ~cache:(Hashtbl.create 13) (fun (e,s,bv) -> - Id.const ~builtin:(To_ubv (bv,e,s)) "fp.to_ubv" [] [Ty.roundingMode;Ty.float e s] (Ty.bitv bv) + mk' ~builtin:(Builtin.To_ubv (bv,e,s)) "fp.to_ubv" [] [Ty.roundingMode;Ty.float e s] (Ty.bitv bv) ) let to_sbv = with_cache ~cache:(Hashtbl.create 13) (fun (e,s,bv) -> - Id.const ~builtin:(To_sbv (bv,e,s)) "fp.to_sbv" [] [Ty.roundingMode;Ty.float e s] (Ty.bitv bv) + mk' ~builtin:(Builtin.To_sbv (bv,e,s)) "fp.to_sbv" [] [Ty.roundingMode;Ty.float e s] (Ty.bitv bv) ) end @@ -2015,121 +2212,121 @@ module Term = struct let string = with_cache ~cache:(Hashtbl.create 13) (fun s -> - Id.const ~builtin:(Str s) (Format.asprintf {|"%s"|} s) [] [] Ty.string + mk' ~builtin:(Builtin.Str s) (Format.asprintf {|"%s"|} s) [] [] Ty.string ) let length = - Id.const ~builtin:Str_length "length" + mk' ~builtin:Builtin.Str_length "length" [] [Ty.string] Ty.int let at = - Id.const ~builtin:Str_at "at" + mk' ~builtin:Builtin.Str_at "at" [] [Ty.string; Ty.int] Ty.string let to_code = - Id.const ~builtin:Str_to_code "to_code" + mk' ~builtin:Builtin.Str_to_code "to_code" [] [Ty.string] Ty.int let of_code = - Id.const ~builtin:Str_of_code "of_code" + mk' ~builtin:Builtin.Str_of_code "of_code" [] [Ty.int] Ty.string let is_digit = - Id.const ~builtin:Str_is_digit "is_digit" + mk' ~builtin:Builtin.Str_is_digit 
"is_digit" [] [Ty.string] Ty.prop let to_int = - Id.const ~builtin:Str_to_int "to_int" + mk' ~builtin:Builtin.Str_to_int "to_int" [] [Ty.string] Ty.int let of_int = - Id.const ~builtin:Str_of_int "of_int" + mk' ~builtin:Builtin.Str_of_int "of_int" [] [Ty.int] Ty.string let concat = - Id.const ~builtin:Str_concat ~pos:Pretty.Infix "++" + mk' ~builtin:Builtin.Str_concat ~pos:Pretty.Infix "++" [] [Ty.string; Ty.string] Ty.string let sub = - Id.const ~builtin:Str_sub "sub" + mk' ~builtin:Builtin.Str_sub "sub" [] [Ty.string; Ty.int; Ty.int] Ty.string let index_of = - Id.const ~builtin:Str_index_of "index_of" + mk' ~builtin:Builtin.Str_index_of "index_of" [] [Ty.string; Ty.string; Ty.int] Ty.int let replace = - Id.const ~builtin:Str_replace "replace" + mk' ~builtin:Builtin.Str_replace "replace" [] [Ty.string; Ty.string; Ty.string] Ty.string let replace_all = - Id.const ~builtin:Str_replace_all "replace_all" + mk' ~builtin:Builtin.Str_replace_all "replace_all" [] [Ty.string; Ty.string; Ty.string] Ty.string let replace_re = - Id.const ~builtin:Str_replace_re "replace_re" + mk' ~builtin:Builtin.Str_replace_re "replace_re" [] [Ty.string; Ty.string_reg_lang; Ty.string] Ty.string let replace_re_all = - Id.const ~builtin:Str_replace_re_all "replace_re_all" + mk' ~builtin:Builtin.Str_replace_re_all "replace_re_all" [] [Ty.string; Ty.string_reg_lang; Ty.string] Ty.string let is_prefix = - Id.const ~builtin:Str_is_prefix "is_prefix" + mk' ~builtin:Builtin.Str_is_prefix "is_prefix" [] [Ty.string; Ty.string] Ty.prop let is_suffix = - Id.const ~builtin:Str_is_suffix "is_suffix" + mk' ~builtin:Builtin.Str_is_suffix "is_suffix" [] [Ty.string; Ty.string] Ty.prop let contains = - Id.const ~builtin:Str_contains "contains" + mk' ~builtin:Builtin.Str_contains "contains" [] [Ty.string; Ty.string] Ty.prop let lt = - Id.const ~builtin:Str_lexicographic_strict + mk' ~builtin:Builtin.Str_lexicographic_strict ~pos:Pretty.Infix "lt" [] [Ty.string; Ty.string] Ty.prop let leq = - Id.const 
~builtin:Str_lexicographic_large + mk' ~builtin:Builtin.Str_lexicographic_large ~pos:Pretty.Infix "leq" [] [Ty.string; Ty.string] Ty.prop let in_re = - Id.const ~builtin:Str_in_re "in_re" + mk' ~builtin:Builtin.Str_in_re "in_re" [] [Ty.string; Ty.string_reg_lang] Ty.prop module Reg_Lang = struct let empty = - Id.const ~builtin:Re_empty "empty" + mk' ~builtin:Builtin.Re_empty "empty" [] [] Ty.string_reg_lang let all = - Id.const ~builtin:Re_all "all" + mk' ~builtin:Builtin.Re_all "all" [] [] Ty.string_reg_lang let allchar = - Id.const ~builtin:Re_allchar "allchar" + mk' ~builtin:Builtin.Re_allchar "allchar" [] [] Ty.string_reg_lang let of_string = - Id.const ~builtin:Re_of_string "of_string" + mk' ~builtin:Builtin.Re_of_string "of_string" [] [Ty.string] Ty.string_reg_lang let range = - Id.const ~builtin:Re_range "range" + mk' ~builtin:Builtin.Re_range "range" [] [Ty.string; Ty.string] Ty.string_reg_lang let concat = - Id.const ~builtin:Re_concat ~pos:Pretty.Infix "++" + mk' ~builtin:Builtin.Re_concat ~pos:Pretty.Infix "++" [] [Ty.string_reg_lang; Ty.string_reg_lang] Ty.string_reg_lang let union = - Id.const ~builtin:Re_union ~pos:Pretty.Infix "âĒ" + mk' ~builtin:Builtin.Re_union ~pos:Pretty.Infix "âĒ" [] [Ty.string_reg_lang; Ty.string_reg_lang] Ty.string_reg_lang let inter = - Id.const ~builtin:Re_inter ~pos:Pretty.Infix "âŠ" + mk' ~builtin:Builtin.Re_inter ~pos:Pretty.Infix "âŠ" [] [Ty.string_reg_lang; Ty.string_reg_lang] Ty.string_reg_lang let diff = - Id.const ~builtin:Re_diff ~pos:Pretty.Infix "-" + mk' ~builtin:Builtin.Re_diff ~pos:Pretty.Infix "-" [] [Ty.string_reg_lang; Ty.string_reg_lang] Ty.string_reg_lang let star = - Id.const ~builtin:Re_star ~pos:Pretty.Prefix "*" + mk' ~builtin:Builtin.Re_star ~pos:Pretty.Prefix "*" [] [Ty.string_reg_lang] Ty.string_reg_lang let cross = - Id.const ~builtin:Re_cross ~pos:Pretty.Prefix "+" + mk' ~builtin:Builtin.Re_cross ~pos:Pretty.Prefix "+" [] [Ty.string_reg_lang] Ty.string_reg_lang let complement = - Id.const 
~builtin:Re_complement "complement" + mk' ~builtin:Builtin.Re_complement "complement" [] [Ty.string_reg_lang] Ty.string_reg_lang let option = - Id.const ~builtin:Re_option "option" + mk' ~builtin:Builtin.Re_option "option" [] [Ty.string_reg_lang] Ty.string_reg_lang let power = with_cache ~cache:(Hashtbl.create 13) (fun n -> - Id.const ~builtin:(Re_power n) (Format.asprintf "power_%d" n) + mk' ~builtin:(Builtin.Re_power n) (Format.asprintf "power_%d" n) [] [Ty.string_reg_lang] Ty.string_reg_lang ) let loop = with_cache ~cache:(Hashtbl.create 13) (fun (n1, n2) -> - Id.const ~builtin:(Re_loop (n1, n2)) (Format.asprintf "loop_%d_%d" n1 n2) + mk' ~builtin:(Builtin.Re_loop (n1, n2)) (Format.asprintf "loop_%d_%d" n1 n2) [] [Ty.string_reg_lang] Ty.string_reg_lang ) @@ -2141,19 +2338,29 @@ module Term = struct (* Constructors are simply constants *) module Cstr = struct - type t = term_const - let tag = Id.tag + type t = term_cst let hash = Id.hash + let print = Id.print let equal = Id.equal let compare = Id.compare let get_tag = Id.get_tag let get_tag_last = Id.get_tag_last + let get_tag_list = Id.get_tag_list + let set_tag = Id.set_tag + let add_tag = Id.add_tag + let add_tag_opt = Id.add_tag_opt + let add_tag_list = Id.add_tag_list + let unset_tag = Id.unset_tag + + exception Bad_pattern_arity of term_cst * ty list * term list + let arity (c : t) = - List.length c.ty.fun_vars, List.length c.ty.fun_args + let vars, args, _ = Ty.poly_sig c.id_ty in + List.length vars, List.length args let tester c = match c.builtin with - | Constructor { adt; case; } -> + | Builtin.Constructor { adt; case; } -> begin match Ty.definition adt with | Some Adt { cases; _ } -> cases.(case).tester | _ -> assert false @@ -2161,27 +2368,37 @@ module Term = struct | _ -> raise (Constructor_expected c) let void = - match define_adt Ty.Const.unit [] ["void", []] with + match define_adt Ty.Const.unit [] [Path.global "void", []] with | [void, _] -> void | _ -> assert false let pattern_arity (c : t) ret 
tys = try - let s = List.fold_left2 Subst.Var.bind Subst.empty c.ty.fun_vars tys in - let s = Ty.robinson s c.ty.fun_ret ret in - List.map (Ty.subst s) c.ty.fun_args + let fun_vars, fun_args, fun_ret = Ty.poly_sig c.id_ty in + let s = List.fold_left2 Ty.subst_bind Subst.empty fun_vars tys in + let s = Ty.robinson s fun_ret ret in + List.map (Ty.subst s) fun_args with | Ty.Impossible_unification _ -> raise (Wrong_sum_type (c, ret)) - | Invalid_argument _ -> raise (Bad_term_arity (c, tys, [])) + | Invalid_argument _ -> raise (Bad_pattern_arity (c, tys, [])) end (* Record fields are represented as their destructors, i.e. constants *) module Field = struct - type t = term_const + type t = term_cst let hash = Id.hash + let print = Id.print let equal = Id.equal let compare = Id.compare + let get_tag = Id.get_tag + let get_tag_last = Id.get_tag_last + let get_tag_list = Id.get_tag_list + let set_tag = Id.set_tag + let add_tag = Id.add_tag + let add_tag_opt = Id.add_tag_opt + let add_tag_list = Id.add_tag_list + let unset_tag = Id.unset_tag (* Record field getter *) let find ty_c i = @@ -2197,7 +2414,7 @@ module Term = struct (* Record creation *) let index ty_c f = match f.builtin with - | Destructor { adt = ty_d; case = i; field = j; _ } -> + | Builtin.Destructor { adt = ty_d; case = i; field = j; _ } -> if Id.equal ty_c ty_d then begin assert (i = 0); j @@ -2208,26 +2425,44 @@ module Term = struct end - - (* Filter check *) - let rec check_filters res f tys args = function - | [] -> res - | (name, active, check) :: r -> - if !active then match (check f tys args) with - | `Pass -> check_filters res f tys args r - | `Warn -> check_filters res f tys args r - | `Error msg -> raise (Filter_failed_term (name, res, msg)) - else - check_filters res f tys args r - (* Term creation *) - let mk ?(tags=Tag.empty) descr ty = { descr; ty; hash = -1; tags; } + let mk ?(tags=Tag.empty) term_descr term_ty = + { term_descr; term_ty; term_hash = -1; term_tags = tags; } - let of_var v = mk 
(Var v) v.ty + let of_var v = mk (Var v) v.id_ty + let of_cst c = mk (Cst c) c.id_ty - (* This function does not check types enough, do not export outside the module *) + (* Binder creation *) let mk_bind b body = - mk (Binder (b, body)) (ty body) + match b with + | Let_seq [] + | Let_par [] + | Forall ([], []) + | Exists ([], []) + | Lambda ([], []) -> body + + | Forall _ + | Exists _ -> + if not (Ty.(equal prop) (ty body)) then + raise (Wrong_type (body, Ty.prop)); + mk (Binder (b, body)) Ty.prop + | Let_seq l | Let_par l -> + List.iter (fun ((v : Var.t), t) -> + if not (Ty.equal v.id_ty (ty t)) then raise (Wrong_type (t, v.id_ty)) + ) l; + mk (Binder (b, body)) (ty body) + | Lambda (tys, ts) -> + let res_ty = + Ty.pi tys + (Ty.arrow (List.map Var.ty ts) (ty body)) + in + mk (Binder (b, body)) res_ty + + let lam (tys, ts) body = mk_bind (Lambda (tys, ts)) body + let all (tys, ts) body = mk_bind (Forall (tys, ts)) body + let ex (tys, ts) body = mk_bind (Exists (tys, ts)) body + let letin l body = mk_bind (Let_seq l) body + let letand l body = mk_bind (Let_par l) body (* Substitutions *) let rec ty_var_list_subst ty_var_map = function @@ -2238,9 +2473,9 @@ module Term = struct let rec term_var_list_subst ty_var_map t_var_map acc = function | [] -> List.rev acc, t_var_map | (v :: r : term_var list) -> - let ty = Ty.subst ty_var_map v.ty in - if not (Ty.equal ty v.ty) then - let nv = Var.mk v.name ty in + let ty = Ty.subst ty_var_map v.id_ty in + if not (Ty.equal ty v.id_ty) then + let nv = Var.create v.path ty in term_var_list_subst ty_var_map (Subst.Var.bind t_var_map v (of_var nv)) (nv :: acc) r else @@ -2248,7 +2483,7 @@ module Term = struct (Subst.Var.remove v t_var_map) (v :: acc) r let rec subst_aux ~fix ty_var_map t_var_map (t : t) = - match t.descr with + match t.term_descr with | Var v -> begin match Subst.Var.get v t_var_map with | exception Not_found -> t @@ -2257,11 +2492,16 @@ module Term = struct then subst_aux ~fix ty_var_map t_var_map term else term 
end + | Cst _ -> t | App (f, tys, args) -> + let new_f = subst_aux ~fix ty_var_map t_var_map f in let new_tys = List.map (Ty.subst ~fix ty_var_map) tys in let new_args = List.map (subst_aux ~fix ty_var_map t_var_map) args in - if List.for_all2 (==) new_tys tys && List.for_all2 (==) new_args args then t - else apply f new_tys new_args + if new_f == f && + List.for_all2 (==) new_tys tys && + List.for_all2 (==) new_args args + then t + else apply new_f new_tys new_args | Binder (b, body) -> let b', ty_var_map, t_var_map = binder_subst ~fix ty_var_map t_var_map b in mk_bind b' (subst_aux ~fix ty_var_map t_var_map body) @@ -2271,6 +2511,17 @@ module Term = struct pattern_match scrutinee branches and binder_subst ~fix ty_var_map t_var_map = function + | Let_seq l -> + let l, t_var_map = binding_list_subst ~fix ty_var_map t_var_map [] l in + Let_seq l, ty_var_map, t_var_map + | Let_par l -> + let l, t_var_map = binding_list_subst ~fix ty_var_map t_var_map [] l in + Let_par l, ty_var_map, t_var_map + | Lambda (tys, ts) -> + (* term variables in ts may have their types changed by the subst *) + let ty_var_map = ty_var_list_subst ty_var_map tys in + let ts, t_var_map = term_var_list_subst ty_var_map t_var_map [] ts in + Lambda (tys, ts), ty_var_map, t_var_map | Exists (tys, ts) -> (* term variables in ts may have their types changed by the subst *) let ty_var_map = ty_var_list_subst ty_var_map tys in @@ -2281,20 +2532,17 @@ module Term = struct let ty_var_map = ty_var_list_subst ty_var_map tys in let ts, t_var_map = term_var_list_subst ty_var_map t_var_map [] ts in Forall (tys, ts), ty_var_map, t_var_map - | Letin l -> - let l, t_var_map = binding_list_subst ~fix ty_var_map t_var_map [] l in - Letin l, ty_var_map, t_var_map and binding_list_subst ~fix ty_var_map t_var_map acc = function | [] -> List.rev acc, t_var_map | ((v, t) :: r : (term_var * term) list) -> let t = subst_aux ~fix ty_var_map t_var_map t in - if Ty.equal (ty t) v.ty then begin + if Ty.equal (ty t) v.id_ty 
then begin let t_var_map = Subst.Var.remove v t_var_map in let acc = (v, t) :: acc in binding_list_subst ~fix ty_var_map t_var_map acc r end else begin - let nv = Var.mk v.name (ty t) in + let nv = Var.create v.path (ty t) in let t_var_map = Subst.Var.bind t_var_map v (of_var nv) in let acc = (nv, t) :: acc in binding_list_subst ~fix ty_var_map t_var_map acc r @@ -2313,27 +2561,96 @@ module Term = struct subst_aux ~fix ty_var_map t_var_map t (* Application typechecking *) - and instantiate (f : term_const) tys args = - if List.length f.ty.fun_vars <> List.length tys || - List.length f.ty.fun_args <> List.length args then begin - raise (Bad_term_arity (f, tys, args)) - end else begin - let map = List.fold_left2 Subst.Var.bind Subst.empty f.ty.fun_vars tys in - let s = List.fold_left2 (fun s expected term -> - try Ty.robinson s expected (ty term) - with Ty.Impossible_unification _ -> - raise (Wrong_type (term, Ty.subst s expected)) - ) map f.ty.fun_args args - in - Subst.iter Ty.set_wildcard s; - Ty.subst s f.ty.fun_ret - end + and instantiate_finalize subst ty = + let subst = Subst.map (Ty.subst ~fix:true subst) subst in + Subst.iter Ty.set_wildcard subst; + Ty.subst subst ty + + and instantiate_term_app subst fun_ty args = + let rec aux subst fun_ty_args fun_ty_ret args = + match fun_ty_args, args with + (* full application *) + | [], [] -> + instantiate_finalize subst fun_ty_ret + (* partial application *) + | _ :: _, [] -> + instantiate_finalize subst (Ty.arrow fun_ty_args fun_ty_ret) + (* over application *) + | [], arg :: rest -> + let ret = Ty.of_var (Ty.Var.wildcard ()) in + let potential_fun_ty = Ty.arrow [ty arg] ret in + begin match Ty.robinson subst fun_ty_ret potential_fun_ty with + | subst -> instantiate_term_app subst ret rest + | exception Ty.Impossible_unification _ -> + raise (Over_application args) + end + (* regular application, carry on *) + | expected :: fun_ty_args, arg :: args -> + begin match Ty.robinson subst expected (ty arg) with + | subst 
-> aux subst fun_ty_args fun_ty_ret args + | exception Ty.Impossible_unification _ -> + raise (Wrong_type (arg, Ty.subst subst expected)) + end + in + match args with + | [] -> instantiate_finalize subst fun_ty + | _ -> + let fun_ty_args, fun_ty_ret = Ty.split_arrow fun_ty in + aux subst fun_ty_args fun_ty_ret args + + and instantiate_ty_app subst fun_ty tys args = + let exception Bad_arity in + let rec aux subst fun_ty_vars fun_ty_body tys args = + match fun_ty_vars, tys with + (* full type application *) + | [], [] -> instantiate_term_app subst fun_ty_body args + (* partial type application *) + | _ :: _, [] -> + begin match args with + | [] -> instantiate_finalize subst (Ty.pi fun_ty_vars fun_ty) + | _ -> raise Bad_arity + end + (* over application + in prenex polymoprhism, type substitution cannot create Pi + quantifications (the substitution rhs cannot be Pi _). *) + | [], _ :: _ -> + raise Bad_arity + (* regular application + we prevent type schemas (i.e. Pi _) from beign instantiated + with polymorphic types to preserve prenex polymorphism. + The Ty.subst_bind function performs this check. *) + | ty_var :: fun_ty_vars, ty :: tys -> + let subst = Ty.subst_bind subst ty_var ty in + aux subst fun_ty_vars fun_ty_body tys args + in + match tys with + | [] -> instantiate_term_app subst fun_ty args + | _ -> + let fun_ty_vars, fun_ty_body = Ty.split_pi fun_ty in + begin + try aux subst fun_ty_vars fun_ty_body tys args + with Bad_arity -> raise (Bad_poly_arity (fun_ty_vars, tys)) + end + + and instantiate fun_ty tys args = + (* + Format.eprintf "@[<v 2>inst: %a@ %a@ %a@ @]@." 
+ Print.ty fun_ty + (Format.pp_print_list Print.ty) tys + (Format.pp_print_list (fun fmt t -> + Format.fprintf fmt "%a: %a" Print.term t Print.ty (ty t) + )) args; + *) + instantiate_ty_app Subst.empty fun_ty tys args (* Application *) and apply f tys args = - let ret = instantiate f tys args in - let res = mk (App (f, tys, args)) ret in - check_filters res f tys args (Const.get_tag f Filter.term) + match tys, args with + | [], [] -> f + | _, _ -> + (* Format.eprintf "apply: %a@." Print.term f; *) + let ret_ty = instantiate (ty f) tys args in + mk (App (f, tys, args)) ret_ty (* Pattern matching *) and pattern_match scrutinee branches = @@ -2341,7 +2658,7 @@ module Term = struct (* first, unify the type of the scrutinee and all patterns, and unify the type of all bodies *) - let body_ty = Ty.wildcard () in + let body_ty = Ty.of_var (Ty.Var.wildcard ()) in let s = List.fold_left (fun acc (pattern, body) -> let acc = try Ty.robinson acc scrutinee_ty (ty pattern) @@ -2355,6 +2672,7 @@ module Term = struct ) Subst.empty branches in (* Apply the substitution to the scrutinee, patterns and bodies *) + let () = Subst.iter Ty.set_wildcard s in let scrutinee = subst s Subst.empty scrutinee in let branches = List.map (fun (pat, body) -> (subst s Subst.empty pat, subst s Subst.empty body) @@ -2362,28 +2680,34 @@ module Term = struct (* Check exhaustivity *) let () = check_exhaustivity (ty scrutinee) (List.map fst branches) in (* Build the pattern matching *) - mk (Match (scrutinee, branches)) (Ty.subst s body_ty) + mk (Match (scrutinee, branches)) body_ty (* Wrappers around application *) - let apply_cstr = apply + let apply_cst (c : term_cst) tys args = + apply (of_cst c) tys args - let apply_field (f : term_const) t = - let tys = init_list - (List.length f.ty.fun_vars) - (fun _ -> Ty.wildcard ()) + let apply_cstr (c : Cstr.t) tys args = + apply (of_cst c) tys args + + let apply_field (f : Field.t) t = + let f_ty_vars, _ = Ty.split_pi f.id_ty in + let tys = + init_list 
(List.length f_ty_vars) + (fun _ -> Ty.of_var (Ty.Var.wildcard ())) in - apply f tys [t] + apply (of_cst f) tys [t] (* ADT constructor tester *) let cstr_tester c t = let tester = Cstr.tester c in + let tester_ty_vars, _ = Ty.split_pi tester.id_ty in let ty_args = init_list - (List.length tester.ty.fun_vars) - (fun _ -> Ty.wildcard ()) + (List.length tester_ty_vars) + (fun _ -> Ty.of_var (Ty.Var.wildcard ())) in - apply tester ty_args [t] + apply_cst tester ty_args [t] (* Recor creation *) let build_record_fields ty_c l = @@ -2406,7 +2730,7 @@ module Term = struct | [] -> raise (Invalid_argument "Dolmen.Expr.record") | ((f, _) :: _) as l -> begin match f.builtin with - | Destructor { adt = ty_c; cstr = c; _ } when Ty.is_record ty_c -> + | Builtin.Destructor { adt = ty_c; cstr = c; _ } when Ty.is_record ty_c -> let fields = build_record_fields ty_c l in (* Check that all fields are indeed present, and create the list of term arguments *) @@ -2416,11 +2740,12 @@ module Term = struct | Some v -> v ) fields in (* Create type wildcard to be unified during application. 
*) + let c_ty_vars, _ = Ty.split_pi c.id_ty in let ty_args = init_list - (List.length c.ty.fun_vars) - (fun _ -> Ty.wildcard ()) + (List.length c_ty_vars) + (fun _ -> Ty.of_var (Ty.Var.wildcard ())) in - apply c ty_args t_args + apply_cst c ty_args t_args | _ -> raise (Field_expected f) end @@ -2439,107 +2764,114 @@ module Term = struct (* typing annotations *) let ensure t ty = - match Ty.robinson Subst.empty ty t.ty with - | s -> subst s Subst.empty t + match Ty.robinson Subst.empty ty t.term_ty with + | s -> + Subst.iter Ty.set_wildcard s; + subst s Subst.empty t | exception Ty.Impossible_unification _ -> raise (Wrong_type (t, ty)) (* coercion *) let coerce dst_ty t = let src_ty = ty t in - apply Const.coerce [src_ty; dst_ty] [t] + apply_cst Const.coerce [src_ty; dst_ty] [t] (* Common constructions *) - let void = apply Cstr.void [] [] + let void = apply_cst Cstr.void [] [] - let _true = apply Const._true [] [] - let _false = apply Const._false [] [] - - let eq a b = apply Const.eq [ty a] [a; b] + let _true = apply_cst Const._true [] [] + let _false = apply_cst Const._false [] [] let eqs = function - | [] -> apply (Const.eqs 0) [] [] - | (h :: _) as l -> apply (Const.eqs (List.length l)) [ty h] l + | [] -> apply_cst (Const.eqs 0) [] [] + | (h :: _) as l -> apply_cst (Const.eqs (List.length l)) [ty h] l + + let eq a b = eqs [a; b] let distinct = function - | [] -> apply (Const.distinct 0) [] [] - | (h :: _) as l -> apply (Const.distinct (List.length l)) [ty h] l + | [] -> apply_cst (Const.distinct 0) [] [] + | (h :: _) as l -> apply_cst (Const.distinct (List.length l)) [ty h] l + + let neq a b = distinct [a; b] - let neg x = apply Const.neg [] [x] + let neg x = apply_cst Const.neg [] [x] - let _and l = apply (Const._and (List.length l)) [] l + let _and l = apply_cst (Const._and (List.length l)) [] l - let _or l = apply (Const._or (List.length l)) [] l + let _or l = apply_cst (Const._or (List.length l)) [] l - let nand p q = apply Const.nand [] [p; q] + let nand p 
q = apply_cst Const.nand [] [p; q] - let nor p q = apply Const.nor [] [p; q] + let nor p q = apply_cst Const.nor [] [p; q] - let xor p q = apply Const.xor [] [p; q] + let xor p q = apply_cst Const.xor [] [p; q] - let imply p q = apply Const.imply [] [p; q] + let imply p q = apply_cst Const.imply [] [p; q] - let equiv p q = apply Const.equiv [] [p; q] + let implied p q = apply_cst Const.implied [] [p; q] - let int s = apply (Const.Int.int s) [] [] - let rat s = apply (Const.Rat.rat s) [] [] - let real s = apply (Const.Real.real s) [] [] + let equiv p q = apply_cst Const.equiv [] [p; q] + + let int s = apply_cst (Const.Int.int s) [] [] + let rat s = apply_cst (Const.Rat.rat s) [] [] + let real s = apply_cst (Const.Real.real s) [] [] (* arithmetic *) module Int = struct let mk = int - let minus t = apply Const.Int.minus [] [t] - let add a b = apply Const.Int.add [] [a; b] - let sub a b = apply Const.Int.sub [] [a; b] - let mul a b = apply Const.Int.mul [] [a; b] - let div a b = apply Const.Int.div_e [] [a; b] - let rem a b = apply Const.Int.rem_e [] [a; b] - let div_e a b = apply Const.Int.div_e [] [a; b] - let div_t a b = apply Const.Int.div_t [] [a; b] - let div_f a b = apply Const.Int.div_f [] [a; b] - let rem_e a b = apply Const.Int.rem_e [] [a; b] - let rem_t a b = apply Const.Int.rem_t [] [a; b] - let rem_f a b = apply Const.Int.rem_f [] [a; b] - let abs a = apply Const.Int.abs [] [a] - let lt a b = apply Const.Int.lt [] [a; b] - let le a b = apply Const.Int.le [] [a; b] - let gt a b = apply Const.Int.gt [] [a; b] - let ge a b = apply Const.Int.ge [] [a; b] - let floor a = apply Const.Int.floor [] [a] - let ceiling a = apply Const.Int.ceiling [] [a] - let truncate a = apply Const.Int.truncate [] [a] - let round a = apply Const.Int.round [] [a] - let is_int a = apply Const.Int.is_int [] [a] - let is_rat a = apply Const.Int.is_rat [] [a] + let minus t = apply_cst Const.Int.minus [] [t] + let add a b = apply_cst Const.Int.add [] [a; b] + let sub a b = apply_cst 
Const.Int.sub [] [a; b] + let mul a b = apply_cst Const.Int.mul [] [a; b] + let pow a b = apply_cst Const.Int.pow [] [a; b] + let div a b = apply_cst Const.Int.div_e [] [a; b] + let rem a b = apply_cst Const.Int.rem_e [] [a; b] + let div_e a b = apply_cst Const.Int.div_e [] [a; b] + let div_t a b = apply_cst Const.Int.div_t [] [a; b] + let div_f a b = apply_cst Const.Int.div_f [] [a; b] + let rem_e a b = apply_cst Const.Int.rem_e [] [a; b] + let rem_t a b = apply_cst Const.Int.rem_t [] [a; b] + let rem_f a b = apply_cst Const.Int.rem_f [] [a; b] + let abs a = apply_cst Const.Int.abs [] [a] + let lt a b = apply_cst Const.Int.lt [] [a; b] + let le a b = apply_cst Const.Int.le [] [a; b] + let gt a b = apply_cst Const.Int.gt [] [a; b] + let ge a b = apply_cst Const.Int.ge [] [a; b] + let floor a = apply_cst Const.Int.floor [] [a] + let ceiling a = apply_cst Const.Int.ceiling [] [a] + let truncate a = apply_cst Const.Int.truncate [] [a] + let round a = apply_cst Const.Int.round [] [a] + let is_int a = apply_cst Const.Int.is_int [] [a] + let is_rat a = apply_cst Const.Int.is_rat [] [a] let to_int t = coerce Ty.int t let to_rat t = coerce Ty.rat t let to_real t = coerce Ty.real t - let divisible s t = apply Const.Int.divisible [] [int s; t] + let divisible s t = apply_cst Const.Int.divisible [] [int s; t] end module Rat = struct (* let mk = rat *) - let minus t = apply Const.Rat.minus [] [t] - let add a b = apply Const.Rat.add [] [a; b] - let sub a b = apply Const.Rat.sub [] [a; b] - let mul a b = apply Const.Rat.mul [] [a; b] - let div a b = apply Const.Rat.div [] [a; b] - let div_e a b = apply Const.Rat.div_e [] [a; b] - let div_t a b = apply Const.Rat.div_t [] [a; b] - let div_f a b = apply Const.Rat.div_f [] [a; b] - let rem_e a b = apply Const.Rat.rem_e [] [a; b] - let rem_t a b = apply Const.Rat.rem_t [] [a; b] - let rem_f a b = apply Const.Rat.rem_f [] [a; b] - let lt a b = apply Const.Rat.lt [] [a; b] - let le a b = apply Const.Rat.le [] [a; b] - let gt a b = 
apply Const.Rat.gt [] [a; b] - let ge a b = apply Const.Rat.ge [] [a; b] - let floor a = apply Const.Rat.floor [] [a] - let ceiling a = apply Const.Rat.ceiling [] [a] - let truncate a = apply Const.Rat.truncate [] [a] - let round a = apply Const.Rat.round [] [a] - let is_int a = apply Const.Rat.is_int [] [a] - let is_rat a = apply Const.Rat.is_rat [] [a] + let minus t = apply_cst Const.Rat.minus [] [t] + let add a b = apply_cst Const.Rat.add [] [a; b] + let sub a b = apply_cst Const.Rat.sub [] [a; b] + let mul a b = apply_cst Const.Rat.mul [] [a; b] + let div a b = apply_cst Const.Rat.div [] [a; b] + let div_e a b = apply_cst Const.Rat.div_e [] [a; b] + let div_t a b = apply_cst Const.Rat.div_t [] [a; b] + let div_f a b = apply_cst Const.Rat.div_f [] [a; b] + let rem_e a b = apply_cst Const.Rat.rem_e [] [a; b] + let rem_t a b = apply_cst Const.Rat.rem_t [] [a; b] + let rem_f a b = apply_cst Const.Rat.rem_f [] [a; b] + let lt a b = apply_cst Const.Rat.lt [] [a; b] + let le a b = apply_cst Const.Rat.le [] [a; b] + let gt a b = apply_cst Const.Rat.gt [] [a; b] + let ge a b = apply_cst Const.Rat.ge [] [a; b] + let floor a = apply_cst Const.Rat.floor [] [a] + let ceiling a = apply_cst Const.Rat.ceiling [] [a] + let truncate a = apply_cst Const.Rat.truncate [] [a] + let round a = apply_cst Const.Rat.round [] [a] + let is_int a = apply_cst Const.Rat.is_int [] [a] + let is_rat a = apply_cst Const.Rat.is_rat [] [a] let to_int t = coerce Ty.int t let to_rat t = coerce Ty.rat t let to_real t = coerce Ty.real t @@ -2547,27 +2879,29 @@ module Term = struct module Real = struct let mk = real - let minus t = apply Const.Real.minus [] [t] - let add a b = apply Const.Real.add [] [a; b] - let sub a b = apply Const.Real.sub [] [a; b] - let mul a b = apply Const.Real.mul [] [a; b] - let div a b = apply Const.Real.div [] [a; b] - let div_e a b = apply Const.Real.div_e [] [a; b] - let div_t a b = apply Const.Real.div_t [] [a; b] - let div_f a b = apply Const.Real.div_f [] [a; b] - let 
rem_e a b = apply Const.Real.rem_e [] [a; b] - let rem_t a b = apply Const.Real.rem_t [] [a; b] - let rem_f a b = apply Const.Real.rem_f [] [a; b] - let lt a b = apply Const.Real.lt [] [a; b] - let le a b = apply Const.Real.le [] [a; b] - let gt a b = apply Const.Real.gt [] [a; b] - let ge a b = apply Const.Real.ge [] [a; b] - let floor a = apply Const.Real.floor [] [a] - let ceiling a = apply Const.Real.ceiling [] [a] - let truncate a = apply Const.Real.truncate [] [a] - let round a = apply Const.Real.round [] [a] - let is_int a = apply Const.Real.is_int [] [a] - let is_rat a = apply Const.Real.is_rat [] [a] + let minus t = apply_cst Const.Real.minus [] [t] + let add a b = apply_cst Const.Real.add [] [a; b] + let sub a b = apply_cst Const.Real.sub [] [a; b] + let mul a b = apply_cst Const.Real.mul [] [a; b] + let pow a b = apply_cst Const.Real.pow [] [a; b] + let div a b = apply_cst Const.Real.div [] [a; b] + let div_e a b = apply_cst Const.Real.div_e [] [a; b] + let div_t a b = apply_cst Const.Real.div_t [] [a; b] + let div_f a b = apply_cst Const.Real.div_f [] [a; b] + let rem_e a b = apply_cst Const.Real.rem_e [] [a; b] + let rem_t a b = apply_cst Const.Real.rem_t [] [a; b] + let rem_f a b = apply_cst Const.Real.rem_f [] [a; b] + let lt a b = apply_cst Const.Real.lt [] [a; b] + let le a b = apply_cst Const.Real.le [] [a; b] + let gt a b = apply_cst Const.Real.gt [] [a; b] + let ge a b = apply_cst Const.Real.ge [] [a; b] + let floor a = apply_cst Const.Real.floor [] [a] + let floor_to_int a = apply_cst Const.Real.floor_to_int [] [a] + let ceiling a = apply_cst Const.Real.ceiling [] [a] + let truncate a = apply_cst Const.Real.truncate [] [a] + let round a = apply_cst Const.Real.round [] [a] + let is_int a = apply_cst Const.Real.is_int [] [a] + let is_rat a = apply_cst Const.Real.is_rat [] [a] let to_int t = coerce Ty.int t let to_rat t = coerce Ty.rat t let to_real t = coerce Ty.real t @@ -2589,354 +2923,339 @@ module Term = struct let select t idx = let src, dst 
= match_array_type t in - apply Const.select [src; dst] [t; idx] + apply_cst Const.select [src; dst] [t; idx] let store t idx value = let src, dst = match_array_type t in - apply Const.store [src; dst] [t; idx; value] + apply_cst Const.store [src; dst] [t; idx; value] (* Bitvectors *) module Bitv = struct let match_bitv_type t = - match ty t with - | { descr = App ({ builtin = Bitv i; _ }, _); _ } -> i + match Ty.descr (ty t) with + | TyApp ({ builtin = Builtin.Bitv i; _ }, _) -> i | _ -> raise (Wrong_type (t, Ty.bitv 0)) - let mk s = apply (Const.Bitv.bitv s) [] [] + let mk s = apply_cst (Const.Bitv.bitv s) [] [] let concat u v = let i = match_bitv_type u in let j = match_bitv_type v in - apply (Const.Bitv.concat (i, j)) [] [u; v] + apply_cst (Const.Bitv.concat (i, j)) [] [u; v] let extract i j t = let n = match_bitv_type t in (* TODO: check that i and j are correct index for a bitv(n) *) - apply (Const.Bitv.extract (i, j, n)) [] [t] + apply_cst (Const.Bitv.extract (i, j, n)) [] [t] let repeat k t = let n = match_bitv_type t in - apply (Const.Bitv.repeat (k, n)) [] [t] + apply_cst (Const.Bitv.repeat (k, n)) [] [t] let zero_extend k t = let n = match_bitv_type t in - apply (Const.Bitv.zero_extend (k, n)) [] [t] + apply_cst (Const.Bitv.zero_extend (k, n)) [] [t] let sign_extend k t = let n = match_bitv_type t in - apply (Const.Bitv.sign_extend (k, n)) [] [t] + apply_cst (Const.Bitv.sign_extend (k, n)) [] [t] let rotate_right k t = let n = match_bitv_type t in - apply (Const.Bitv.rotate_right (k, n)) [] [t] + apply_cst (Const.Bitv.rotate_right (k, n)) [] [t] let rotate_left k t = let n = match_bitv_type t in - apply (Const.Bitv.rotate_left (k, n)) [] [t] + apply_cst (Const.Bitv.rotate_left (k, n)) [] [t] let not t = let n = match_bitv_type t in - apply (Const.Bitv.not n) [] [t] + apply_cst (Const.Bitv.not n) [] [t] let and_ u v = let n = match_bitv_type u in - apply (Const.Bitv.and_ n) [] [u; v] + apply_cst (Const.Bitv.and_ n) [] [u; v] let or_ u v = let n = 
match_bitv_type u in - apply (Const.Bitv.or_ n) [] [u; v] + apply_cst (Const.Bitv.or_ n) [] [u; v] let nand u v = let n = match_bitv_type u in - apply (Const.Bitv.nand n) [] [u; v] + apply_cst (Const.Bitv.nand n) [] [u; v] let nor u v = let n = match_bitv_type u in - apply (Const.Bitv.nor n) [] [u; v] + apply_cst (Const.Bitv.nor n) [] [u; v] let xor u v = let n = match_bitv_type u in - apply (Const.Bitv.xor n) [] [u; v] + apply_cst (Const.Bitv.xor n) [] [u; v] let xnor u v = let n = match_bitv_type u in - apply (Const.Bitv.xnor n) [] [u; v] + apply_cst (Const.Bitv.xnor n) [] [u; v] let comp u v = let n = match_bitv_type u in - apply (Const.Bitv.comp n) [] [u; v] + apply_cst (Const.Bitv.comp n) [] [u; v] let neg t = let n = match_bitv_type t in - apply (Const.Bitv.neg n) [] [t] + apply_cst (Const.Bitv.neg n) [] [t] let add u v = let n = match_bitv_type u in - apply (Const.Bitv.add n) [] [u; v] + apply_cst (Const.Bitv.add n) [] [u; v] let sub u v = let n = match_bitv_type u in - apply (Const.Bitv.sub n) [] [u; v] + apply_cst (Const.Bitv.sub n) [] [u; v] let mul u v = let n = match_bitv_type u in - apply (Const.Bitv.mul n) [] [u; v] + apply_cst (Const.Bitv.mul n) [] [u; v] let udiv u v = let n = match_bitv_type u in - apply (Const.Bitv.udiv n) [] [u; v] + apply_cst (Const.Bitv.udiv n) [] [u; v] let urem u v = let n = match_bitv_type u in - apply (Const.Bitv.urem n) [] [u; v] + apply_cst (Const.Bitv.urem n) [] [u; v] let sdiv u v = let n = match_bitv_type u in - apply (Const.Bitv.sdiv n) [] [u; v] + apply_cst (Const.Bitv.sdiv n) [] [u; v] let srem u v = let n = match_bitv_type u in - apply (Const.Bitv.srem n) [] [u; v] + apply_cst (Const.Bitv.srem n) [] [u; v] let smod u v = let n = match_bitv_type u in - apply (Const.Bitv.smod n) [] [u; v] + apply_cst (Const.Bitv.smod n) [] [u; v] let shl u v = let n = match_bitv_type u in - apply (Const.Bitv.shl n) [] [u; v] + apply_cst (Const.Bitv.shl n) [] [u; v] let lshr u v = let n = match_bitv_type u in - apply (Const.Bitv.lshr 
n) [] [u; v] + apply_cst (Const.Bitv.lshr n) [] [u; v] let ashr u v = let n = match_bitv_type u in - apply (Const.Bitv.ashr n) [] [u; v] + apply_cst (Const.Bitv.ashr n) [] [u; v] let ult u v = let n = match_bitv_type u in - apply (Const.Bitv.ult n) [] [u; v] + apply_cst (Const.Bitv.ult n) [] [u; v] let ule u v = let n = match_bitv_type u in - apply (Const.Bitv.ule n) [] [u; v] + apply_cst (Const.Bitv.ule n) [] [u; v] let ugt u v = let n = match_bitv_type u in - apply (Const.Bitv.ugt n) [] [u; v] + apply_cst (Const.Bitv.ugt n) [] [u; v] let uge u v = let n = match_bitv_type u in - apply (Const.Bitv.uge n) [] [u; v] + apply_cst (Const.Bitv.uge n) [] [u; v] let slt u v = let n = match_bitv_type u in - apply (Const.Bitv.slt n) [] [u; v] + apply_cst (Const.Bitv.slt n) [] [u; v] let sle u v = let n = match_bitv_type u in - apply (Const.Bitv.sle n) [] [u; v] + apply_cst (Const.Bitv.sle n) [] [u; v] let sgt u v = let n = match_bitv_type u in - apply (Const.Bitv.sgt n) [] [u; v] + apply_cst (Const.Bitv.sgt n) [] [u; v] let sge u v = let n = match_bitv_type u in - apply (Const.Bitv.sge n) [] [u; v] + apply_cst (Const.Bitv.sge n) [] [u; v] end module Float = struct (* Floats *) let match_float_type t = - match ty t with - | { descr = App ({ builtin = Float (e,s); _ }, _); _ } -> (e,s) + match Ty.descr (ty t) with + | TyApp ({ builtin = Builtin.Float (e,s); _ }, _) -> (e,s) | _ -> raise (Wrong_type (t, Ty.float 0 0)) let fp sign exp significand = let e = Bitv.match_bitv_type exp in let s = Bitv.match_bitv_type significand in - apply (Const.Float.fp (e, s+1)) [] [sign; exp; significand] - - let roundNearestTiesToEven = apply Const.Float.roundNearestTiesToEven [] [] - let roundNearestTiesToAway = apply Const.Float.roundNearestTiesToAway [] [] - let roundTowardPositive = apply Const.Float.roundTowardPositive [] [] - let roundTowardNegative = apply Const.Float.roundTowardNegative [] [] - let roundTowardZero = apply Const.Float.roundTowardZero [] [] - - let plus_infinity e s = 
apply (Const.Float.plus_infinity (e,s)) [] [] - let minus_infinity e s = apply (Const.Float.minus_infinity (e,s)) [] [] - let plus_zero e s = apply (Const.Float.plus_zero (e,s)) [] [] - let minus_zero e s = apply (Const.Float.minus_zero (e,s)) [] [] - let nan e s = apply (Const.Float.nan (e,s)) [] [] + apply_cst (Const.Float.fp (e, s+1)) [] [sign; exp; significand] + + let roundNearestTiesToEven = apply_cst Const.Float.roundNearestTiesToEven [] [] + let roundNearestTiesToAway = apply_cst Const.Float.roundNearestTiesToAway [] [] + let roundTowardPositive = apply_cst Const.Float.roundTowardPositive [] [] + let roundTowardNegative = apply_cst Const.Float.roundTowardNegative [] [] + let roundTowardZero = apply_cst Const.Float.roundTowardZero [] [] + + let plus_infinity e s = apply_cst (Const.Float.plus_infinity (e,s)) [] [] + let minus_infinity e s = apply_cst (Const.Float.minus_infinity (e,s)) [] [] + let plus_zero e s = apply_cst (Const.Float.plus_zero (e,s)) [] [] + let minus_zero e s = apply_cst (Const.Float.minus_zero (e,s)) [] [] + let nan e s = apply_cst (Const.Float.nan (e,s)) [] [] let abs x = let es = match_float_type x in - apply (Const.Float.abs es) [] [x] + apply_cst (Const.Float.abs es) [] [x] let neg x = let es = match_float_type x in - apply (Const.Float.neg es) [] [x] + apply_cst (Const.Float.neg es) [] [x] let add rm x y = let es = match_float_type x in - apply (Const.Float.add es) [] [rm;x;y] + apply_cst (Const.Float.add es) [] [rm;x;y] let sub rm x y = let es = match_float_type x in - apply (Const.Float.sub es) [] [rm;x;y] + apply_cst (Const.Float.sub es) [] [rm;x;y] let mul rm x y = let es = match_float_type x in - apply (Const.Float.mul es) [] [rm;x;y] + apply_cst (Const.Float.mul es) [] [rm;x;y] let div rm x y = let es = match_float_type x in - apply (Const.Float.div es) [] [rm;x;y] + apply_cst (Const.Float.div es) [] [rm;x;y] let fma rm x y z = let es = match_float_type x in - apply (Const.Float.fma es) [] [rm;x;y;z] + apply_cst (Const.Float.fma 
es) [] [rm;x;y;z] let sqrt rm x = let es = match_float_type x in - apply (Const.Float.sqrt es) [] [rm;x] + apply_cst (Const.Float.sqrt es) [] [rm;x] let rem x y = let es = match_float_type x in - apply (Const.Float.rem es) [] [x;y] + apply_cst (Const.Float.rem es) [] [x;y] let roundToIntegral rm x = let es = match_float_type x in - apply (Const.Float.roundToIntegral es) [] [rm;x] + apply_cst (Const.Float.roundToIntegral es) [] [rm;x] let min x y = let es = match_float_type x in - apply (Const.Float.min es) [] [x;y] + apply_cst (Const.Float.min es) [] [x;y] let max x y = let es = match_float_type x in - apply (Const.Float.max es) [] [x;y] + apply_cst (Const.Float.max es) [] [x;y] let leq x y = let es = match_float_type x in - apply (Const.Float.leq es) [] [x;y] + apply_cst (Const.Float.leq es) [] [x;y] let lt x y = let es = match_float_type x in - apply (Const.Float.lt es) [] [x;y] + apply_cst (Const.Float.lt es) [] [x;y] let geq x y = let es = match_float_type x in - apply (Const.Float.geq es) [] [x;y] + apply_cst (Const.Float.geq es) [] [x;y] let gt x y = let es = match_float_type x in - apply (Const.Float.gt es) [] [x;y] + apply_cst (Const.Float.gt es) [] [x;y] let eq x y = let es = match_float_type x in - apply (Const.Float.eq es) [] [x;y] + apply_cst (Const.Float.eq es) [] [x;y] let isNormal x = let es = match_float_type x in - apply (Const.Float.isNormal es) [] [x] + apply_cst (Const.Float.isNormal es) [] [x] let isSubnormal x = let es = match_float_type x in - apply (Const.Float.isSubnormal es) [] [x] + apply_cst (Const.Float.isSubnormal es) [] [x] let isZero x = let es = match_float_type x in - apply (Const.Float.isZero es) [] [x] + apply_cst (Const.Float.isZero es) [] [x] let isInfinite x = let es = match_float_type x in - apply (Const.Float.isInfinite es) [] [x] + apply_cst (Const.Float.isInfinite es) [] [x] let isNaN x = let es = match_float_type x in - apply (Const.Float.isNaN es) [] [x] + apply_cst (Const.Float.isNaN es) [] [x] let isNegative x = let es 
= match_float_type x in - apply (Const.Float.isNegative es) [] [x] + apply_cst (Const.Float.isNegative es) [] [x] let isPositive x = let es = match_float_type x in - apply (Const.Float.isPositive es) [] [x] + apply_cst (Const.Float.isPositive es) [] [x] let to_real x = let es = match_float_type x in - apply (Const.Float.to_real es) [] [x] + apply_cst (Const.Float.to_real es) [] [x] let ieee_format_to_fp e s bv = - apply (Const.Float.ieee_format_to_fp (e,s)) [] [bv] + apply_cst (Const.Float.ieee_format_to_fp (e,s)) [] [bv] let to_fp e2 s2 rm x = let (e1,s1) = match_float_type x in - apply (Const.Float.to_fp (e1,s1,e2,s2)) [] [rm;x] + apply_cst (Const.Float.to_fp (e1,s1,e2,s2)) [] [rm;x] let real_to_fp e s rm r = - apply (Const.Float.real_to_fp (e,s)) [] [rm;r] + apply_cst (Const.Float.real_to_fp (e,s)) [] [rm;r] let sbv_to_fp e s rm bv = let n = Bitv.match_bitv_type bv in - apply (Const.Float.sbv_to_fp (n,e,s)) [] [rm;bv] + apply_cst (Const.Float.sbv_to_fp (n,e,s)) [] [rm;bv] let ubv_to_fp e s rm bv = let n = Bitv.match_bitv_type bv in - apply (Const.Float.ubv_to_fp (n,e,s)) [] [rm;bv] + apply_cst (Const.Float.ubv_to_fp (n,e,s)) [] [rm;bv] let to_ubv m rm x = let (e,s) = match_float_type x in - apply (Const.Float.to_ubv (e,s,m)) [] [rm;x] + apply_cst (Const.Float.to_ubv (e,s,m)) [] [rm;x] let to_sbv m rm x = let (e,s) = match_float_type x in - apply (Const.Float.to_sbv (e,s,m)) [] [rm;x] + apply_cst (Const.Float.to_sbv (e,s,m)) [] [rm;x] end module String = struct - let of_ustring s = apply (Const.String.string s) [] [] - let length s = apply Const.String.length [] [s] - let at s i = apply Const.String.at [] [s; i] - let is_digit s = apply Const.String.is_digit [] [s] - let to_code s = apply Const.String.to_code [] [s] - let of_code i = apply Const.String.of_code [] [i] - let to_int s = apply Const.String.to_int [] [s] - let of_int i = apply Const.String.of_int [] [i] - let concat s s' = apply Const.String.concat [] [s;s'] - let sub s i n = apply Const.String.sub [] 
[s; i; n] - let index_of s s' i = apply Const.String.index_of [] [s; s'; i] - let replace s pat by = apply Const.String.replace [] [s; pat; by] - let replace_all s pat by = apply Const.String.replace_all [] [s; pat; by] - let replace_re s pat by = apply Const.String.replace_re [] [s; pat; by] - let replace_re_all s pat by = apply Const.String.replace_re_all [] [s; pat; by] - let is_prefix s s' = apply Const.String.is_prefix [] [s; s'] - let is_suffix s s' = apply Const.String.is_suffix [] [s; s'] - let contains s s' = apply Const.String.contains [] [s; s'] - let lt s s' = apply Const.String.lt [] [s; s'] - let leq s s' = apply Const.String.leq [] [s; s'] - let in_re s re = apply Const.String.in_re [] [s; re] + let of_ustring s = apply_cst (Const.String.string s) [] [] + let length s = apply_cst Const.String.length [] [s] + let at s i = apply_cst Const.String.at [] [s; i] + let is_digit s = apply_cst Const.String.is_digit [] [s] + let to_code s = apply_cst Const.String.to_code [] [s] + let of_code i = apply_cst Const.String.of_code [] [i] + let to_int s = apply_cst Const.String.to_int [] [s] + let of_int i = apply_cst Const.String.of_int [] [i] + let concat s s' = apply_cst Const.String.concat [] [s;s'] + let sub s i n = apply_cst Const.String.sub [] [s; i; n] + let index_of s s' i = apply_cst Const.String.index_of [] [s; s'; i] + let replace s pat by = apply_cst Const.String.replace [] [s; pat; by] + let replace_all s pat by = apply_cst Const.String.replace_all [] [s; pat; by] + let replace_re s pat by = apply_cst Const.String.replace_re [] [s; pat; by] + let replace_re_all s pat by = apply_cst Const.String.replace_re_all [] [s; pat; by] + let is_prefix s s' = apply_cst Const.String.is_prefix [] [s; s'] + let is_suffix s s' = apply_cst Const.String.is_suffix [] [s; s'] + let contains s s' = apply_cst Const.String.contains [] [s; s'] + let lt s s' = apply_cst Const.String.lt [] [s; s'] + let leq s s' = apply_cst Const.String.leq [] [s; s'] + let in_re s re = 
apply_cst Const.String.in_re [] [s; re] module RegLan = struct - let empty = apply Const.String.Reg_Lang.empty [] [] - let all = apply Const.String.Reg_Lang.all [] [] - let allchar = apply Const.String.Reg_Lang.allchar [] [] - let of_string s = apply Const.String.Reg_Lang.of_string [] [s] - let range s s' = apply Const.String.Reg_Lang.range [] [s; s'] - let concat re re' = apply Const.String.Reg_Lang.concat [] [re; re'] - let union re re' = apply Const.String.Reg_Lang.union [] [re; re'] - let inter re re' = apply Const.String.Reg_Lang.inter [] [re; re'] - let diff re re' = apply Const.String.Reg_Lang.diff [] [re; re'] - let star re = apply Const.String.Reg_Lang.star [] [re] - let cross re = apply Const.String.Reg_Lang.cross [] [re] - let complement re = apply Const.String.Reg_Lang.complement [] [re] - let option re = apply Const.String.Reg_Lang.option [] [re] - let power n re = apply (Const.String.Reg_Lang.power n) [] [re] - let loop n1 n2 re = apply (Const.String.Reg_Lang.loop (n1, n2)) [] [re] + let empty = apply_cst Const.String.Reg_Lang.empty [] [] + let all = apply_cst Const.String.Reg_Lang.all [] [] + let allchar = apply_cst Const.String.Reg_Lang.allchar [] [] + let of_string s = apply_cst Const.String.Reg_Lang.of_string [] [s] + let range s s' = apply_cst Const.String.Reg_Lang.range [] [s; s'] + let concat re re' = apply_cst Const.String.Reg_Lang.concat [] [re; re'] + let union re re' = apply_cst Const.String.Reg_Lang.union [] [re; re'] + let inter re re' = apply_cst Const.String.Reg_Lang.inter [] [re; re'] + let diff re re' = apply_cst Const.String.Reg_Lang.diff [] [re; re'] + let star re = apply_cst Const.String.Reg_Lang.star [] [re] + let cross re = apply_cst Const.String.Reg_Lang.cross [] [re] + let complement re = apply_cst Const.String.Reg_Lang.complement [] [re] + let option re = apply_cst Const.String.Reg_Lang.option [] [re] + let power n re = apply_cst (Const.String.Reg_Lang.power n) [] [re] + let loop n1 n2 re = apply_cst 
(Const.String.Reg_Lang.loop (n1, n2)) [] [re] end end - (* Wrappers for the tff typechecker *) - let all _ (tys, ts) body = - if Ty.(equal prop) (ty body) then mk_bind (Forall (tys, ts)) body - else raise (Wrong_type (body, Ty.prop)) - - let ex _ (tys, ts) body = - if Ty.(equal prop) (ty body) then mk_bind (Exists (tys, ts)) body - else raise (Wrong_type (body, Ty.prop)) + (* If-then-else *) let ite cond t_then t_else = let ty = ty t_then in - apply Const.ite [ty] [cond; t_then; t_else] + apply_cst Const.ite [ty] [cond; t_then; t_else] - (* let-bindings *) + (* Let-bindings *) let bind v t = - let () = Id.tag v Tags.bound t in + let () = Id.set_tag v Tags.bound t in of_var v - let letin l body = - List.iter (fun ((v : Var.t), t) -> - if not (Ty.equal v.ty (ty t)) then raise (Wrong_type (t, v.ty)) - ) l; - mk_bind (Letin l) body - - - end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/expr.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/expr.mli index 2c51c4a5ede0bedb33b19f7ab8ec23c4a024795e..829d4471f7b09a7d2e4e1bb0d6dbeceae985bc76 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/expr.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/expr.mli @@ -16,732 +16,106 @@ type hash = private int type index = private int type 'a tag = 'a Tag.t -type builtin = .. -(* Extensible variant type for builtin operations. Encodes in its type - arguments the lengths of the expected ty and term arguments respectively. *) +(** {3 Type definitions} *) -type ttype = Type -(** The type of types. *) - -type 'ty id = private { - ty : 'ty; - name : string; - index : index; (** unique *) +type builtin = < + ty : ty; + ty_var : ty_var; + ty_cst : ty_cst; + term : term; + term_var : term_var; + term_cst : term_cst; +> Builtin.t +(** Extensible variant type for builtin operations. *) + +and 'ty id = private { + id_ty : 'ty; + index : index; (** unique index *) + path : Path.t; builtin : builtin; mutable tags : Tag.map; } (** The type of identifiers. 
['ty] is the type for representing the type of - the id, ['ty] and ['t_n] are the lengths of arguments as described by - the {builtin} type. *) + the id. *) -type ('ttype, 'ty) function_type = { - fun_vars : 'ttype id list; (* prenex forall *) - fun_args : 'ty list; - fun_ret : 'ty; -} -(** The type for representing function types. *) +and type_ = Type +and type_fun = { + arity : int; + mutable alias : type_alias; +} -(** {3 Types} *) +and type_alias = + | No_alias + | Alias of { + alias_vars : ty_var list; + alias_body : ty; + } -type ty_var = ttype id +and ty_var = type_ id (** Abbreviation for type variables. *) -and ty_const = (unit, ttype) function_type id +and ty_cst = type_fun id (** Type symbols have the expected length of their argument encoded. *) and ty_descr = - | Var of ty_var (** Type variables *) - | App of ty_const * ty list (** Application *) + | TyVar of ty_var (** Type variables *) + | TyApp of ty_cst * ty list (** Application *) + | Arrow of ty list * ty (** Function type *) + | Pi of ty_var list * ty (** Type quantification *) (** Type descriptions. *) -and ty = { - as_ : ty_var option; - mutable descr : ty_descr; - mutable hash : hash; (* lazy hash *) - mutable tags : Tag.map; +and ty = private { + mutable ty_hash : hash; (* lazy hash *) + mutable ty_tags : Tag.map; + mutable ty_descr : ty_descr; + mutable ty_head : ty; } (** Types, which wrap type description with a memoized hash and some tags. *) -(** {3 Terms and formulas} *) - -type term_var = ty id +and term_var = ty id (** Term variables *) -and term_const = (ttype, ty) function_type id +and term_cst = ty id (** Term symbols, which encode their expected type and term argument lists lengths. 
*) and pattern = term (** patterns are simply terms *) and term_descr = - | Var of term_var (** Variables *) - | App of term_const * ty list * term list (** Application *) - | Binder of binder * term (** Binders *) - | Match of term * (pattern * term) list (** Pattern matching *) + | Var of term_var (** Variables *) + | Cst of term_cst (** Constants *) + | App of term * ty list * term list (** Application *) + | Binder of binder * term (** Binders *) + | Match of term * (pattern * term) list (** Pattern matching *) (** Term descriptions. *) and binder = - | Exists of ty_var list * term_var list - | Forall of ty_var list * term_var list - | Letin of (term_var * term) list (**) + | Let_seq of (term_var * term) list + | Let_par of (term_var * term) list + | Lambda of ty_var list * term_var list + | Exists of ty_var list * term_var list + | Forall of ty_var list * term_var list (**) (** Binders. *) and term = { - ty : ty; - descr : term_descr; - mutable hash : hash; - mutable tags : Tag.map; + term_ty : ty; + term_descr : term_descr; + mutable term_hash : hash; + mutable term_tags : Tag.map; } (** Term, which wrap term descriptions. *) -type formula = term +and formula = term (** Alias for signature compatibility (with Dolmen_loop.Pipes.Make for instance). *) (** {2 Exceptions} *) (* ************************************************************************* *) -exception Bad_ty_arity of ty_const * ty list -exception Bad_term_arity of term_const * ty list * term list -exception Type_already_defined of ty_const - -exception Filter_failed_ty of string * ty * string -exception Filter_failed_term of string * term * string - -(* {2 Builtins} *) -(* ************************************************************************* *) - -(** This section presents the builtins that are defined by Dolmen. 
- - Users are encouraged to match builtins rather than specific symbols, - as this basically allows to match on the semantics of an identifier - rather than matching on the syntaxic value of an identifier. For - instance, equality can take an arbitrary number of arguments, and thus - in order to have well-typed terms, each arity of equality gives rise to - a different symbol (because the symbol's type depends on the arity - desired), but all these symbols have the [Equal] builtin. - - In the following we will use pseudo-code to describe the arity and - actual type associated to symbols. These will follow ocaml's notation - for types with an additional syntax using dots for arbitrary arity. - Some examples: - - [ttype] is a type constant - - [ttype -> ttype] is a type constructor (e.g. [list]) - - [int] is a constant of type [int] - - [float -> int] is a unary function - - ['a. 'a -> 'a] is a polymorphic unary function - - ['a. 'a -> ... -> Prop] describes a family of functions that take - a type and then an arbitrary number of arguments of that type, and - return a proposition (this is for instance the type of equality). - - Additionally, due to some languages having overloaded operators, and in - order to not have too verbose names, some of these builtins may have - ovreloaded signtures, such as comparisons on numbers which can operate - on integers, rationals, or reals. Note that arbitrary arity operators - (well family of operators) can be also be seen as overloaded operators. - Overloaded types (particularly for numbers) are written: - - [{a=(Int|Rational|Real)} a -> a -> Prop], which the notable difference - form polymorphic function that this functions of this type does not - take a type argument. - - Finally, remember that expressions are polymorphic and that type arguments - are explicit. -*) - -type builtin += - | Base - (** The base builtin; it is the default builtin for identifiers. 
*) - | Wildcard - (** Wildcards, currently used internally to represent implicit type - variables during type-checking. *) - -type builtin += - | Prop - (** [Prop: ttype]: the builtin type constant for the type of - propositions / booleans. *) - | Unit - (** The unit type, which has only one element (named void). *) - | Univ - (** [Univ: ttype]: a builtin type constant used for languages - with a default type for elements (such as tptp's `$i`). *) - -type builtin += - | Coercion - (** [Coercion: 'a 'b. 'a -> 'b]: - Coercion/cast operator, i.e. allows to cast values of some type to - another type. This is a polymorphic operator that takes two type - arguments [a] and [b], a value of type [a], and returns a value of - type [b]. - The interpretation/semantics of this cast can remain - up to the user. This operator is currently mainly used to cast - numeric types when this transormation is exact (i.e. an integer - casted into a rational, which is always possible and exact, - or the cast of a rational into an integer, as long as the cast is - guarded by a clause verifying the rational is an integer). *) - -type builtin += - | True (** [True: Prop]: the [true] proposition. *) - | False (** [False: Prop]: the [false] proposition. *) - | Equal (** [Equal: 'a. 'a -> ... -> Prop]: equality beetween values. *) - | Distinct (** [Distinct: 'a. 'a -> ... -> Prop]: pairwise dis-equality beetween arguments. *) - | Neg (** [Neg: Prop -> Prop]: propositional negation. *) - | And (** [And: Prop -> Prop]: propositional conjunction. *) - | Or (** [Or: Prop -> ... -> Prop]: propositional disjunction. *) - | Nand (** [Nand: Prop -> Prop -> Prop]: propositional negated conjunction. *) - | Nor (** [Nor: Prop -> Prop -> Prop]: propositional negated disjunction. *) - | Xor (** [Xor: Prop -> Prop -> Prop]: ppropositional exclusive disjunction. *) - | Imply (** [Imply: Prop -> Prop -> Prop]: propositional implication. *) - | Equiv (** [Equiv: Prop -> Prop -> Prop]: propositional Equivalence. 
*) - -type builtin += - | Ite - (** [Ite: 'a. Prop -> 'a -> 'a -> 'a]: branching operator. *) - -type builtin += - | Tester of { - cstr : term_const; - } - (** [Tester { cstr; }] is the tester for constructor [cstr]. *) - | Constructor of { - adt : ty_const; - case : int; - } - (** [Constructor { adt; case}] is the case-th constructor of the algebraic - datatype defined by [adt]. *) - | Destructor of { - adt : ty_const; - cstr : term_const; - case : int; - field: int; - } - (** [Destructor { adt; cstr; case; field; }] is the destructor retuning the - field-th argument of the case-th constructor of type [adt] which should - be [cstr]. *) - -type builtin += - | Int - (** [Int: ttype] the type for signed integers of arbitrary precision. *) - | Integer of string - (** [Integer s: Int]: integer litteral. The string [s] should be the - decimal representation of an integer with arbitrary precision (hence - the use of strings rather than the limited precision [int]). *) - | Rat - (** [Rat: ttype] the type for signed rationals. *) - | Rational of string - (** [Rational s: Rational]: rational litteral. The string [s] should be - the decimal representation of a rational (see the various languages - spec for more information). *) - | Real - (** [Real: ttype] the type for signed reals. *) - | Decimal of string - (** [Decimal s: Real]: real litterals. The string [s] should be a - floating point representatoin of a real. Not however that reals - here means the mathematical abstract notion of real numbers, including - irrational, non-algebric numbers, and is thus not restricted to - floating point numbers, although these are the only litterals - supported. *) - | Lt - (** [Lt: {a=(Int|Rational|Real)} a -> a -> Prop]: - strict comparison (less than) on numbers - (whether integers, rationals, or reals). *) - | Leq - (** [Leq:{a=(Int|Rational|Real)} a -> a -> Prop]: - large comparison (less or equal than) on numbers - (whether integers, rationals, or reals). 
*) - | Gt - (** [Gt:{a=(Int|Rational|Real)} a -> a -> Prop]: - strict comparison (greater than) on numbers - (whether integers, rationals, or reals). *) - | Geq - (** [Geq:{a=(Int|Rational|Real)} a -> a -> Prop]: - large comparison (greater or equal than) on numbers - (whether integers, rationals, or reals). *) - | Minus - (** [Minus:{a=(Int|Rational|Real)} a -> a]: - arithmetic unary negation/minus on numbers - (whether integers, rationals, or reals). *) - | Add - (** [Add:{a=(Int|Rational|Real)} a -> a -> a]: - arithmetic addition on numbers - (whether integers, rationals, or reals). *) - | Sub - (** [Sub:{a=(Int|Rational|Real)} a -> a -> a]: - arithmetic substraction on numbers - (whether integers, rationals, or reals). *) - | Mul - (** [Mul:{a=(Int|Rational|Real)} a -> a -> a]: - arithmetic multiplication on numbers - (whether integers, rationals, or reals). *) - | Div - (** [Div:{a=(Rational|Real)} a -> a -> a]: - arithmetic exact division on numbers - (rationals, or reals, but **not** integers). *) - | Div_e - (** [Div_e:{a=(Int|Rational|Real)} a -> a -> a]: - arithmetic integer euclidian quotient - (whether integers, rationals, or reals). - If D is positive then [Div_e (N,D)] is the floor - (in the type of N and D) of the real division [N/D], - and if D is negative then [Div_e (N,D)] is the ceiling - of [N/D]. *) - | Div_t - (** [Div_t:{a=(Int|Rational|Real)} a -> a -> a]: - arithmetic integer truncated quotient - (whether integers, rationals, or reals). - [Div_t (N,D)] is the truncation of the real - division [N/D]. *) - | Div_f - (** [Div_f:{a=(Int|Rational|Real)} a -> a -> a]: - arithmetic integer floor quotient - (whether integers, rationals, or reals). - [Div_t (N,D)] is the floor of the real - division [N/D]. *) - | Modulo_e - (** [Modulo_e:{a=(Int|Rational|Real)} a -> a -> a]: - arithmetic integer euclidian remainder - (whether integers, rationals, or reals). - It is defined by the following equation: - [Div_e (N, D) * D + Modulo(N, D) = N]. 
*) - | Modulo_t - (** [Modulo_t:{a=(Int|Rational|Real)} a -> a -> a]: - arithmetic integer truncated remainder - (whether integers, rationals, or reals). - It is defined by the following equation: - [Div_t (N, D) * D + Modulo_t(N, D) = N]. *) - | Modulo_f - (** [Modulo_f:{a=(Int|Rational|Real)} a -> a -> a]: - arithmetic integer floor remainder - (whether integers, rationals, or reals). - It is defined by the following equation: - [Div_f (N, D) * D + Modulo_f(N, D) = N]. *) - | Abs - (** [Abs: Int -> Int]: - absolute value on integers. *) - | Divisible - (** [Divisible: Int -> Int -> Prop]: - divisibility predicate on integers. Smtlib restricts - applications of this predicate to have a litteral integer - for the divisor/second argument. *) - | Is_int - (** [Is_int:{a=(Int|Rational|Real)} a -> Prop]: - integer predicate for numbers: is the given number - an integer. *) - | Is_rat - (** [Is_rat:{a=(Int|Rational|Real)} a -> Prop]: - rational predicate for numbers: is the given number - an rational. *) - | Floor - (** [Floor:{a=(Int|Rational|Real)} a -> a]: - floor function on numbers, defined in tptp as - the largest intger not greater than the argument. *) - | Ceiling - (** [Ceiling:{a=(Int|Rational|Real)} a -> a]: - ceiling function on numbers, defined in tptp as - the smallest intger not less than the argument. *) - | Truncate - (** [Truncate:{a=(Int|Rational|Real)} a -> a]: - ceiling function on numbers, defined in tptp as - the nearest integer value with magnitude not greater - than the absolute value of the argument. *) - | Round - (** [Round:{a=(Int|Rational|Real)} a -> a]: - rounding function on numbers, defined in tptp as - the nearest intger to the argument; when the argument - is halfway between two integers, the nearest even integer - to the argument. *) - -(* arrays *) -type builtin += - | Array - (** [Array: ttype -> ttype -> ttype]: the type constructor for - polymorphic functional arrays. 
An [(src, dst) Array] is an array - from expressions of type [src] to expressions of type [dst]. - Typically, such arrays are immutables. *) - | Store - (** [Store: 'a 'b. ('a, 'b) Array -> 'a -> 'b -> ('a, 'b) Array]: - store operation on arrays. Returns a new array with the key bound - to the given value (shadowing the previous value associated to - the key). *) - | Select - (** [Select: 'a 'b. ('a, 'b) Array -> 'a -> 'b]: - select operation on arrays. Returns the value associated to the - given key. Typically, functional arrays are complete, i.e. all - keys are mapped to a value. *) - -(* Bitvectors *) -type builtin += - | Bitv of int - (** [Bitv n: ttype]: type constructor for bitvectors of length [n]. *) - | Bitvec of string - (** [Bitvec s: Bitv]: bitvector litteral. The string [s] should - be a binary representation of bitvectors using characters - ['0'], and ['1'] (lsb last) *) - | Bitv_concat - (** [Bitv_concat: Bitv(n) -> Bitv(m) -> Bitv(n+m)]: - concatenation operator on bitvectors. *) - | Bitv_extract of int * int - (** [Bitv_extract(i, j): Bitv(n) -> Bitv(i - j + 1)]: - bitvector extraction, from index [j] up to [i] (both included). *) - | Bitv_repeat - (** [Bitv_repeat: Bitv(n) -> Bitv(n*k)]: - bitvector repeatition. NOTE: inlcude [k] in the builtin ? *) - | Bitv_zero_extend - (** [Bitv_zero_extend: Bitv(n) -> Bitv(n + k)]: - zero extension for bitvectors (produces a representation of the - same unsigned integer). *) - | Bitv_sign_extend - (** [Bitv_sign_extend: Bitv(n) -> Bitv(n + k)]: - sign extension for bitvectors ((produces a representation of the - same signed integer). *) - | Bitv_rotate_right of int - (** [Bitv_rotate_right(i): Bitv(n) -> Bitv(n)]: - logical rotate right for bitvectors by [i]. *) - | Bitv_rotate_left of int - (** [Bitv_rotate_left(i): Bitv(n) -> Bitv(n)]: - logical rotate left for bitvectors by [i]. *) - | Bitv_not - (** [Bitv_not: Bitv(n) -> Bitv(n)]: - bitwise negation for bitvectors. 
*) - | Bitv_and - (** [Bitv_and: Bitv(n) -> Bitv(n) -> Bitv(n)]: - bitwise conjunction for bitvectors. *) - | Bitv_or - (** [bitv_or: Bitv(n) -> Bitv(n) -> Bitv(n)]: - bitwise disjunction for bitvectors. *) - | Bitv_nand - (** [Bitv_nand: Bitv(n) -> Bitv(n) -> Bitv(n)]: - bitwise negated conjunction for bitvectors. - [Bitv_nand s t] abbreviates [Bitv_not (Bitv_and s t))]. *) - | Bitv_nor - (** [Bitv_nor: Bitv(n) -> Bitv(n) -> Bitv(n)]: - bitwise negated disjunction for bitvectors. - [Bitv_nor s t] abbreviates [Bitv_not (Bitv_or s t))]. *) - | Bitv_xor - (** [Bitv_xor: Bitv(n) -> Bitv(n) -> Bitv(n)]: - bitwise exclusive disjunction for bitvectors. - [Bitv_xor s t] abbreviates - [Bitv_or (Bitv_and s (Bitv_not t)) - (Bitv_and (Bitv_not s) t) ]. *) - | Bitv_xnor - (** [Bitv_xnor: Bitv(n) -> Bitv(n) -> Bitv(n)]: - bitwise negated exclusive disjunction for bitvectors. - [Bitv_xnor s t] abbreviates - [Bitv_or (Bitv_and s t) - (Bitv_and (Bitv_not s) (Bitv_not t))]. *) - | Bitv_comp - (** [Bitv_comp: Bitv(n) -> Bitv(n) -> Bitv(1)]: - Returns the constant bitvector ["1"] is all bits are equal, - and the bitvector ["0"] if not. *) - | Bitv_neg - (** [Bitv_neg: Bitv(n) -> Bitv(n)]: - 2's complement unary minus. *) - | Bitv_add - (** [Bitv_add: Bitv(n) -> Bitv(n) -> Bitv(n)]: - addition modulo 2^n. *) - | Bitv_sub - (** [Bitv_sub: Bitv(n) -> Bitv(n) -> Bitv(n)]: - 2's complement subtraction modulo 2^n. *) - | Bitv_mul - (** [Bitv_mul: Bitv(n) -> Bitv(n) -> Bitv(n)]: - multiplication modulo 2^n. *) - | Bitv_udiv - (** [Bitv_udiv: Bitv(n) -> Bitv(n) -> Bitv(n)]: - unsigned division, truncating towards 0. *) - | Bitv_urem - (** [Bitv_urem: Bitv(n) -> Bitv(n) -> Bitv(n)]: - unsigned remainder from truncating division. *) - | Bitv_sdiv - (** [Bitv_sdiv: Bitv(n) -> Bitv(n) -> Bitv(n)]: - 2's complement signed division. *) - | Bitv_srem - (** [Bitv_srem: Bitv(n) -> Bitv(n) -> Bitv(n)]: - 2's complement signed remainder (sign follows dividend). 
*) - | Bitv_smod - (** [Bitv_smod: Bitv(n) -> Bitv(n) -> Bitv(n)]: - 2's complement signed remainder (sign follows divisor). *) - | Bitv_shl - (** [Bitv_shl: Bitv(n) -> Bitv(n) -> Bitv(n)]: - shift left (equivalent to multiplication by 2^x where x - is the value of the second argument). *) - | Bitv_lshr - (** [Bitv_lshr: Bitv(n) -> Bitv(n) -> Bitv(n)]: - logical shift right (equivalent to unsigned division by 2^x, - where x is the value of the second argument). *) - | Bitv_ashr - (** [Bitv_ashr: Bitv(n) -> Bitv(n) -> Bitv(n)]: - Arithmetic shift right, like logical shift right except that - the most significant bits of the result always copy the most - significant bit of the first argument. *) - | Bitv_ult - (** [Bitv_ult: Bitv(n) -> Bitv(n) -> Prop]: - binary predicate for unsigned less-than. *) - | Bitv_ule - (** [Bitv_ule: Bitv(n) -> Bitv(n) -> Prop]: - binary predicate for unsigned less than or equal. *) - | Bitv_ugt - (** [Bitv_ugt: Bitv(n) -> Bitv(n) -> Prop]: - binary predicate for unsigned greater-than. *) - | Bitv_uge - (** [Bitv_uge: Bitv(n) -> Bitv(n) -> Prop]: - binary predicate for unsigned greater than or equal. *) - | Bitv_slt - (** [Bitv_slt: Bitv(n) -> Bitv(n) -> Prop]: - binary predicate for signed less-than. *) - | Bitv_sle - (** [Bitv_sle: Bitv(n) -> Bitv(n) -> Prop]: - binary predicate for signed less than or equal. *) - | Bitv_sgt - (** [Bitv_sgt: Bitv(n) -> Bitv(n) -> Prop]: - binary predicate for signed greater-than. *) - | Bitv_sge - (** [Bitv_sge: Bitv(n) -> Bitv(n) -> Prop]: - binary predicate for signed greater than or equal. *) - -(* Floats *) -type builtin += - | RoundingMode - (** [RoundingMode: ttype]: type for enumerated type of rounding modes. 
*) - | RoundNearestTiesToEven - (** [RoundNearestTiesToEven : RoundingMode]: *) - | RoundNearestTiesToAway - (** [RoundNearestTiesToAway : RoundingMode]: *) - | RoundTowardPositive - (** [RoundTowardPositive : RoundingMode *) - | RoundTowardNegative - (** [RoundTowardNegative : RoundingMode *) - | RoundTowardZero - (** [RoundTowardZero : RoundingMode *) - | Float of int * int - (** [Float(e,s): ttype]: type constructor for floating point of exponent of - size [e] and significand of size [s] (hidden bit included). Those size are - greater than 1 *) - | Fp of int * int - (** [Fp(e, s): Bitv(1) -> Bitv(e) -> Bitv(s-1) -> Fp(e,s)]: bitvector literal. - The IEEE-format is used for the conversion [sb^se^ss]. - All the NaN are converted to the same value. *) - | Plus_infinity of int * int - (** [Plus_infinity(s,e) : Fp(s,e)] *) - | Minus_infinity of int * int - (** [Minus_infinity(s,e) : Fp(s,e)] *) - | Plus_zero of int * int - (** [Plus_zero(s,e) : Fp(s,e)] *) - | Minus_zero of int * int - (** [Minus_zero(s,e) : Fp(s,e)] *) - | NaN of int * int - (** [NaN(s,e) : Fp(s,e)] *) - | Fp_abs of int * int - (** [Fp_abs(s,e): Fp(s,e) -> Fp(s,e)]: absolute value *) - | Fp_neg of int * int - (** [Fp_neg(s,e): Fp(s,e) -> Fp(s,e)]: negation *) - | Fp_add of int * int - (** [Fp_add(s,e): RoundingMode -> Fp(s,e) -> Fp(s,e) -> Fp(s,e)]: addition *) - | Fp_sub of int * int - (** [Fp_sub(s,e): RoundingMode -> Fp(s,e) -> Fp(s,e) -> Fp(s,e)]: subtraction *) - | Fp_mul of int * int - (** [Fp_mul(s,e): RoundingMode -> Fp(s,e) -> Fp(s,e) -> Fp(s,e)]: multiplication *) - | Fp_div of int * int - (** [Fp_div(s,e): RoundingMode -> Fp(s,e) -> Fp(s,e) -> Fp(s,e)]: division *) - | Fp_fma of int * int - (** [Fp_fma(s,e): RoundingMode -> Fp(s,e) -> Fp(s,e)]: fuse multiply add *) - | Fp_sqrt of int * int - (** [Fp_sqrt(s,e): RoundingMode -> Fp(s,e) -> Fp(s,e)]: square root *) - | Fp_rem of int * int - (** [Fp_rem(s,e): Fp(s,e) -> Fp(s,e) -> Fp(s,e)]: remainder *) - | Fp_roundToIntegral of int * int - 
(** [Fp_roundToIntegral(s,e): RoundingMode -> Fp(s,e) -> Fp(s,e)]: round to integral *) - | Fp_min of int * int - (** [Fp_min(s,e): Fp(s,e) -> Fp(s,e) -> Fp(s,e)]: minimum *) - | Fp_max of int * int - (** [Fp_max(s,e): Fp(s,e) -> Fp(s,e) -> Fp(s,e)]: maximum *) - | Fp_leq of int * int - (** [Fp_leq(s,e): Fp(s,e) -> Fp(s,e) -> Prop]: IEEE less or equal *) - | Fp_lt of int * int - (** [Fp_lt(s,e): Fp(s,e) -> Fp(s,e) -> Prop]: IEEE less than *) - | Fp_geq of int * int - (** [Fp_geq(s,e): Fp(s,e) -> Fp(s,e) -> Prop]: IEEE greater or equal *) - | Fp_gt of int * int - (** [Fp_gt(s,e): Fp(s,e) -> Fp(s,e) -> Prop]: IEEE greater than *) - | Fp_eq of int * int - (** [Fp_eq(s,e): Fp(s,e) -> Fp(s,e) -> Prop]: IEEE equality *) - | Fp_isNormal of int * int - (** [Fp_isNormal(s,e): Fp(s,e) -> Prop]: test if it is a normal floating point *) - | Fp_isSubnormal of int * int - (** [Fp_isSubnormal(s,e): Fp(s,e) -> Prop]: test if it is a subnormal floating point *) - | Fp_isZero of int * int - (** [Fp_isZero(s,e): Fp(s,e) -> Prop]: test if it is a zero *) - | Fp_isInfinite of int * int - (** [Fp_isInfinite(s,e): Fp(s,e) -> Prop]: test if it is an infinite *) - | Fp_isNaN of int * int - (** [Fp_isNaN(s,e): Fp(s,e) -> Prop]: test if it is Not a Number *) - | Fp_isNegative of int * int - (** [Fp_isNegative(s,e): Fp(s,e) -> Prop]: test if it is negative *) - | Fp_isPositive of int * int - (** [Fp_isPositive(s,e): Fp(s,e) -> Prop]: test if it is positive *) - | Ieee_format_to_fp of int * int - (** [Ieee_format_to_fp(s,e): Bv(s+e) -> Fp(s,e)]: Convert from IEEE interchange format *) - | Fp_to_fp of int * int * int * int - (** [Fp_to_fp(s1,e1,s2,e2): RoundingMode -> Fp(s1,e1) -> Fp(s2,e2)]: Convert from another floating point format *) - | Real_to_fp of int * int - (** [Real_to_fp(s,e): RoundingMode -> Real -> Fp(s,e)]: Convert from a real *) - | Sbv_to_fp of int * int * int - (** [Sbv_to_fp(m,s,e): RoundingMode -> Bitv(m) -> Fp(s,e)]: Convert from a signed integer *) - | Ubv_to_fp of int * 
int * int - (** [Ubv_to_fp(m,s,e): RoundingMode -> Bitv(m) -> Fp(s,e)]: Convert from a unsigned integer *) - | To_ubv of int * int * int - (** [To_ubv(s,e,m): RoundingMode -> Fp(s,e) -> Bitv(m)]: Convert to an unsigned integer *) - | To_sbv of int * int * int - (** [To_ubv(s,e,m): RoundingMode -> Fp(s,e) -> Bitv(m)]: Convert to an signed integer *) - | To_real of int * int - (** [To_real(s,e,m): RoundingMode -> Fp(s,e) -> Real]: Convert to real *) - -(* Strings *) -type builtin += - | String - (** [String: ttype]: type constructor for strings. *) - | Str of string - (** [Str s: String]: string literals. *) - | Str_length - (** [Str_length: String -> Int]: string length. *) - | Str_at - (** [Str_at: String -> Int -> String]: - Singleton string containing a character at given position - or empty string when position is out of range. - The leftmost position is 0. *) - | Str_to_code - (** [Str_to_code: String -> Int]: - [Str_to_code s] is the code point of the only character of s, - if s is a singleton string; otherwise, it is -1. *) - | Str_of_code - (** [Str_of_code: Int -> String]: - [Str_of_code n] is the singleton string whose only character is - code point n if n is in the range [0, 196607]; otherwise, it is the - empty string. *) - | Str_is_digit - (** [Str_is_digit: String -> Prop]: Digit check - [Str.is_digit s] is true iff s consists of a single character which is - a decimal digit, that is, a code point in the range 0x0030 ... 0x0039. *) - | Str_to_int - (** [Str_to_int: String -> Int]: Conversion to integers - [Str.to_int s] with s consisting of digits (in the sense of str.is_digit) - evaluates to the positive integer denoted by s when seen as a number in - base 10 (possibly with leading zeros). - It evaluates to -1 if s is empty or contains non-digits. *) - | Str_of_int - (** [Str_of_int : Int -> String]: Conversion from integers. - [Str.from_int n] with n non-negative is the corresponding string in - decimal notation, with no leading zeros. 
If n < 0, it is the empty string. *) - | Str_concat - (** [Str_concat: String -> String -> String]: string concatenation. *) - | Str_sub - (** [Str_sub: String -> Int -> Int -> String]: - [Str_sub s i n] evaluates to the longest (unscattered) substring - of s of length at most n starting at position i. - It evaluates to the empty string if n is negative or i is not in - the interval [0,l-1] where l is the length of s. *) - | Str_index_of - (** [Str_index_of: String -> String -> Int -> Int]: - Index of first occurrence of second string in first one starting at - the position specified by the third argument. - [Str_index_of s t i], with 0 <= i <= |s| is the position of the first - occurrence of t in s at or after position i, if any. - Otherwise, it is -1. Note that the result is i whenever i is within - the range [0, |s|] and t is empty. *) - | Str_replace - (** [Str_replace: String -> String -> String -> String]: Replace - [Str_replace s t t'] is the string obtained by replacing the first - occurrence of t in s, if any, by t'. Note that if t is empty, the - result is to prepend t' to s; also, if t does not occur in s then - the result is s. *) - | Str_replace_all - (** [Str_replace_all: String -> String -> String -> String]: - [Str_replace_all s t tâ] is s if t is the empty string. Otherwise, it - is the string obtained from s by replacing all occurrences of t in s - by tâ, starting with the first occurrence and proceeding in - left-to-right order. *) - | Str_replace_re - (** [Str_replace_re: String -> String_RegLan -> String -> String]: - [Str_replace_re s r t] is the string obtained by replacing the - shortest leftmost non-empty match of r in s, if any, by t. - Note that if t is empty, the result is to prepend t to s. *) - | Str_replace_re_all - (** [Str_replace_re_all: String -> String_RegLan -> String -> String]: - [Str_replace_re_all s r t] is the string obtained by replacing, - left-to right, each shortest *non-empty* match of r in s by t. 
*) - | Str_is_prefix - (** [Str_is_prefix: String -> String -> Prop]: Prefix check - [Str_is_prefix s t] is true iff s is a prefix of t. *) - | Str_is_suffix - (** [Str_is_suffix: String -> String -> Prop]: Suffix check - [Str_is_suffix s t] is true iff s is a suffix of t. *) - | Str_contains - (** [Str_contains: String -> String -> Prop]: Inclusion check - [Str_contains s t] is true iff s contains t. *) - | Str_lexicographic_strict - (** [Str_lexicographic_strict: String -> String -> Prop]: - lexicographic ordering (strict). *) - | Str_lexicographic_large - (** [Str_lexicographic_large: String -> String -> Prop]: - reflexive closure of the lexicographic ordering. *) - | Str_in_re - (** [Str_in_re: String -> String_RegLan -> Prop]: set membership. *) - -(* String Regular languages *) -type builtin += - | String_RegLan - (** [String_RegLan: ttype]: - type constructor for Regular languages over strings. *) - | Re_empty - (** [Re_empty: String_RegLan]: - the empty language. *) - | Re_all - (** [Re_all: String_RegLan]: - the language of all strings. *) - | Re_allchar - (** [Re_allchar: String_RegLan]: - the language of all singleton strings. *) - | Re_of_string - (** [Re_of_string: String -> String_RegLan]: - the singleton language with a single string. *) - | Re_range - (** [Re_range: String -> String -> String_RegLan]: Language range - [Re_range s1 s2] is the set of all *singleton* strings [s] such that - [Str_lexicographic_large s1 s s2] provided [s1] and [s1] are singleton. - Otherwise, it is the empty language. *) - | Re_concat - (** [Re_concat: String_RegLan -> String_RegLan -> String_RegLan]: - language concatenation. *) - | Re_union - (** [Re_union: String_RegLan -> String_RegLan -> String_RegLan]: - language union. *) - | Re_inter - (** [Re_inter: String_RegLan -> String_RegLan -> String_RegLan]: - language intersection. *) - | Re_star - (** [Re_star: String_RegLan -> String_RegLan]: Kleen star. 
*) - | Re_cross - (** [Re_cross: String_RegLan -> String_RegLan]: Kleen cross. *) - | Re_complement - (** [Re_complement: String_RegLan -> String_RegLan]: language complement. *) - | Re_diff - (** [Re_diff: String_RegLan -> String_RegLan -> String_RegLan]: - language difference. *) - | Re_option - (** [Re_option: String_RegLan -> String_RegLan]: language option - [Re_option e] abbreviates [Re_union e (Str_to_re "")]. *) - | Re_power of int - (** [Re_power(n): String_RegLan -> String_RegLan]: - [Re_power(n) e] is the nth power of e: - - [Re_power(0) e] is [Str_to_re ""] - - [Re_power(n+1) e] is [Re_concat e (Re_power(n) e)] *) - | Re_loop of int * int - (** [Re_loop(n1,n2): String_RegLan -> String_RegLan]: - Defined as: - - [Re_loop(nâ, nâ) e] is [Re_empty] if nâ > nâ - - [Re_loop(nâ, nâ) e] is [Re_power(nâ) e] if nâ = nâ - - [Re_loop(nâ, nâ) e] is - [Re_union ((Re_power(nâ) e) ... (Re_power(nâ) e))] if nâ < nâ - *) +exception Already_aliased of ty_cst +exception Type_already_defined of ty_cst +exception Record_type_expected of ty_cst (** {2 Native Tags} *) @@ -763,28 +137,9 @@ module Tags : sig include Dolmen_intf.Tag.Zf_Base with type 'a t := 'a t (** Satsify the Zf interface. *) -end - -(** {2 Filters} *) -(* ************************************************************************* *) - -module Filter : sig - - type ty_filter - type term_filter - - (** The common external signature for filters. *) - module type S = sig - - val name : string - (** The name of the filter. *) - - val active : bool ref - (** Whether the filter is active *) - - val reset : unit -> unit - (** Reset the filter to its default state. *) - end + include Dolmen_intf.Tag.Ae_Base with type 'a t := 'a t + and type term := term + (** Satsify the Ae interface. *) end @@ -813,19 +168,22 @@ module Print : sig val id : _ id t (** Printer for ids *) - val ttype : ttype t - (** Printer for ttype. *) + val type_ : type_ t + (** Printer for type_. 
*) + + val type_fun : type_fun t + (** Printer for type_fun. *) val ty_var : ty_var t (** Printer to print type variables along with their types. *) + val ty_cst : ty_cst t + (** Printer to print type constants along with their types. *) + val term_var : term_var t (** Printer to print term variables along with their types. *) - val ty_const : ty_const t - (** Printer to print type constants along with their types. *) - - val term_const : term_const t + val term_cst : term_cst t (** Printer to print term constants along with their types. *) val ty : ty t @@ -834,6 +192,9 @@ module Print : sig val term : term t (** Printer for terms. *) + val formula : formula t + (** Printer for formulas. *) + end (** {2 Substitutions} *) @@ -943,28 +304,38 @@ module Id : sig val print : Format.formatter -> 'a t -> unit (** Printing function *) - val tag : 'a t -> 'b Tag.t -> 'b -> unit - (** Add a tag to an identifier *) - - val get_tag : 'a t -> 'b Tag.t -> 'b list - (** Get all the tags added to the identifier *) - - val get_tag_last : 'a t -> 'b Tag.t -> 'b option - (** Get the last tag added to the identifier *) - val mk : - ?builtin:builtin -> ?tags:Tag.map -> string -> 'a -> 'a t - (** Create a new fresh identifier *) - - val const : ?pos:Pretty.pos -> ?name:string -> - ?builtin:builtin -> ?tags:Tag.map -> - ?ty_filters:Filter.ty_filter list -> - ?term_filters:Filter.term_filter list -> - string -> 'a t list -> 'b list -> 'b -> ('a, 'b) function_type t - (** Create a new function or type constructor identifier *) + ?builtin:builtin -> + Path.t -> 'a -> 'a t + (** Create a new identifier *) + + val get_tag : _ t -> 'a Tag.t -> 'a option + (** Get the value bound to a tag. *) + + val get_tag_list : _ t -> 'a list Tag.t -> 'a list + (** Get the list of values bound to a list tag, returning the + empty list if no value is bound. *) + + val get_tag_last : _ t -> 'a list Tag.t -> 'a option + (** Get the last value bound to a list tag. 
*) + + val set_tag : _ t -> 'a Tag.t -> 'a -> unit + (** Set the value bound to the tag. *) + + val add_tag : _ t -> 'a list Tag.t -> 'a -> unit + (** Bind an additional value to a list tag. *) + + val add_tag_opt : _ t -> 'a list Tag.t -> 'a option -> unit + (** Optionally bind an additional value to a list tag. *) + + val add_tag_list : _ t -> 'a list Tag.t -> 'a list -> unit + (** Bind a list of additional values to a list tag. *) + + val unset_tag : _ t -> _ Tag.t -> unit + (** Remove the binding to the given tag. *) end @@ -984,6 +355,14 @@ module Ty : sig type 'a tag = 'a Tag.t (** A type for tags to attach to arbitrary types. *) + exception Bad_arity of ty_cst * t list + (** Raised when applying a type constant to the wrong number + of arguments. *) + + exception Prenex_polymorphism of t + (** Raised when the type provided is polymorphic, but occurred in a + place where polymorphic types are forbidden by prenex/rank-1 + polymorphism. *) val hash : t -> int (** A hash function for types, should be suitable to create hashtables. *) @@ -998,6 +377,15 @@ module Ty : sig (** Printing function. *) + (** {4 Alias management} *) + + val alias_to : ty_cst -> ty_var list -> ty -> unit + (** Alias the given type constant. *) + + val expand_head : t -> t + (** Expand head aliases. *) + + (** {4 View} *) type view = [ @@ -1018,41 +406,53 @@ module Ty : sig | `String_reg_lang (** Regular languages over strings *) | `Var of ty_var - (** Variables *) + (** Variables (excluding wildcards) *) + | `Wildcard of ty_var + (** Wildcards *) | `App of [ - | `Generic of ty_const + | `Generic of ty_cst | `Builtin of builtin ] * ty list (** Generic applications. *) + | `Arrow of ty list * ty + | `Pi of ty_var list * ty ] (** View on types. *) val view : t -> view (** View on types. *) + val pi_arity : t -> int + (** Reutnrs the number of expected type arguments that the given + type expects (i.e. the number of prenex polymorphic variables + in the given type). 
*) + + val poly_sig : t -> ty_var list * ty list * ty + (** Split a type into a polymorphic signature. *) + (** {4 Type structure definition} *) type adt_case = { - cstr : term_const; - tester : term_const; - dstrs : term_const option array; + cstr : term_cst; + tester : term_cst; + dstrs : term_cst option array; } (** One case of an algebraic datatype definition. *) type def = | Abstract | Adt of { - ty : ty_const; + ty : ty_cst; record : bool; cases : adt_case array; } (** *) (** The various ways to define a type inside the solver. *) - val define : ty_const -> def -> unit + val define : ty_cst -> def -> unit (** Register a type definition. *) - val definition : ty_const -> def option + val definition : ty_cst -> def option (** Return the definition of a type (if it exists). *) @@ -1064,6 +464,9 @@ module Ty : sig type t = ty_var (** The type of variables the can occur in types *) + val print : Format.formatter -> t -> unit + (** Printer. *) + val hash : t -> int (** A hash function for type variables, should be suitable to create hashtables. *) @@ -1076,23 +479,48 @@ module Ty : sig val mk : string -> t (** Create a new type variable with the given name. *) - val tag : t -> 'a tag -> 'a -> unit - (** Tag a variable. *) + val wildcard : unit -> t + (** Type wildcard *) + + val is_wildcard : t -> bool + (** Predictae to distinguish wildcard type variables. *) + + val get_tag : t -> 'a Tag.t -> 'a option + (** Get the value bound to a tag. *) - val get_tag : t -> 'a tag -> 'a list - (** Return the list of value associated to the tag. *) + val get_tag_list : t -> 'a list Tag.t -> 'a list + (** Get the list of values bound to a list tag, returning the + empty list if no value is bound. *) - val get_tag_last : t -> 'a tag -> 'a option - (** Return the last value associated to the tag (if any). *) + val get_tag_last : t -> 'a list Tag.t -> 'a option + (** Get the last value bound to a list tag. 
*) + + val set_tag : t -> 'a Tag.t -> 'a -> unit + (** Set the value bound to the tag. *) + + val add_tag : t -> 'a list Tag.t -> 'a -> unit + (** Bind an additional value to a list tag. *) + + val add_tag_opt : t -> 'a list Tag.t -> 'a option -> unit + (** Optionally bind an additional value to a list tag. *) + + val add_tag_list : t -> 'a list Tag.t -> 'a list -> unit + (** Bind a list of additional values to a list tag. *) + + val unset_tag : t -> _ Tag.t -> unit + (** Remove the binding to the given tag. *) end (** A module for constant symbols the occur in types. *) module Const : sig - type t = ty_const + type t = ty_cst (** The type of constant symbols the can occur in types *) + val print : Format.formatter -> t -> unit + (** Printer. *) + val hash : t -> int (** A hash function for type constants, should be suitable to create hashtables. *) @@ -1105,17 +533,33 @@ module Ty : sig val arity : t -> int (** Return the arity of the given symbol. *) - val mk : string -> int -> t + val mk : Path.t -> int -> t (** Create a type constant with the given arity. *) - val tag : t -> 'a tag -> 'a -> unit - (** Tag a variable. *) + val get_tag : t -> 'a Tag.t -> 'a option + (** Get the value bound to a tag. *) + + val get_tag_list : t -> 'a list Tag.t -> 'a list + (** Get the list of values bound to a list tag, returning the + empty list if no value is bound. *) + + val get_tag_last : t -> 'a list Tag.t -> 'a option + (** Get the last value bound to a list tag. *) + + val set_tag : t -> 'a Tag.t -> 'a -> unit + (** Set the value bound to the tag. *) - val get_tag : t -> 'a tag -> 'a list - (** Return the list of values associated to the tag. *) + val add_tag : t -> 'a list Tag.t -> 'a -> unit + (** Bind an additional value to a list tag. *) - val get_tag_last : t -> 'a tag -> 'a option - (** Return the last value associated to the tag (if any). *) + val add_tag_opt : t -> 'a list Tag.t -> 'a option -> unit + (** Optionally bind an additional value to a list tag. 
*) + + val add_tag_list : t -> 'a list Tag.t -> 'a list -> unit + (** Bind a list of additional values to a list tag. *) + + val unset_tag : t -> _ Tag.t -> unit + (** Remove the binding to the given tag. *) val int : t (** The type constant for integers *) @@ -1176,18 +620,18 @@ module Ty : sig val string_reg_lang : t (** The type of regular language over strings. *) - val wildcard : unit -> t - (** Type wildcard *) - - val as_ : t -> Var.t -> t - (** Add a pattern ascription to a type. *) - val of_var : Var.t -> t (** Create a type from a variable. *) val apply : Const.t -> t list -> t (** Application for types. *) + val arrow : t list -> t -> t + (** Create an arrow type (i.e. function type) *) + + val pi : Var.t list -> t -> t + (** Create a prenex/rank-1 polymorphic type. *) + val array : t -> t -> t (** Build an array type from source to destination types. *) @@ -1198,18 +642,47 @@ module Ty : sig (** Floating point of given exponent and significand. *) val roundingMode : t + (** Type for the various Floating point rounding modes. *) - val tag : t -> 'a tag -> 'a -> unit - (** Annotate the given type with the given tag and value. *) + val subst : ?fix:bool -> subst -> t -> t + (** Substitution on types. *) - val get_tag : t -> 'a tag -> 'a list - (** Return the list of value associated to the tag. *) + val fv : t -> Var.t list + (** Returns the list of free variables in the type. *) - val get_tag_last : t -> 'a tag -> 'a option - (** Return the last value associated to the tag (if any). *) + val unify : t -> t -> t option + (** Try and unify two types. *) - val subst : ?fix:bool -> subst -> t -> t - (** Substitution on types. *) + val set_wildcard : ty_var -> t -> unit + (** Instantiate the given wildcard. *) + + val add_wildcard_hook : hook:(ty_var -> ty -> unit) -> ty_var -> unit + (** Tag for hooks called upon the wildcard instantiation. *) + + val get_tag : t -> 'a Tag.t -> 'a option + (** Get the value bound to a tag. 
*) + + val get_tag_list : t -> 'a list Tag.t -> 'a list + (** Get the list of values bound to a list tag, returning the + empty list if no value is bound. *) + + val get_tag_last : t -> 'a list Tag.t -> 'a option + (** Get the last value bound to a list tag. *) + + val set_tag : t -> 'a Tag.t -> 'a -> unit + (** Set the value bound to the tag. *) + + val add_tag : t -> 'a list Tag.t -> 'a -> unit + (** Bind an additional value to a list tag. *) + + val add_tag_opt : t -> 'a list Tag.t -> 'a option -> unit + (** Optionally bind an additional value to a list tag. *) + + val add_tag_list : t -> 'a list Tag.t -> 'a list -> unit + (** Bind a list of additional values to a list tag. *) + + val unset_tag : t -> _ Tag.t -> unit + (** Remove the binding to the given tag. *) end @@ -1250,12 +723,40 @@ module Term : sig val ty : t -> ty (** Returns the type of a term. *) + val get_tag : t -> 'a Tag.t -> 'a option + (** Get the value bound to a tag. *) + + val get_tag_list : t -> 'a list Tag.t -> 'a list + (** Get the list of values bound to a list tag, returning the + empty list if no value is bound. *) + + val get_tag_last : t -> 'a list Tag.t -> 'a option + (** Get the last value bound to a list tag. *) + + val set_tag : t -> 'a Tag.t -> 'a -> unit + (** Set the value bound to the tag. *) + + val add_tag : t -> 'a list Tag.t -> 'a -> unit + (** Bind an additional value to a list tag. *) + + val add_tag_opt : t -> 'a list Tag.t -> 'a option -> unit + (** Optionally bind an additional value to a list tag. *) + + val add_tag_list : t -> 'a list Tag.t -> 'a list -> unit + (** Bind a list of additional values to a list tag. *) + + val unset_tag : t -> _ Tag.t -> unit + (** Remove the binding to the given tag. *) + (** A module for variables that occur in terms. *) module Var : sig type t = term_var (** The type of variables the can occur in terms *) + val print : Format.formatter -> t -> unit + (** Printer. 
*) + val hash : t -> int (** A hash function for term variables, should be suitable to create hashtables. *) @@ -1271,22 +772,42 @@ module Term : sig val ty : t -> ty (** Return the type of the variable. *) - val tag : t -> 'a tag -> 'a -> unit - (** Tag a variable. *) + val get_tag : t -> 'a Tag.t -> 'a option + (** Get the value bound to a tag. *) + + val get_tag_list : t -> 'a list Tag.t -> 'a list + (** Get the list of values bound to a list tag, returning the + empty list if no value is bound. *) + + val get_tag_last : t -> 'a list Tag.t -> 'a option + (** Get the last value bound to a list tag. *) + + val set_tag : t -> 'a Tag.t -> 'a -> unit + (** Set the value bound to the tag. *) + + val add_tag : t -> 'a list Tag.t -> 'a -> unit + (** Bind an additional value to a list tag. *) + + val add_tag_opt : t -> 'a list Tag.t -> 'a option -> unit + (** Optionally bind an additional value to a list tag. *) + + val add_tag_list : t -> 'a list Tag.t -> 'a list -> unit + (** Bind a list of additional values to a list tag. *) - val get_tag : t -> 'a tag -> 'a list - (** Return the list of value associated to the tag. *) + val unset_tag : t -> _ Tag.t -> unit + (** Remove the binding to the given tag. *) - val get_tag_last : t -> 'a tag -> 'a option - (** Return the last value associated to the tag (if any). *) end (** A module for constant symbols that occur in terms. *) module Const : sig - type t = term_const + type t = term_cst (** The type of constant symbols that can occur in terms *) + val print : Format.formatter -> t -> unit + (** Printer. *) + val hash : t -> int (** A hash function for term constants, should be suitable to create hashtables. *) @@ -1299,29 +820,52 @@ module Term : sig val arity : t -> int * int (** Returns the arity of a term constant. *) - val mk : string -> ty_var list -> ty list -> ty -> t - (** Create a polymorphic constant symbol. *) + val mk : Path.t -> ty -> t + (** Create a constant symbol. 
*) + + val get_tag : t -> 'a Tag.t -> 'a option + (** Get the value bound to a tag. *) + + val get_tag_list : t -> 'a list Tag.t -> 'a list + (** Get the list of values bound to a list tag, returning the + empty list if no value is bound. *) + + val get_tag_last : t -> 'a list Tag.t -> 'a option + (** Get the last value bound to a list tag. *) + + val set_tag : t -> 'a Tag.t -> 'a -> unit + (** Set the value bound to the tag. *) + + val add_tag : t -> 'a list Tag.t -> 'a -> unit + (** Bind an additional value to a list tag. *) + + val add_tag_opt : t -> 'a list Tag.t -> 'a option -> unit + (** Optionally bind an additional value to a list tag. *) + + val add_tag_list : t -> 'a list Tag.t -> 'a list -> unit + (** Bind a list of additional values to a list tag. *) - val tag : t -> 'a tag -> 'a -> unit - (** Tag a constant. *) + val unset_tag : t -> _ Tag.t -> unit + (** Remove the binding to the given tag. *) - val get_tag : t -> 'a tag -> 'a list - (** Return the list of values associated to the tag. *) + include Dolmen_intf.Term.Tptp_Thf_Core_Const with type t := t + (** Satisfy the required interface for the typing of tptp's Thf. *) - val get_tag_last : t -> 'a tag -> 'a option - (** Return the last value associated to the tag (if any). *) end (** A module for Algebraic datatype constructors. *) module Cstr : sig - type t = term_const + type t = term_cst (** An algebraic type constructor. Note that such constructors are used to build terms, and not types, e.g. consider the following: [type 'a list = Nil | Cons of 'a * 'a t], then [Nil] and [Cons] are the constructors, while [list] would be a type constant of arity 1 used to name the type. *) + val print : Format.formatter -> t -> unit + (** Printer. *) + val hash : t -> int (** A hash function for adt constructors, should be suitable to create hashtables. 
*) @@ -1345,23 +889,42 @@ module Term : sig @raise Bad_term_arity if the provided type argument list is not of the correct length *) - val tag : t -> 'a tag -> 'a -> unit - (** Tag a constant. *) + val get_tag : t -> 'a Tag.t -> 'a option + (** Get the value bound to a tag. *) + + val get_tag_list : t -> 'a list Tag.t -> 'a list + (** Get the list of values bound to a list tag, returning the + empty list if no value is bound. *) + + val get_tag_last : t -> 'a list Tag.t -> 'a option + (** Get the last value bound to a list tag. *) + + val set_tag : t -> 'a Tag.t -> 'a -> unit + (** Set the value bound to the tag. *) + + val add_tag : t -> 'a list Tag.t -> 'a -> unit + (** Bind an additional value to a list tag. *) - val get_tag : t -> 'a tag -> 'a list - (** Return the list of values associated to the tag. *) + val add_tag_opt : t -> 'a list Tag.t -> 'a option -> unit + (** Optionally bind an additional value to a list tag. *) - val get_tag_last : t -> 'a tag -> 'a option - (** Return the last value associated to the tag (if any). *) + val add_tag_list : t -> 'a list Tag.t -> 'a list -> unit + (** Bind a list of additional values to a list tag. *) + + val unset_tag : t -> _ Tag.t -> unit + (** Remove the binding to the given tag. *) end (** A module for Record fields. *) module Field : sig - type t = term_const + type t = term_cst (** A record field. *) + val print : Format.formatter -> t -> unit + (** Printer. *) + val hash : t -> int (** A hash function for adt destructors. *) @@ -1371,15 +934,40 @@ module Term : sig val compare : t -> t -> int (** A comparison function on adt constructors. *) + val get_tag : t -> 'a Tag.t -> 'a option + (** Get the value bound to a tag. *) + + val get_tag_list : t -> 'a list Tag.t -> 'a list + (** Get the list of values bound to a list tag, returning the + empty list if no value is bound. *) + + val get_tag_last : t -> 'a list Tag.t -> 'a option + (** Get the last value bound to a list tag. 
*) + + val set_tag : t -> 'a Tag.t -> 'a -> unit + (** Set the value bound to the tag. *) + + val add_tag : t -> 'a list Tag.t -> 'a -> unit + (** Bind an additional value to a list tag. *) + + val add_tag_opt : t -> 'a list Tag.t -> 'a option -> unit + (** Optionally bind an additional value to a list tag. *) + + val add_tag_list : t -> 'a list Tag.t -> 'a list -> unit + (** Bind a list of additional values to a list tag. *) + + val unset_tag : t -> _ Tag.t -> unit + (** Remove the binding to the given tag. *) + end val define_record : - ty_const -> ty_var list -> (string * ty) list -> Field.t list + ty_const -> ty_var list -> (Path.t * ty) list -> Field.t list (** Define a new record type. *) val define_adt : ty_const -> ty_var list -> - (string * (ty * string option) list) list -> + (Path.t * (ty * Path.t option) list) list -> (Cstr.t * (ty * Const.t option) list) list (** [define_aft t vars cstrs] defines the type constant [t], parametrised over the type variables [ty_vars] as defining an algebraic datatypes with constructors @@ -1404,7 +992,7 @@ module Term : sig This definition defines the usual type of polymorphic linked lists, as well as two destructors "hd" and "tl". "hd" would have type [forall alpha. alpha list -> a], and be the partial function returning the head of the list. - *) + *) exception Wrong_type of t * ty (** Exception raised in case of typing error during term construction. @@ -1426,18 +1014,36 @@ module Term : sig exception Field_missing of Field.t (** Field missing in a record expression. *) - exception Field_expected of term_const + exception Field_expected of term_cst (** A field was expected but the returned term constant is not a record field. *) + exception Constructor_expected of Cstr.t + (** Raised when trying to access the tester of an ADT constructor, but the constant + provided was not a constructor. *) + + exception Over_application of t list + (** Raised when an application was provided too many term arguments. 
The + extraneous arguments are returned by the exception. *) + + exception Bad_poly_arity of ty_var list * ty list + (** Raised when a polymorphic application does not have an + adequate number of arguments. *) + val ensure : t -> ty -> t (** Ensure a term has the given type. *) val of_var : Var.t -> t (** Create a term from a variable *) - val apply : Const.t -> ty list -> t list -> t + val of_cst : Const.t -> t + (** Create a term from a constant. *) + + val apply : t -> ty list -> t list -> t (** Polymorphic application. *) + val apply_cst : term_cst -> ty list -> t list -> t + (** Polymorphic application of a constructor. *) + val apply_cstr : Cstr.t -> ty list -> t list -> t (** Polymorphic application of a constructor. *) @@ -1478,6 +1084,9 @@ module Term : sig val eqs : t list -> t (** Build equalities with arbitrary arities. *) + val neq : t -> t -> t + (** Disequality *) + val distinct : t list -> t (** Distinct constraints on terms. *) @@ -1502,6 +1111,9 @@ module Term : sig val imply : t -> t -> t (** Implication *) + val implied : t -> t -> t + (** Reverse Implication *) + val equiv : t -> t -> t (** Equivalence *) @@ -1511,18 +1123,17 @@ module Term : sig val store : t -> t -> t -> t (** Array store *) - val all : - ty_var list * Var.t list -> - ty_var list * Var.t list -> - t -> t + val lam : ty_var list * Var.t list -> t -> t + (** Create a local function. + The first pair of arguments are the variables that are free in the resulting + quantified formula, and the second pair are the variables bound. *) + + val all : ty_var list * Var.t list -> t -> t (** Universally quantify the given formula over the type and terms variables. The first pair of arguments are the variables that are free in the resulting quantified formula, and the second pair are the variables bound. 
*) - val ex : - ty_var list * Var.t list -> - ty_var list * Var.t list -> - t -> t + val ex : ty_var list * Var.t list -> t -> t (** Existencially quantify the given formula over the type and terms variables. The first pair of arguments are the variables that are free in the resulting quantified formula, and the second pair are the variables bound. *) @@ -1533,20 +1144,14 @@ module Term : sig variable with its defining term. *) val letin : (Var.t * t) list -> t -> t - (** Let-binding. Variabels can be bound to either terms or formulas. *) + (** Sequential let-binding. Variables can be bound to either terms or formulas. *) + + val letand : (Var.t * t) list -> t -> t + (** Parrallel let-binding. Variables can be bound to either terms or formulas. *) val ite : t -> t -> t -> t (** [ite condition then_t else_t] creates a conditional branch. *) - val tag : t -> 'a tag -> 'a -> unit - (** Annotate the given formula wiht the tag and value. *) - - val get_tag : t -> 'a tag -> 'a list - (** Return the list of values associated to the tag. *) - - val get_tag_last : t -> 'a tag -> 'a option - (** Return the last value associated to the tag (if any). *) - val fv : t -> ty_var list * Var.t list (** Returns the list of free variables in the formula. *) @@ -1578,9 +1183,12 @@ module Term : sig include Dolmen_intf.Term.Smtlib_Int with type t := t (** Satisfy the required interface for the typing of smtlib integers. *) - include Dolmen_intf.Term.Tptp_Arith_Common with type t := t + include Dolmen_intf.Term.Tptp_Tff_Arith_Common with type t := t (** Satisfy the common interface for TPTP's arithmetic on integers. *) + include Dolmen_intf.Term.Ae_Arith_Common with type t := t + (** Satisfy the common interface for Alt-Ergo's arithmetic types. 
*) + val div : t -> t -> t (** Euclidian division quotient *) @@ -1604,7 +1212,7 @@ module Term : sig (** Rational operations *) module Rat : sig - include Dolmen_intf.Term.Tptp_Arith_Common with type t := t + include Dolmen_intf.Term.Tptp_Tff_Arith_Common with type t := t (** Satisfy the common interface for TPTP's arithmetic over Rationals *) val div : t -> t -> t @@ -1617,12 +1225,18 @@ module Term : sig include Dolmen_intf.Term.Smtlib_Real with type t := t (** Satisfy the required interface for the typing of smtlib's reals *) - include Dolmen_intf.Term.Tptp_Arith_Common with type t := t + include Dolmen_intf.Term.Tptp_Tff_Arith_Common with type t := t (** Satisfy the common interface for TPTP's arithmetic over reals *) include Dolmen_intf.Term.Smtlib_Float_Real with type t := t (** Satisfy the real part of the SMTLIB's Float requirements *) + include Dolmen_intf.Term.Ae_Arith_Common with type t := t + (** Satisfy the common interface for Alt-Ergo's arithmetic types. *) + + val floor_to_int : t -> t + (** Greatest integer smaller than the given real *) + end (** String operations *) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/id.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/id.ml index d8ebf9bf03eb242ecaf85baa8160f7badce32f21..728672e1b0a1b2bee11f8a1b6fa28648c99c2547 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/id.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/id.ml @@ -1,88 +1,134 @@ (* This file is free software, part of dolmen. See file "LICENSE" for more information. 
*) -type value = - | Integer - | Rational - | Real - | Binary - | Hexadecimal - | Bitvector - | String - -type namespace = - | Var - | Sort - | Term - | Attr - | Decl - | Track - | Module of string - | Value of value +(* Types *) +(* ************************************************************************* *) + +type namespace = Namespace.t type t = { + name : Name.t; ns : namespace; - name : string; } -let hash = Hashtbl.hash -let compare = Stdlib.compare -let equal = Stdlib.(=) -(* Name&Printing *) +(* Std functions *) +(* ************************************************************************* *) + +let hash { ns; name; } = + Misc.hash2 (Namespace.hash ns) (Name.hash name) + +let compare { ns; name; } { ns = ns'; name = name'; } = + let (<?>) = Misc.(<?>) in + Namespace.compare ns ns' + <?> (Name.compare, name, name') + +let equal id id' = + id == id' || compare id id' = 0 + +let print fmt { name; ns = _; } = + Name.print fmt name + + +(* Namespaces *) +(* ************************************************************************* *) + +let var = Namespace.var +let sort = Namespace.sort +let term = Namespace.term +let attr = Namespace.attr +let decl = Namespace.decl +let track = Namespace.track + + +(* Inspection & Creation *) +(* ************************************************************************* *) -let split { name; _ } = - Misc.split_on_char '\000' name +let ns { ns; _ } = ns +let name { name; _ } = name -let to_string ({ name; _} as id) = - match split id with - | [s] -> s - | l -> - let b = Buffer.create (String.length name + List.length l + 3) in - Buffer.add_string b "(_"; - List.iter (fun s -> Buffer.add_char b ' '; Buffer.add_string b s) l; - Buffer.add_char b ')'; - Buffer.contents b +let create ns name = { ns; name; } -let pp b id = - Printf.bprintf b "%s" (to_string id) +let mk ns s = + let name = Name.simple s in + create ns name -let print fmt id = - Format.fprintf fmt "%s" (to_string id) +let indexed ns basename indexes = + let name = 
Name.indexed basename indexes in + create ns name -let full_name = function - | { ns = Module m; _ } as id -> - Printf.sprintf "%s.%s" m (to_string id) - | id -> - to_string id +let qualified ns path name = + let name = Name.qualified path name in + create ns name (* Tracked hashtbl *) +(* ************************************************************************* *) + let trackers = Hashtbl.create 13 let trackeds = Hashtbl.create 13 -(* Namespaces *) -let var = Var -let sort = Sort -let term = Term -let attr = Attr -let decl = Decl -let track = Track -let mod_name s = Module s - -(* Identifiers *) -let mk ns name = { ns; name; } - -let tracked ~track ns name = - let id = mk ns name in + +let tracked ~track ns path = + let id = mk ns path in Hashtbl.add trackers track id; Hashtbl.add trackeds id track; id + (* Standard attributes *) +(* ************************************************************************* *) + let ac_symbol = mk Attr "ac" let case_split = mk Decl "case_split" let theory_decl = mk Decl "theory" let rwrt_rule = mk Decl "rewrite_rule" let tptp_role = mk Decl "tptp_role" +let tptp_kind = mk Decl "tptp_kind" + + +(* Maps *) +(* ************************************************************************* *) + +module Map = struct + + type 'a t = 'a Name.Map.t Namespace.Map.t + + let empty = Namespace.Map.empty + + let find_exn k t = + Name.Map.find_exn k.name (Namespace.Map.find_exn k.ns t) + + let find_opt k t = + match Namespace.Map.find_opt k.ns t with + | None -> None + | Some map -> Name.Map.find_opt k.name map + + let add k v t = + Namespace.Map.find_add k.ns (function + | None -> Name.Map.add k.name v Name.Map.empty + | Some map -> Name.Map.add k.name v map + ) t + + let find_add k f t = + Namespace.Map.find_add k.ns (function + | None -> Name.Map.find_add k.name f Name.Map.empty + | Some map -> Name.Map.find_add k.name f map + ) t + + let iter f t = + Namespace.Map.iter (fun ns map -> + Name.Map.iter (fun name v -> + f { ns; name; } v + ) map + ) 
t + + let fold f t acc = + Namespace.Map.fold (fun ns map acc -> + Name.Map.fold (fun name v acc -> + f { ns; name; } v acc + ) map acc + ) t acc + +end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/id.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/id.mli index c6d3d083bcf37ffb6201b3b1b766bfef0826eced..7e0c78b836c97dff69ad32cc90d0659ab689ef77 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/id.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/id.mli @@ -5,50 +5,11 @@ (** {2 Type definitions} *) -type value = - | Integer - (** Integers (in base 10 notation), e.g. ["123456789"] *) - | Rational - (** Rational (using quotient notation with '/'), e.g. ["123/456"] *) - | Real - (** Real (using decimal floating point notation with exponent), - e.g. ["123.456e789"] *) - | Binary - (** Bitvector in binary notation, e.g. ["0b011010111010"] *) - | Hexadecimal - (** Bitvector in hexadecimal notation, e.g. ["0x9a23e5f"] *) - | Bitvector - (** Bitvector litteral. *) - | String - (** String litterals. *) -(** Types of lexical values typically encountered. *) - -type namespace = - | Var - (** Namespace for variables. Not all variables are necessarily in - this namespace, but ids in this namespace must be variables. *) - | Sort - (** Namepsace for sorts and types (only used in smtlib) *) - | Term - (** Most used namespace, used for terms in general (and types outside smtlib). *) - | Attr - (** Namespace for attributes (also called annotations). *) - | Decl - (** Namespace used for naming declarations/definitions/statements... *) - | Track - (** Namespace used to track specific values throughout some files. *) - | Module of string - (** Namespaces defined by modules (used for instance in dedukti). *) - | Value of value - (** The identifier is a value, encoded in a string. Examples include - arithmetic constants (e.g. ["123456", "123/456", "123.456e789"]), - bitvectors (i.e. binary notation), etc... 
*) -(** Namespaces, used to record the lexical scop in which an identifier - was parsed. *) +type namespace = Namespace.t type t = { - ns : namespace; - name : string; + name : Name.t; + ns : Namespace.t; } (** The type of identifiers, basically a name together with a namespace. *) @@ -65,23 +26,23 @@ val equal : t -> t -> bool val compare : t -> t -> int (** Usual functions for hashing, comparisons, equality. *) -(** {2 Additional functions} *) +val print : Format.formatter -> t -> unit +(** Printing functions. *) -val mk : namespace -> string -> t -(** Create an identifier. *) +module Map : Maps.S with type key := t +(** Maps for ids *) -val full_name : t -> string -(** Returns a full name for the identifier. - NOTE: full names may not be unique and therefore not - suitable for comparison of identifiers. *) -val pp : Buffer.t -> t -> unit -val print : Format.formatter -> t -> unit -(** Printing functions. *) +(** {2 Additional functions} *) + +val create : namespace -> Name.t -> t +(** Create an identifier. *) -val split : t -> string list -(** Split an id into a list of strings (used notably for SMTLIb). *) +val ns : t -> namespace +(** Accessor for the id's namespace. *) +val name : t -> Name.t +(** Accessor for the id's name. *) (** {2 Standard attributes} *) @@ -103,4 +64,7 @@ val tptp_role : t (** The tagged statement has a tptp role. Should be used as a function symbol applied to the actual tptp role. *) +val tptp_kind : t +(** The tagged statement is of the given kind (e.g. tff, thf, ...). + Should be used as a function symbol applied to the actual tptp kind. *) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/loc.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/loc.ml index a91c28cc742fa3fc647e02bb26d2d4d0e85fd75a..714592e1f7c2606533e7d138bb842e5419c4d975 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/loc.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/loc.ml @@ -18,6 +18,7 @@ module type S = Dolmen_intf.Location.S starts. 
*) type file = { name : string; + mutable max_size : int; mutable table : int Vec.t; } @@ -47,14 +48,20 @@ type loc = { file : string; start_line : int; start_column : int; + start_line_offset : int; stop_line : int; stop_column : int; + stop_line_offset : int; + max_line_length : int; } -exception Uncaught of t * exn +exception Uncaught of t * exn * Printexc.raw_backtrace exception Lexing_error of t * string -exception Syntax_error of t * Msg.t +exception Syntax_error of t * [ + | `Regular of Msg.t + | `Advanced of Msg.t * Msg.t * Msg.t + ] (** Exceptions that may occur during parsing *) (* Compact locations *) @@ -87,31 +94,26 @@ let mk_compact offset length = (* File table *) (* ************************************************************************* *) -let tables = Hashtbl.create 5 - let file_name { name; _ } = name -let reset_files () = Hashtbl.reset tables - let mk_file name = - try Hashtbl.find tables name - with Not_found -> - let table = Vec.create () in - let () = Vec.push table (-1) in - let file = { name; table; } in - Hashtbl.add tables name file; - file + let table = Vec.create () in + let () = Vec.push table (-1) in + { name; table; max_size = 0; } let new_line file offset = assert (Vec.last file.table < offset); - Vec.push file.table (offset - 1) + Vec.push file.table (offset - 1); + file.max_size <- offset + +let newline file lexbuf = + Lexing.new_line lexbuf; + let offset = Lexing.lexeme_end lexbuf in + new_line file offset -let newline filename = - let file = mk_file filename in - (fun lexbuf -> - Lexing.new_line lexbuf; - let offset = Lexing.lexeme_end lexbuf in - new_line file offset) +let update_size file lexbuf = + let offset = Lexing.lexeme_end lexbuf in + file.max_size <- offset let find_line file offset = let rec aux vec offset start stop = @@ -131,6 +133,21 @@ let find_line file offset = let line_offset = Vec.get file.table (line - 1) in line_offset, line +let line_length file line = + let line_offset = Vec.get file.table (line - 1) in 
+ let next_line_offset = + try Vec.get file.table line + with Invalid_argument _ -> file.max_size + in + next_line_offset - line_offset + +let max_line_length file start_line stop_line = + let res = ref 0 in + for line = start_line to stop_line do + res := max !res (line_length file line) + done; + !res + (* Full locations *) (* ************************************************************************* *) @@ -139,10 +156,21 @@ let eq a b = a = b let hash a = Hashtbl.hash a (* Constructor functions *) -let mk file start_line start_column stop_line stop_column = - { file; start_line; start_column; stop_line; stop_column; } - -let no_loc = mk_compact 0 0 +let mk file + ~start_line ~start_column ~start_line_offset + ~stop_line ~stop_column ~stop_line_offset + ~max_line_length = + { file; max_line_length; + start_line; start_column; start_line_offset; + stop_line; stop_column; stop_line_offset; } + +let no_loc : t = + mk_compact 0 0 + +let dummy : loc = + mk "" ~max_line_length:0 + ~start_line:0 ~start_column:0 ~start_line_offset:0 + ~stop_line:0 ~stop_column:0 ~stop_line_offset:0 let mk_pos start stop = let open Lexing in @@ -161,20 +189,41 @@ let of_lexbuf lexbuf = let stop = Lexing.lexeme_end_p lexbuf in mk_pos start stop +let lexing_positions (loc : loc) = + let start = Lexing.{ + pos_fname = loc.file; + pos_lnum = loc.start_line; + pos_bol = loc.start_line_offset; + pos_cnum = loc.start_column + loc.start_line_offset; + } in + let stop = Lexing.{ + pos_fname = loc.file; + pos_lnum = loc.stop_line; + pos_bol = loc.stop_line_offset; + pos_cnum = loc.stop_column + loc.stop_line_offset; + } in + start, stop + + (* Compact<->full translations *) (* ************************************************************************* *) -let loc file c = +let loc file c : loc = let start_offset, length = split_compact c in if length = 0 then - mk file.name 0 0 0 0 + mk file.name ~max_line_length:0 + ~start_line:0 ~start_column:0 ~start_line_offset:0 + ~stop_line:0 ~stop_column:0 
~stop_line_offset:0 else begin let stop_offset = start_offset + length in let start_line_offset, start_line = find_line file start_offset in let start_column = start_offset - start_line_offset - 1 in let stop_line_offset, stop_line = find_line file stop_offset in let stop_column = stop_offset - stop_line_offset - 1 in - mk file.name start_line start_column stop_line stop_column + let max_line_length = max_line_length file start_line stop_line in + mk file.name ~max_line_length + ~start_line ~start_column ~start_line_offset:(start_line_offset + 1) + ~stop_line ~stop_column ~stop_line_offset:(stop_line_offset + 1) end let full_loc { file; loc = l; } = loc file l diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/loc.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/loc.mli index 21c4b1e48cdcb9fbdb48f78df290d3260090afde..a590799e3aa7de2ed7cbf5f36e11ac99e87fd74d 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/loc.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/loc.mli @@ -5,12 +5,15 @@ (** {2 Interface definition} *) -type loc = { +type loc = private { file : string; start_line : int; start_column : int; + start_line_offset : int; stop_line : int; stop_column : int; + stop_line_offset : int; + max_line_length : int; } (** A full location, including file, start position and end position. Dummy positions (i.e. with [start_line = stop_line] and @@ -36,7 +39,7 @@ module type S = Dolmen_intf.Location.S (** An anstract module type for providing locations. Used as argumentby much of the functors provided in Dolmen. *) -include S with type t := t +include S with type t := t and type file := file (** This module implements the signature {S}. *) val hash : t -> int @@ -46,22 +49,23 @@ val eq : t -> t -> bool (** Location equality. *) val no_loc : t -(** An dummy location pointing at the first byte of a file. *) +(** A dummy location pointing at the first byte of a file. *) + +val dummy : loc +(** A dummy location pointing at the first byte of a file. 
*) val is_dummy : loc -> bool -(** Is the location ana ctual location, or a dummy one ? *) +(** Is the location an actual location, or a dummy one ? *) (** {2 Compact location handling} *) val mk_file : string -> file -(** Return the meta-data associated to a file. *) +(** Create a new set of meta-data for the given filename. *) val new_line : file -> int -> unit (** Register a new line whose first char is at the given offset. *) -val reset_files : unit -> unit -(** Reset the meta-data associated to a file *) (** {2 Compact<->full translations} *) @@ -72,6 +76,8 @@ val full_loc : full -> loc val compact : loc -> file * t (** Compactify a full location into a compact representation. *) +val lexing_positions : loc -> Lexing.position * Lexing.position +(** Reutnr the pari of lexing positions corresponding to a location. *) (** {2 Printing locations} *) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/maps.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/maps.ml new file mode 100644 index 0000000000000000000000000000000000000000..644b77a3c689faa6033a285d148adefd3f0a76ca --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/maps.ml @@ -0,0 +1,23 @@ + +module type S = Dolmen_intf.Map.S + +module Make(Ord : Map.OrderedType) = struct + + include Map.Make(Ord) + + let find_exn = find + + let find_opt k t = + match find k t with + | res -> Some res + | exception Not_found -> None + + let find_add k f t = + update k (fun o -> Some (f o)) t + +end + +module Int = Make(Int) + +module String = Maps_string + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/maps.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/maps.mli new file mode 100644 index 0000000000000000000000000000000000000000..db622017b8563098735056be8d72d0a2cba2cf5d --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/maps.mli @@ -0,0 +1,11 @@ + +module type S = Dolmen_intf.Map.S + +module Int : S with type key := int + +module String : S with type key := string + 
+module Make(Ord : Map.OrderedType) : S with type key := Ord.t + + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/maps_string.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/maps_string.ml new file mode 100644 index 0000000000000000000000000000000000000000..626ee92b54946a95b25820aca505b53e1443c5f8 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/maps_string.ml @@ -0,0 +1,496 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + +(* PARTS: Persistent Adaptive Radix Tree for Strings + + This implementation is heavily inspired by the following paper: + + The Adaptive Radix Tree: ARTful Indexing for Main-Memory Databases + by Viktor Leis, Alfons Kemper, Thomas Neumann + https://db.in.tum.de/~leis/papers/ART.pdf + +*) + +(* Some constants *) +(* ************************************************************************* *) + +let dichotomy_threshold = 16 +let indirect_threshold = 48 + +let () = + assert (indirect_threshold - dichotomy_threshold > 2); + assert (indirect_threshold < 255); + () + + +(* Type definitions *) +(* ************************************************************************* *) + +type 'a t = + | Leaf of { + value : 'a option; + } + | Prefix of { + value : 'a option; + prefix : string; + child : 'a t; + } + | Dichotomy of { + value : 'a option; + keys : string; + children : 'a t array; + } + | Indirect of { + value : 'a option; + keys : string; + children : 'a t array; + } + | Direct of { + value : 'a option; + children : 'a t array; + } + +(* Find *) +(* ************************************************************************* *) + +let rec find_aux key len offset t = + if offset = len then + match t with + | Leaf { value; } + | Prefix { value; _ } + | Dichotomy { value; _ } + | Indirect { value; _ } + | Direct { value; _ } -> value + else begin + assert (offset < len); + match t with + | Leaf _ -> None + | Prefix { prefix; child; value = _; } -> + let n = String.length 
prefix in + if offset + n > len then None + else begin + let i = ref 0 in + while !i < n && offset + !i < len && + String.unsafe_get prefix !i = + String.unsafe_get key (offset + !i) do + i := !i + 1 + done; + if !i = n + then find_aux key len (offset + !i) child + else None + end + | Dichotomy { keys; children; value = _; } -> + let c = String.unsafe_get key offset in + let n = Array.length children in + let l = ref 0 in + let r = ref n in + while !l < !r do + let m = (!l + !r) / 2 in + if String.unsafe_get keys m < c + then l := m + 1 + else r := m + done; + let m = !l in + if m < n && String.unsafe_get keys m = c + then find_aux key len (offset + 1) (Array.unsafe_get children m) + else None + | Indirect { keys; children; value = _; } -> + let i = Char.code (String.unsafe_get key offset) in + let j = Char.code (String.unsafe_get keys i) in + if j > indirect_threshold then None + else find_aux key len (offset + 1) (Array.unsafe_get children j) + | Direct { children; value = _; } -> + let i = Char.code (String.unsafe_get key offset) in + find_aux key len (offset + 1) (Array.unsafe_get children i) + end + + +(* Creation/Insertion *) +(* ************************************************************************* *) + +let[@inline] array_insert a m x = + let n = Array.length a in + let a' = Array.make (n + 1) x in + Array.blit a 0 a' 0 m; + Array.blit a m a' (m + 1) (n - m); + a' + +let[@inline] string_insert s m c = + let n = String.length s in + let b = Bytes.make (n + 1) c in + Bytes.blit_string s 0 b 0 m; + Bytes.blit_string s m b (m + 1) (n - m); + Bytes.unsafe_to_string b + +let empty = Leaf { value = None; } + +let[@inline] with_value value = function + | Leaf _ -> Leaf { value; } + | Prefix { value = _; prefix; child; } -> Prefix { value; prefix; child; } + | Dichotomy { value = _; keys; children; } -> Dichotomy { value; keys; children; } + | Indirect { value = _; keys; children } -> Indirect { value; keys; children; } + | Direct { value = _; children; } -> 
Direct { value; children; } + +let[@inline] mk_prefix value prefix child = + match prefix, value, child with + | "", None, _ -> child + | "", (Some _), _ -> with_value value child + | _, _, Prefix { prefix = p'; child; value = None; } -> + let prefix = prefix ^ p' in + Prefix { prefix; child; value; } + | _ -> Prefix { prefix; child; value; } + +let sub_after s off = + if off = String.length s then "" + else String.sub s off (String.length s - off) + +let rec add_aux key v len offset t = + if offset = len then + with_value (Some v) t + else begin + assert (offset < len); + match t with + | Leaf { value; } -> + let prefix = String.sub key offset (len - offset) in + let child = Leaf { value = Some v; } in + mk_prefix value prefix child + + | Prefix { prefix; child; value; } -> + let i = ref 0 in + let n = String.length prefix in + while !i < n && offset + !i < len && + prefix.[!i] = key.[offset + !i] do + i := !i + 1 + done; + if !i = n then begin + let child = add_aux key v len (offset + !i) child in + mk_prefix value prefix child + end else if offset + !i = len then begin + let pre = String.sub prefix 0 !i in + let post = String.sub prefix !i (n - !i) in + mk_prefix value pre (mk_prefix (Some v) post child) + end else begin + let pre = String.sub prefix 0 !i in + let post = sub_after prefix (!i + 1) in + let post_key = sub_after key (offset + !i + 1) in + let c0 = String.unsafe_get prefix !i in + let child0 = mk_prefix None post child in + let c1 = String.unsafe_get key (offset + !i) in + let child1 = mk_prefix None post_key (Leaf { value = Some v; }) in + let c0, child0, c1, child1 = + if c0 <= c1 then c0, child0, c1, child1 else c1, child1, c0, child0 + in + let keys = Bytes.make 2 c1 in + Bytes.unsafe_set keys 0 c0; + let keys = Bytes.unsafe_to_string keys in + let children = [| child0; child1; |] in + let node = Dichotomy { value = None; keys; children; } in + mk_prefix value pre node + end + + | Dichotomy { keys; children; value; } -> + let c = key.[offset] in 
+ let n = Array.length children in + let l = ref 0 in + let r = ref n in + while !l < !r do + let m = (!l + !r) / 2 in + if keys.[m] < c + then l := m + 1 + else r := m + done; + let m = !l in + if m < n && keys.[m] = c then begin + let child = add_aux key v len (offset + 1) children.(m) in + let children = Array.copy children in + children.(m) <- child; + Dichotomy { keys; children; value; } + end else begin + let key_post = sub_after key (offset + 1) in + let child = mk_prefix None key_post (Leaf { value = Some v; }) in + if n < dichotomy_threshold then begin + let children = array_insert children m child in + let keys = string_insert keys m c in + Dichotomy { keys; children; value; } + end else begin + let children = Array.append children [| child |] in + let b = Bytes.make 256 (Char.unsafe_chr 255) in + Bytes.unsafe_set b (Char.code c) (Char.unsafe_chr n); + for i = 0 to n - 1 do + let c = String.unsafe_get keys i in + Bytes.unsafe_set b (Char.code c) (Char.unsafe_chr i) + done; + let keys = Bytes.unsafe_to_string b in + Indirect { keys; children; value; } + end + end + + | Indirect { keys; children; value; } -> + let i = Char.code (String.unsafe_get key offset) in + let j = Char.code (String.unsafe_get keys i) in + if j < 255 then begin + let child = add_aux key v len (offset + 1) (Array.unsafe_get children j) in + let children = Array.copy children in + children.(j) <- child; + let b = Bytes.of_string keys in + Bytes.unsafe_set b i (Char.unsafe_chr j); + let keys = Bytes.unsafe_to_string b in + Indirect { keys; children; value; } + end else begin + let key_post = sub_after key (offset + 1) in + let child = mk_prefix None key_post (Leaf { value = Some v; }) in + let n = Array.length children in + if n < indirect_threshold then begin + let children = Array.append children [| child |] in + let b = Bytes.of_string keys in + Bytes.unsafe_set b i (Char.unsafe_chr n); + let keys = Bytes.unsafe_to_string b in + Indirect { keys; children; value; } + end else begin + 
let new_children = Array.make 256 empty in + Array.unsafe_set new_children i child; + for c = 0 to 255 do + let k = Char.code (String.unsafe_get keys c) in + if k < 255 then + Array.unsafe_set new_children c (Array.unsafe_get children k) + done; + Direct { value; children = new_children; } + end + end + + | Direct { children; value; } -> + let i = Char.code (String.unsafe_get key offset) in + let child = add_aux key v len (offset + 1) (Array.unsafe_get children i) in + let children = Array.copy children in + Array.unsafe_set children i child; + Direct { children; value; } + + end + + +(* Updating *) +(* ************************************************************************* *) + +let rec find_add_aux f key len offset t = + if offset = len then + match t with + | Leaf { value; _ } + | Prefix { value; _ } + | Dichotomy { value; _ } + | Indirect { value; _ } + | Direct { value; _ } -> + with_value (Some (f value)) t + else begin + assert (offset < len); + match t with + | Leaf { value; } -> + let v = f None in + let prefix = String.sub key offset (len - offset) in + let child = Leaf { value = Some v; } in + mk_prefix value prefix child + + | Prefix { prefix; child; value; } -> + let i = ref 0 in + let n = String.length prefix in + while !i < n && offset + !i < len && + prefix.[!i] = key.[offset + !i] do + i := !i + 1 + done; + if !i = n then begin + let child = find_add_aux f key len (offset + !i) child in + mk_prefix value prefix child + end else if offset + !i = len then begin + let v = f None in + let pre = String.sub prefix 0 !i in + let post = String.sub prefix !i (n - !i) in + mk_prefix value pre (mk_prefix (Some v) post child) + end else begin + let v = f None in + let pre = String.sub prefix 0 !i in + let post = sub_after prefix (!i + 1) in + let post_key = sub_after key (offset + !i + 1) in + let c0 = String.unsafe_get prefix !i in + let child0 = mk_prefix None post child in + let c1 = String.unsafe_get key (offset + !i) in + let child1 = mk_prefix None 
post_key (Leaf { value = Some v; }) in + let c0, child0, c1, child1 = + if c0 <= c1 then c0, child0, c1, child1 else c1, child1, c0, child0 + in + let keys = Bytes.make 2 c1 in + Bytes.unsafe_set keys 0 c0; + let keys = Bytes.unsafe_to_string keys in + let children = [| child0; child1; |] in + let node = Dichotomy { value = None; keys; children; } in + mk_prefix value pre node + end + + | Dichotomy { keys; children; value; } -> + let c = key.[offset] in + let n = Array.length children in + let l = ref 0 in + let r = ref n in + while !l < !r do + let m = (!l + !r) / 2 in + if keys.[m] < c + then l := m + 1 + else r := m + done; + let m = !l in + if m < n && keys.[m] = c then begin + let child = find_add_aux f key len (offset + 1) children.(m) in + let children = Array.copy children in + children.(m) <- child; + Dichotomy { keys; children; value; } + end else begin + let v = f None in + let key_post = sub_after key (offset + 1) in + let child = mk_prefix None key_post (Leaf { value = Some v; }) in + if n < dichotomy_threshold then begin + let children = array_insert children m child in + let keys = string_insert keys m c in + Dichotomy { keys; children; value; } + end else begin + let children = Array.append children [| child |] in + let b = Bytes.make 256 (Char.unsafe_chr 255) in + Bytes.unsafe_set b (Char.code c) (Char.unsafe_chr n); + for i = 0 to n - 1 do + let c = String.unsafe_get keys i in + Bytes.unsafe_set b (Char.code c) (Char.unsafe_chr i) + done; + let keys = Bytes.unsafe_to_string b in + Indirect { keys; children; value; } + end + end + + | Indirect { keys; children; value; } -> + let i = Char.code (String.unsafe_get key offset) in + let j = Char.code (String.unsafe_get keys i) in + if j < 255 then begin + let child = find_add_aux f key len (offset + 1) (Array.unsafe_get children j) in + let children = Array.copy children in + children.(j) <- child; + let b = Bytes.of_string keys in + Bytes.unsafe_set b i (Char.unsafe_chr j); + let keys = 
Bytes.unsafe_to_string b in + Indirect { keys; children; value; } + end else begin + let v = f None in + let key_post = sub_after key (offset + 1) in + let child = mk_prefix None key_post (Leaf { value = Some v; }) in + let n = Array.length children in + if n < indirect_threshold then begin + let children = Array.append children [| child |] in + let b = Bytes.of_string keys in + Bytes.unsafe_set b i (Char.unsafe_chr n); + let keys = Bytes.unsafe_to_string b in + Indirect { keys; children; value; } + end else begin + let new_children = Array.make 256 empty in + Array.unsafe_set new_children i child; + for c = 0 to 255 do + let k = Char.code (String.unsafe_get keys c) in + if k < 255 then + Array.unsafe_set new_children c (Array.unsafe_get children k) + done; + Direct { value; children = new_children; } + end + end + + | Direct { children; value; } -> + let i = Char.code (String.unsafe_get key offset) in + let child = find_add_aux f key len (offset + 1) (Array.unsafe_get children i) in + let children = Array.copy children in + Array.unsafe_set children i child; + Direct { children; value; } + + end + + +(* Iteration *) +(* ************************************************************************* *) + +let rec sum_lengths acc = function + | [] -> acc + | s :: r -> sum_lengths (String.length s + acc) r + +let rec rev_unsafe_blits b pos = function + | [] -> () + | s :: r -> + let n = String.length s in + Bytes.blit_string s 0 b (pos - n) n; + rev_unsafe_blits b (pos - n) r + +let rev_concat = function + | [] -> "" + | l -> + let n = sum_lengths 0 l in + let b = Bytes.create n in + rev_unsafe_blits b n l; + Bytes.unsafe_to_string b + +let iter_apply f acc = function + | None -> () + | Some v -> + let s = rev_concat acc in + f s v + +let rec iter_aux f acc t = + match t with + | Leaf { value; } -> + iter_apply f acc value + | Prefix { value; prefix; child; } -> + iter_apply f acc value; + iter_aux f (prefix :: acc) child + | Dichotomy { value; keys; children; } -> + 
iter_apply f acc value; + String.iteri (fun i c -> + let s = String.make 1 c in + let child = children.(i) in + iter_aux f (s :: acc) child + ) keys + | Indirect { value; keys; children; } -> + iter_apply f acc value; + String.iter (fun c -> + let i = Char.code c in + if i < 255 then begin + let s = String.make 1 c in + let child = children.(i) in + iter_aux f (s :: acc) child + end) keys + | Direct { value; children; } -> + iter_apply f acc value; + Array.iteri (fun i child -> + let c = Char.chr i in + let s = String.make 1 c in + iter_aux f (s :: acc) child + ) children + + +(* Exported interface *) +(* ************************************************************************* *) + +let empty = empty + +let find_opt k t = + find_aux k (String.length k) 0 t + +let find_exn k t = + match find_opt k t with + | None -> raise Not_found + | Some res -> res + +let add k v t = + add_aux k v (String.length k) 0 t + +let find_add k f t = + find_add_aux f k (String.length k) 0 t + +let iter f t = iter_aux f [] t + +let fold f t acc = + let r = ref acc in + iter_aux (fun s v -> r := f s v !r) [] t; + !r + + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/maps_string.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/maps_string.mli new file mode 100644 index 0000000000000000000000000000000000000000..ba070deca9ad2e1a14d547b388cdf508d2d4f337 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/maps_string.mli @@ -0,0 +1,6 @@ + +(* This file is free software, part of dolmen. 
See file "LICENSE" for more information *) + +include Dolmen_intf.Map.S with type key := string + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/misc.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/misc.ml index 70a7c2c757f9e01d60b8d65f225c9298b143a132..af823fc096367df487c37c6e1e96c13c5704815c 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/misc.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/misc.ml @@ -1,6 +1,9 @@ (* This file is free software, part of dolmen. See file "LICENSE" for more information. *) +(* Extensions *) +(* ************************************************************************* *) + let get_extension s = let rec aux s i = if i <= 0 then "" @@ -29,6 +32,106 @@ let split_on_char sep s = done; String.sub s 0 !j :: !r + +(* Hashs *) +(* ************************************************************************* *) +(* Taken from containres's CCHash, cf + https://github.com/c-cube/ocaml-containers/blob/master/src/core/CCHash.ml *) + +let fnv_offset_basis = 0xcbf29ce484222325L +let fnv_prime = 0x100000001b3L + +(* hash an integer *) +let hash_int n = + let h = ref fnv_offset_basis in + for k = 0 to 7 do + h := Int64.(mul !h fnv_prime); + h := Int64.(logxor !h (of_int ((n lsr (k * 8)) land 0xff))); + done; + (Int64.to_int !h) land max_int (* truncate back to int and remove sign *) + +let hash_string (x:string) = + let h = ref fnv_offset_basis in + for i = 0 to String.length x - 1 do + h := Int64.(mul !h fnv_prime); + let byte = Char.code (String.unsafe_get x i) in + h := Int64.(logxor !h (of_int byte)); + done; + Int64.to_int !h land max_int + +let hash2 a b = + let h = ref fnv_offset_basis in + (* we only do one loop, where we mix bytes of [a] and [b], so as + to simplify control flow *) + for k = 0 to 7 do + h := Int64.(mul !h fnv_prime); + h := Int64.(logxor !h (of_int ((a lsr (k * 8)) land 0xff))); + h := Int64.(mul !h fnv_prime); + h := Int64.(logxor !h (of_int ((b lsr (k * 8)) land 0xff))); + done; + 
Int64.to_int !h land max_int + +let hash3 a b c = + let h = ref fnv_offset_basis in + (* we only do one loop, where we mix bytes of [a] [b] and [c], so as + to simplify control flow *) + for k = 0 to 7 do + h := Int64.(mul !h fnv_prime); + h := Int64.(logxor !h (of_int ((a lsr (k * 8)) land 0xff))); + h := Int64.(mul !h fnv_prime); + h := Int64.(logxor !h (of_int ((b lsr (k * 8)) land 0xff))); + h := Int64.(mul !h fnv_prime); + h := Int64.(logxor !h (of_int ((c lsr (k * 8)) land 0xff))); + done; + Int64.to_int !h land max_int + +let hash4 a b c d = + let h = ref fnv_offset_basis in + for k = 0 to 7 do + h := Int64.(mul !h fnv_prime); + h := Int64.(logxor !h (of_int ((a lsr (k * 8)) land 0xff))); + h := Int64.(mul !h fnv_prime); + h := Int64.(logxor !h (of_int ((b lsr (k * 8)) land 0xff))); + h := Int64.(mul !h fnv_prime); + h := Int64.(logxor !h (of_int ((c lsr (k * 8)) land 0xff))); + h := Int64.(mul !h fnv_prime); + h := Int64.(logxor !h (of_int ((d lsr (k * 8)) land 0xff))); + done; + Int64.to_int !h land max_int + +let[@inline] hash_combine f s x = hash2 s (f x) + +let hash_list f l = List.fold_left (hash_combine f) 0x42 l + + +(* Comparisons *) +(* ************************************************************************* *) + +(* Useful shorthand for chaining comparisons *) +let (<?>) i (cmp, x, y) = + match i with + | 0 -> cmp x y + | _ -> i + +(* lexicographic comparison *) +let lexicographic cmp l l' = + let rec aux l l' = + match l, l' with + | [], [] -> 0 + | _ :: _, [] -> 1 + | [], _ :: _ -> -1 + | x :: r, x' :: r' -> + begin match cmp x x' with + | 0 -> aux r r' + | res -> res + end + in + aux l l' + + +(* Options *) +(* ************************************************************************* *) + let opt_map o f = match o with | None -> None @@ -39,9 +142,6 @@ let opt_bind o f = | None -> None | Some x -> f x - -(* Option printing *) - let pp_opt ?(none="<none>") pp b = function | Some t -> pp b t | None -> Printf.bprintf b "%s" none @@ -51,7 +151,8 
@@ let print_opt ?(none="<none>") print fmt = function | None -> Format.fprintf fmt "%s" none -(* List printing functions *) +(* Lists *) +(* ************************************************************************* *) let rec pp_list ~pp_sep ~sep ~pp b = function | [] -> () @@ -70,7 +171,9 @@ let rec print_list ~print_sep ~sep ~print fmt = function (print_list ~print_sep ~sep ~print) r -(* Operations on Lexing.lexbuf *) +(* Lexbufs *) +(* ************************************************************************* *) + let set_file buf filename = let open Lexing in buf.lex_curr_p <- {buf.lex_curr_p with pos_fname=filename;}; diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/misc.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/misc.mli index 272dff980fdd4f1600e7de09ffbd70b532a157c5..925f024b6a864d864a6f72fb0f3590c350f2ab3f 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/misc.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/misc.mli @@ -1,6 +1,25 @@ (* This file is free software, part of dolmen. See file "LICENSE" for more information. *) +(** {2 Comparison helpers} *) + +val (<?>) : int -> (('a -> 'a -> int) * 'a * 'a) -> int +(** Composition of comparison functions. *) + +val lexicographic : ('a -> 'a -> int) -> 'a list -> 'a list -> int +(** Lexicogrphic comparison on lsits. *) + + +(** {2 Hash helpers} *) + +val hash_int : int -> int +val hash_string : string -> int +val hash_list : ('a -> int) -> 'a list -> int +val hash2 : int -> int -> int +val hash3 : int -> int -> int -> int +val hash4 : int -> int -> int -> int -> int +(** Hash combinators. *) + (** {2 Misc functions} *) @@ -66,3 +85,4 @@ val mk_lexbuf : stream (to report errors), and then a string with the actual contents to be parsed. 
*) + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/name.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/name.ml new file mode 100644 index 0000000000000000000000000000000000000000..780544d84ed3bec328b38d47bfac279564f91cbb --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/name.ml @@ -0,0 +1,258 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information. *) + +(* Type definition *) +(* ************************************************************************* *) + +type t = + | Simple of string + | Indexed of { + basename : string; + indexes : string list; + } + | Qualified of { + path : string list; + basename : string; + } + + +(* Creation functions *) +(* ************************************************************************* *) + +let simple basename = + Simple basename + +let indexed basename indexes = + Indexed { basename; indexes; } + +let qualified path basename = + Qualified { path; basename; } + + +(* Std functions *) +(* ************************************************************************* *) + +let print fmt = function + | Simple basename -> + Format.fprintf fmt "%s" basename + | Indexed { basename; indexes; } -> + Format.fprintf fmt "(_ %s %a)" basename + (Format.pp_print_list ~pp_sep:Format.pp_print_space + Format.pp_print_string) indexes + | Qualified { path = []; basename; } -> + Format.fprintf fmt "%s" basename + | Qualified { path; basename; } -> + Format.fprintf fmt "%a.%s" + (Format.pp_print_list ~pp_sep:Format.pp_print_space + Format.pp_print_string) path basename + +let hash = function + | Simple basename -> + Misc.hash2 3 + (Misc.hash_string basename) + | Indexed { basename; indexes; } -> + Misc.hash3 5 + (Misc.hash_string basename) + (Misc.hash_list Misc.hash_string indexes) + | Qualified { path; basename; } -> + Misc.hash3 7 + (Misc.hash_list Misc.hash_string path) + (Misc.hash_string basename) + +let discr = function + | Simple _ -> 0 + | Indexed _ -> 1 + | Qualified _ -> 
2 + +let compare n n' = + let (<?>) = Misc.(<?>) in + match n, n' with + | Simple name, Simple name' -> + String.compare name name' + | Indexed { basename = s; indexes = l; }, + Indexed { basename = s'; indexes = l'; } -> + String.compare s s' + <?> (Misc.lexicographic String.compare, l, l') + | Qualified { path = p; basename = s; }, + Qualified { path = p'; basename = s'; } -> + Misc.lexicographic String.compare p p' + <?> (String.compare, s, s') + + | _, _ -> compare (discr n) (discr n') + +let equal n n' = n == n' || compare n n' = 0 + + +(* Maps *) +(* ************************************************************************* *) + +module Map = struct + + module M = Maps.String + + (* Types *) + type 'a t = { + simple : 'a M.t; + qualified : 'a qualified M.t; + indexed : 'a indexed M.t; + } + + and 'a qualified = { + base : 'a M.t; + path : 'a qualified M.t; + } + + and 'a indexed = { + value : 'a option; + index : 'a indexed M.t; + } + + + (* Empty *) + let empty = { + simple = M.empty; + qualified = M.empty; + indexed = M.empty; + } + + let empty_q = { + base = M.empty; + path = M.empty; + } + + let empty_i = { + value = None; + index = M.empty; + } + + + (* find *) + let rec find_opt k t = + match k with + | Simple basename -> + M.find_opt basename t.simple + | Qualified { path = []; _ } -> assert false + | Qualified { path = hd :: tl; basename; } -> + begin match M.find_opt hd t.qualified with + | None -> None + | Some q -> find_qualified basename q tl + end + | Indexed { basename; indexes; } -> + begin match M.find_opt basename t.indexed with + | None -> None + | Some i -> find_indexed i indexes + end + + and find_qualified basename q = function + | [] -> M.find_opt basename q.base + | s :: r -> + match M.find_opt s q.path with + | None -> None + | Some q' -> find_qualified basename q' r + + and find_indexed i = function + | [] -> i.value + | s :: r -> + match M.find_opt s i.index with + | None -> None + | Some i' -> find_indexed i' r + + let find_exn k t = 
+ match find_opt k t with + | Some res -> res + | None -> raise Not_found + + + (* Add *) + let rec add k v t = + match k with + | Simple basename -> + { t with simple = M.add basename v t.simple; } + | Qualified { path = []; _ } -> assert false + | Qualified { path = hd :: tl; basename; } -> + { t with qualified = M.find_add hd (function + | None -> add_qualified basename v empty_q tl + | Some q -> add_qualified basename v q tl + ) t.qualified; } + | Indexed { basename; indexes; } -> + { t with indexed = M.find_add basename (function + | None -> add_indexed v empty_i indexes + | Some i -> add_indexed v i indexes + ) t.indexed; } + + and add_qualified basename v q = function + | [] -> { q with base = M.add basename v q.base; } + | s :: r -> + { q with path = M.find_add s (function + | None -> add_qualified basename v empty_q r + | Some q' -> add_qualified basename v q' r + ) q.path; } + + and add_indexed v i = function + | [] -> { i with value = Some v; } + | s :: r -> + { i with index = M.find_add s (function + | None -> add_indexed v empty_i r + | Some i' -> add_indexed v i' r + ) i.index; } + + (* Find-Add *) + let rec find_add k f t = + match k with + | Simple basename -> + { t with simple = M.find_add basename f t.simple; } + | Qualified { path = []; _ } -> assert false + | Qualified { path = hd :: tl; basename; } -> + { t with qualified = M.find_add hd (function + | None -> find_add_qualified basename f empty_q tl + | Some q -> find_add_qualified basename f q tl + ) t.qualified; } + | Indexed { basename; indexes; } -> + { t with indexed = M.find_add basename (function + | None -> find_add_indexed f empty_i indexes + | Some i -> find_add_indexed f i indexes + ) t.indexed; } + + and find_add_qualified basename f q = function + | [] -> { q with base = M.find_add basename f q.base; } + | s :: r -> + { q with path = M.find_add s (function + | None -> find_add_qualified basename f empty_q r + | Some q' -> find_add_qualified basename f q' r + ) q.path; } + + and 
find_add_indexed f i = function + | [] -> { i with value = Some (f i.value); } + | s :: r -> + { i with index = M.find_add s (function + | None -> find_add_indexed f empty_i r + | Some i' -> find_add_indexed f i' r + ) i.index; } + + (* Iter *) + let rec iter f t = + M.iter (fun s v -> f (simple s) v) t.simple; + M.iter (fun s q -> iter_qualified f [s] q) t.qualified; + M.iter (fun basename i -> iter_indexed f basename [] i) t.indexed; + () + + and iter_qualified f rev_path q = + let path = List.rev rev_path in + M.iter (fun s v -> f (qualified path s) v) q.base; + M.iter (fun s q' -> iter_qualified f (s :: rev_path) q') q.path; + () + + and iter_indexed f basename indexes i = + begin match i.value with + | None -> () + | Some v -> f (indexed basename (List.rev indexes)) v + end; + M.iter (fun s i' -> iter_indexed f basename (s :: indexes) i') i.index; + () + + let fold f t acc = + let r = ref acc in + iter (fun name v -> r := f name v !r) t; + !r + +end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/name.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/name.mli new file mode 100644 index 0000000000000000000000000000000000000000..abe7e208c71f2bcebd0a28ae43de57cf057c0438 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/name.mli @@ -0,0 +1,54 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information. *) + +(** Names + + This is an abstraction of the names that can appear in parsed files. + Names are basically a slightly more structured representation of the + strings used to refer to symbols in input files. *) + +(** {2 Type definition} *) + +type t = private + | Simple of string + (** Regular symbols. *) + | Indexed of { + basename : string; + indexes : string list; + } + (** Indexed symbols (currently only come from smtlib) *) + | Qualified of { + path : string list; + basename : string; + } + (** Qualified names, including a module path before the basename. *) +(** The type of names. 
*) + + +(** {2 Std functions} *) + +val hash : t -> int +val equal : t -> t -> bool +val compare : t -> t -> int +(** Std functions. *) + +val print : Format.formatter -> t -> unit +(** Printing function. *) + + +(** {2 Std functions} *) + +module Map : Maps.S with type key := t + + +(** {2 Creation functions} *) + +val simple : string -> t +(** Create a simple/regular name. *) + +val indexed : string -> string list -> t +(** Create an indexed name. *) + +val qualified : string list -> string -> t +(** Create a qualified name. *) + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/namespace.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/namespace.ml new file mode 100644 index 0000000000000000000000000000000000000000..b6a55547f489504e8cc2a05c395d676d42f8b34a --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/namespace.ml @@ -0,0 +1,89 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information. *) + +(* Type definitions *) +(* ************************************************************************* *) + +type value = + | Integer + | Rational + | Real + | Binary + | Hexadecimal + | Bitvector + | String + +type t = + | Var + | Sort + | Term + | Attr + | Decl + | Track + | Value of value + + +(* Creation functions *) +(* ************************************************************************* *) + +let var = Var +let sort = Sort +let term = Term +let attr = Attr +let decl = Decl +let track = Track + + +(* Std functions *) +(* ************************************************************************* *) + +let value_discr = function + | Integer -> 0 + | Rational -> 1 + | Real -> 2 + | Binary -> 3 + | Hexadecimal -> 4 + | Bitvector -> 5 + | String -> 6 + +let discr = function + | Var -> 0 + | Sort -> 1 + | Term -> 2 + | Attr -> 3 + | Decl -> 4 + | Track -> 5 + | Value v -> 6 + value_discr v + +let hash a = Hashtbl.hash a +let compare a b = compare (discr a) (discr b) +let equal a b = a == b || compare a b = 0 
+ +let print_value fmt = function + | Integer -> Format.fprintf fmt "int" + | Rational -> Format.fprintf fmt "rat" + | Real -> Format.fprintf fmt "real" + | Binary -> Format.fprintf fmt "bin" + | Hexadecimal -> Format.fprintf fmt "hex" + | Bitvector -> Format.fprintf fmt "bitv" + | String -> Format.fprintf fmt "string" + +let print fmt = function + | Var -> Format.fprintf fmt "var" + | Sort -> Format.fprintf fmt "sort" + | Term -> Format.fprintf fmt "term" + | Attr -> Format.fprintf fmt "attr" + | Decl -> Format.fprintf fmt "decl" + | Track -> Format.fprintf fmt "track" + | Value value -> Format.fprintf fmt "value:%a" print_value value + + +(* Map *) +(* ************************************************************************* *) + +module Map = Maps.Make(struct + type nonrec t = t + let compare = compare + end) + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/namespace.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/namespace.mli new file mode 100644 index 0000000000000000000000000000000000000000..3482cbbd91c2e6e91120ea837c9c66f2f4d24020 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/namespace.mli @@ -0,0 +1,81 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + + +(** {2 Type and std functions} *) +(* ************************************************************************* *) + +type value = + | Integer + (** Integers (in base 10 notation), e.g. ["123456789"] *) + | Rational + (** Rational (using quotient notation with '/'), e.g. ["123/456"] *) + | Real + (** Real (using decimal floating point notation with exponent), + e.g. ["123.456e789"] *) + | Binary + (** Bitvector in binary notation, e.g. ["0b011010111010"] *) + | Hexadecimal + (** Bitvector in hexadecimal notation, e.g. ["0x9a23e5f"] *) + | Bitvector + (** Bitvector litteral. *) + | String + (** String litterals. *) +(** Types of lexical values typically encountered. *) + +type t = + | Var + (** Namespace for variables. 
Not all variables are necessarily in + this namespace, but ids in this namespace must be variables. *) + | Sort + (** Namepsace for sorts and types (only used in smtlib) *) + | Term + (** Most used namespace, used for terms in general (and types outside smtlib). *) + | Attr + (** Namespace for attributes (also called annotations). *) + | Decl + (** Namespace used for naming declarations/definitions/statements... *) + | Track + (** Namespace used to track specific values throughout some files. *) + | Value of value + (** The identifier is a value, encoded in a string. Examples include + arithmetic constants (e.g. ["123456", "123/456", "123.456e789"]), + bitvectors (i.e. binary notation), etc... *) +(** Namespaces, used to record the lexical scop in which an identifier + was parsed. *) + +val hash : t -> int +val equal : t -> t -> bool +val compare : t -> t -> int +(** Std functions *) + +val print : Format.formatter -> t -> unit +(** Printing function. *) + +module Map : Maps.S with type key := t +(** Maps on namespaces *) + + +(** {2 Creation} *) +(* ************************************************************************* *) + +val var : t +(** The variable namespace. *) + +val sort : t +(** The sort namespace. *) + +val term : t +(** The term namespace. *) + +val attr : t +(** Teh attribute namespace. *) + +val decl : t +(** The declaration namespace. *) + +val track : t +(** Namespace used for identifiers used for tracking/special identification. 
*) + + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/normalize.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/normalize.ml index e2d7a2a66af03184a1fa0f89f0da6820e22acf11..2fafcc5ad6d7c98a6a8cf7d0c79a5525950512c0 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/normalize.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/normalize.ml @@ -6,21 +6,21 @@ module Tptp = struct let symbol m ~attr ~loc id = let t = match id with (* Base builtins *) - | { Id.name = "$_"; ns = Id.Term } -> Term.(builtin ~loc Wildcard ()) - | { Id.name = "$tType" ; ns = Id.Term } -> Term.(builtin ~loc Ttype ()) - | { Id.name = "$o"; ns = Id.Term } -> Term.(builtin ~loc Prop ()) - | { Id.name = "$true"; ns = Id.Term } -> Term.(builtin ~loc True ()) - | { Id.name = "$false"; ns = Id.Term } -> Term.(builtin ~loc False ()) + | { Id.name = Simple "$_"; ns = Term } -> Term.(builtin ~loc Wildcard ()) + | { Id.name = Simple "$tType" ; ns = Term } -> Term.(builtin ~loc Ttype ()) + | { Id.name = Simple "$o"; ns = Term } -> Term.(builtin ~loc Prop ()) + | { Id.name = Simple "$true"; ns = Term } -> Term.(builtin ~loc True ()) + | { Id.name = Simple "$false"; ns = Term } -> Term.(builtin ~loc False ()) (* Arithmetic builtins *) - | { Id.name = "$int"; ns = Id.Term } -> Term.(builtin ~loc Int ()) - | { Id.name = "$less"; ns = Id.Term } -> Term.(builtin ~loc Lt ()) - | { Id.name = "$lesseq"; ns = Id.Term } -> Term.(builtin ~loc Leq ()) - | { Id.name = "$greater"; ns = Id.Term } -> Term.(builtin ~loc Gt ()) - | { Id.name = "$greatereq"; ns = Id.Term } -> Term.(builtin ~loc Geq ()) - | { Id.name = "$uminus"; ns = Id.Term } -> Term.(builtin ~loc Minus ()) - | { Id.name = "$sum"; ns = Id.Term } -> Term.(builtin ~loc Add ()) - | { Id.name = "$difference"; ns = Id.Term } -> Term.(builtin ~loc Sub ()) - | { Id.name = "$product"; ns = Id.Term } -> Term.(builtin ~loc Mult ()) + | { Id.name = Simple "$int"; ns = Term } -> Term.(builtin ~loc Int ()) + | { Id.name = Simple "$less"; ns = 
Term } -> Term.(builtin ~loc Lt ()) + | { Id.name = Simple "$lesseq"; ns = Term } -> Term.(builtin ~loc Leq ()) + | { Id.name = Simple "$greater"; ns = Term } -> Term.(builtin ~loc Gt ()) + | { Id.name = Simple "$greatereq"; ns = Term } -> Term.(builtin ~loc Geq ()) + | { Id.name = Simple "$uminus"; ns = Term } -> Term.(builtin ~loc Minus ()) + | { Id.name = Simple "$sum"; ns = Term } -> Term.(builtin ~loc Add ()) + | { Id.name = Simple "$difference"; ns = Term } -> Term.(builtin ~loc Sub ()) + | { Id.name = Simple "$product"; ns = Term } -> Term.(builtin ~loc Mult ()) | _ -> Term.(const ~loc id) in let attrs = List.map (Term.map m) attr in @@ -35,17 +35,17 @@ module Smtlib = struct let symbol m ~attr ~loc id = let t = match id with - | { Id.name = "Bool"; ns = Id.Sort } -> Term.(builtin ~loc Prop ()) - | { Id.name = "true"; ns = Id.Term } -> Term.(builtin ~loc True ()) - | { Id.name = "false"; ns = Id.Term } -> Term.(builtin ~loc False ()) - | { Id.name = "not"; ns = Id.Term } -> Term.(builtin ~loc Not ()) - | { Id.name = "and"; ns = Id.Term } -> Term.(builtin ~loc And ()) - | { Id.name = "or"; ns = Id.Term } -> Term.(builtin ~loc Or ()) - | { Id.name = "xor"; ns = Id.Term } -> Term.(builtin ~loc Xor ()) - | { Id.name = "=>"; ns = Id.Term } -> Term.(builtin ~loc Imply ()) - | { Id.name = "="; ns = Id.Term } -> Term.(builtin ~loc Eq ()) - | { Id.name = "distinct"; ns = Id.Term } -> Term.(builtin ~loc Distinct ()) - | { Id.name = "ite"; ns = Id.Term } -> Term.(builtin ~loc Ite ()) + | { Id.name = Simple "Bool"; ns = Sort } -> Term.(builtin ~loc Prop ()) + | { Id.name = Simple "true"; ns = Term } -> Term.(builtin ~loc True ()) + | { Id.name = Simple "false"; ns = Term } -> Term.(builtin ~loc False ()) + | { Id.name = Simple "not"; ns = Term } -> Term.(builtin ~loc Not ()) + | { Id.name = Simple "and"; ns = Term } -> Term.(builtin ~loc And ()) + | { Id.name = Simple "or"; ns = Term } -> Term.(builtin ~loc Or ()) + | { Id.name = Simple "xor"; ns = Term } -> 
Term.(builtin ~loc Xor ()) + | { Id.name = Simple "=>"; ns = Term } -> Term.(builtin ~loc Imply ()) + | { Id.name = Simple "="; ns = Term } -> Term.(builtin ~loc Eq ()) + | { Id.name = Simple "distinct"; ns = Term } -> Term.(builtin ~loc Distinct ()) + | { Id.name = Simple "ite"; ns = Term } -> Term.(builtin ~loc Ite ()) | _ -> Term.(const ~loc id) in let attrs = List.map (Term.map m) attr in @@ -59,10 +59,14 @@ module Smtlib = struct let binder m ~attr ~loc b l body = match b with - | Term.Let -> + | Term.Let_seq -> let attrs = List.map (Term.map m) attr in let l' = List.map (binder_let_eq m) l in Term.add_attrs attrs (Term.letin ~loc l' (Term.map m body)) + | Term.Let_par -> + let attrs = List.map (Term.map m) attr in + let l' = List.map (binder_let_eq m) l in + Term.add_attrs attrs (Term.letand ~loc l' (Term.map m body)) | _ -> Term.(id_mapper.binder m ~attr ~loc b l body) let mapper = diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/path.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/path.ml new file mode 100644 index 0000000000000000000000000000000000000000..f8c0b83e514715a695cc39790da016d14eb8fa97 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/path.ml @@ -0,0 +1,39 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information. *) + +(** Names + +*) + +(** {2 Type definition} *) + +type path = string list + +type t = + | Local of { + name : string; + } + | Absolute of { + path : path; + name : string; + } + +let print fmt = function + | Local { name; } -> + Format.fprintf fmt "%s" name + | Absolute { path = []; name; } -> + Format.fprintf fmt "%s" name + | Absolute { path; name; } -> + let pp_sep fmt () = Format.fprintf fmt "." 
in + Format.fprintf fmt "%a.%a" + (Format.pp_print_list ~pp_sep Format.pp_print_string) path + Format.pp_print_string name + + +let local name = Local { name; } +let global name = Absolute { path = []; name; } +let absolute path name = Absolute { path; name; } + +let rename f = function + | Local { name; } -> local (f name) + | Absolute { path; name; } -> absolute path (f name) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/path.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/path.mli new file mode 100644 index 0000000000000000000000000000000000000000..1db6115ba711a427c1d494c93835ce0ce8158f17 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/path.mli @@ -0,0 +1,53 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information. *) + +(** Paths + + Paths are used to identify constants and variables after typechecking. + They are meant to identify the abstract location of where constants and + variables come from. Variables are always local, whereas constants + are identified by a full absolute path including all module names + leading to the module defining the constant. +*) + +(** {2 Type definition} *) + +type path = string list +(** A path of module names. A {path} identifies a module, or + the toplevel/implicitly global module if empty. *) + +type t = private + | Local of { + name : string; + } + (** A local path, mainly used for variables. *) + | Absolute of { + path : path; + name : string; + } + (** An absolute path, containing a path to a module, + and a basename. *) +(** Paths used for variables and constants. *) + + +(** {2 Std functions} *) + +val print : Format.formatter -> t -> unit +(** Printing function. *) + + +(** {2 Creation function} *) + +val local : string -> t +(** Create a local path. *) + +val global : string -> t +(** Create a global path. *) + +val absolute : path -> string -> t +(** Create an absolute path. 
*) + +val rename : (string -> string) -> t -> t +(** Change the basename of a path. *) + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/statement.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/statement.ml index cb731e17f21d69041d9886931dd6d87886e3a853..027104596a71a0d7a0cd377b16dd8092eb1ac7c1 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/statement.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/statement.ml @@ -16,7 +16,7 @@ type inductive = { vars : term list; cstrs : (Id.t * term list) list; loc : location; - attr : term option; + attrs : term list; } type record = { @@ -24,7 +24,7 @@ type record = { vars : term list; fields : (Id.t * term) list; loc : location; - attr : term option; + attrs : term list; } type decl = @@ -34,9 +34,11 @@ type decl = type def = { id : Id.t; - ty : term; - body : term; loc : location; + vars : term list; + params : term list; + ret_ty : term; + body : term; } type 'a group = { @@ -89,9 +91,9 @@ type descr = (* Statements are wrapped in a record to have a location. 
*) and t = { - id : Id.t; + id : Id.t option; descr : descr; - attr : term option; + attrs : term list; loc : location; } @@ -99,6 +101,7 @@ and t = { let no_loc = Loc.no_loc +(* (* Debug printing *) let pp_abstract b (i : abstract) = @@ -187,6 +190,7 @@ let rec pp_descr b = function and pp b = function { descr; _ } -> Printf.bprintf b "%a" pp_descr descr +*) (* Pretty printing *) @@ -237,9 +241,9 @@ let rec print_descr fmt = function | Plain t -> Format.fprintf fmt "@[<hov 2>plain: %a@]" Term.print t - | Prove [] -> Format.fprintf fmt "Prove" + | Prove [] -> Format.fprintf fmt "prove" | Prove l -> - Format.fprintf fmt "Prove assuming: %a" + Format.fprintf fmt "@[<hov 2>prove-assuming:@ %a@]" (Misc.print_list ~print_sep:Format.fprintf ~sep:" &&@ " ~print:Term.print) l | Clause l -> @@ -282,19 +286,25 @@ let rec print_descr fmt = function | Reset -> Format.fprintf fmt "reset" | Exit -> Format.fprintf fmt "exit" -and print fmt = function { descr; _ } -> - Format.fprintf fmt "%a" print_descr descr +and print_attrs fmt = function + | [] -> () + | l -> + Format.fprintf fmt "@[<hov>{ %a }@]@ " + (Format.pp_print_list Term.print) l + +and print fmt = function { descr; attrs; _ } -> + Format.fprintf fmt "%a%a" print_attrs attrs print_descr descr (** Annotations *) let annot = Term.apply (* Internal shortcut. 
*) -let mk ?(id=Id.(mk decl "")) ?(loc=Loc.no_loc) ?attr descr = - { id; descr; loc; attr; } +let mk ?id ?(loc=Loc.no_loc) ?(attrs=[]) descr = + { id; descr; loc; attrs; } (* Pack *) -let pack ?id ?loc ?attr l = - mk ?id ?loc ?attr (Pack l) +let pack ?id ?loc ?attrs l = + mk ?id ?loc ?attrs (Pack l) (* Push/Pop *) let pop ?loc i = mk ?loc (Pop i) @@ -303,9 +313,9 @@ let reset_assertions ?loc () = mk ?loc Reset_assertions (* Assumptions and fact checking *) let prove ?loc () = mk ?loc (Prove []) -let mk_clause ?loc ?attr l = mk ?loc ?attr (Clause l) -let consequent ?loc ?attr t = mk ?loc ?attr (Consequent t) -let antecedent ?loc ?attr t = mk ?loc ?attr (Antecedent t) +let mk_clause ?loc ?attrs l = mk ?loc ?attrs (Clause l) +let consequent ?loc ?attrs t = mk ?loc ?attrs (Consequent t) +let antecedent ?loc ?attrs t = mk ?loc ?attrs (Antecedent t) (* Options statements *) let set_logic ?loc s = mk ?loc (Set_logic s) @@ -334,59 +344,53 @@ let reset ?loc () = mk ?loc Reset let exit ?loc () = mk ?loc Exit (* decl/def *) -let def ?(loc=no_loc) id ty body = - { id; ty; body; loc; } +let def ?(loc=no_loc) id ~vars ~params ret_ty body = + { id; vars; params; ret_ty; body; loc; } let abstract ?(loc=no_loc) id ty = Abstract { id; ty; loc; } -let record ?(attr=None) ?(loc=no_loc) id vars fields = - Record { id; vars; fields; loc; attr; } +let record ?(attrs=[]) ?(loc=no_loc) id vars fields = + Record { id; vars; fields; loc; attrs; } -let inductive ?(attr=None) ?(loc=no_loc) id vars cstrs = - Inductive { id; vars; cstrs; loc; attr; } +let inductive ?(attrs=[]) ?(loc=no_loc) id vars cstrs = + Inductive { id; vars; cstrs; loc; attrs; } (* grouping of decls/defs *) -let mk_decls ?loc ?attr ~recursive decls = - mk ?loc ?attr (Decls { recursive; contents = decls; }) +let mk_decls ?loc ?attrs ~recursive decls = + mk ?loc ?attrs (Decls { recursive; contents = decls; }) -let group_decls ?loc ?attr ~recursive l = +let group_decls ?loc ?attrs ~recursive l = let decls, others = 
List.fold_left (fun (decls, others) s -> match s with | { descr = Decls d; _ } -> List.rev_append d.contents decls, others | _ -> decls, s :: others ) ([], []) l in - let new_decls = mk_decls ?loc ?attr ~recursive (List.rev decls) in + let new_decls = mk_decls ?loc ?attrs ~recursive (List.rev decls) in match others with | [] -> new_decls | l -> pack ?loc (new_decls :: List.rev l) -let mk_defs ?loc ?attr ~recursive defs = - mk ?loc ?attr (Defs { recursive; contents = defs; }) +let mk_defs ?loc ?attrs ~recursive defs = + mk ?loc ?attrs (Defs { recursive; contents = defs; }) -let group_defs ?loc ?attr ~recursive l = +let group_defs ?loc ?attrs ~recursive l = let defs, others = List.fold_left (fun (defs, others) s -> match s with | { descr = Defs d; _ } -> List.rev_append d.contents defs, others | _ -> defs, s :: others ) ([], []) l in - let new_defs = mk_defs ?loc ?attr ~recursive (List.rev defs) in + let new_defs = mk_defs ?loc ?attrs ~recursive (List.rev defs) in match others with | [] -> new_defs | l -> pack ?loc (new_defs :: List.rev l) -(* Some helpers *) -let extract_type = function - | { Term.term = Colon (_, ty); _ } -> ty - | _ -> assert false - - (* Alt-ergo wrappers *) let logic ?loc ~ac ids ty = - let attr = if ac then Some (Term.const ?loc Id.ac_symbol) else None in + let attrs = if ac then [Term.const ?loc Id.ac_symbol] else [] in let ty = match Term.fv ty with | [] -> ty | vars -> @@ -396,10 +400,10 @@ let logic ?loc ~ac ids ty = Term.pi ?loc l ty in let l = List.map (fun id -> abstract ?loc id ty) ids in - mk_decls ?loc ?attr ~recursive:true l + mk_decls ?loc ~attrs ~recursive:true l let abstract_type ?loc id vars = - let ty = Term.fun_ty ?loc vars (Term.tType ?loc ()) in + let ty = Term.pi ?loc vars (Term.tType ?loc ()) in mk_decls ?loc ~recursive:false [abstract ?loc id ty] let record_type ?loc id vars fields = @@ -415,8 +419,8 @@ let axiom ?loc id t = mk ~id ?loc (Antecedent t) let case_split ?loc id t = - let attr = Term.const ?loc Id.case_split in 
- mk ~id ?loc ~attr (Antecedent t) + let attrs = [Term.const ?loc Id.case_split] in + mk ~id ?loc ~attrs (Antecedent t) let prove_goal ?loc id t = mk ~id ?loc @@ Pack [ @@ -430,16 +434,18 @@ let rewriting ?loc id l = ) l) let theory ?loc id extends l = - let attr = Term.colon ?loc (Term.const ?loc Id.theory_decl) - (Term.colon ?loc (Term.const ?loc id) (Term.const ?loc extends)) in - mk ?loc ~attr (Pack l) + let attrs = [ + Term.colon ?loc (Term.const ?loc Id.theory_decl) + (Term.colon ?loc (Term.const ?loc id) (Term.const ?loc extends)) + ] in + mk ?loc ~attrs (Pack l) (* Dimacs&iCNF wrappers *) let p_cnf ?loc nbvar nbclause = let i = Term.int ?loc (string_of_int nbvar) in let j = Term.int ?loc (string_of_int nbclause) in - let attr = Term.colon ?loc i j in - mk ?loc ~attr (Set_logic "dimacs") + let attrs = [Term.colon ?loc i j] in + mk ?loc ~attrs (Set_logic "dimacs") let p_inccnf ?loc () = mk ?loc (Set_logic "icnf") @@ -464,11 +470,10 @@ let fun_decl ?loc id vars l t' = | vars -> Term.pi ?loc vars ty in mk_decls ?loc ~recursive:false [abstract ?loc id ty] -let type_def ?loc id args body = - let l = List.map (fun id -> Term.colon (Term.const id) @@ Term.tType ()) args in - let ty = Term.pi l (Term.tType ()) in - let body = Term.lambda l body in - mk_defs ?loc ~recursive:false [def ?loc id ty body] +let type_def ?loc id vars body = + let vars = List.map (fun id -> Term.colon (Term.const id) @@ Term.tType ()) vars in + let ret_ty = Term.tType ?loc () in + mk_defs ?loc ~recursive:false [def ?loc id ~vars ~params:[] ret_ty body] let datatypes ?loc l = let l' = List.map (fun (id, vars, cstrs) -> @@ -476,95 +481,67 @@ let datatypes ?loc l = ) l in mk_decls ?loc ~recursive:true l' -let fun_def_aux ?loc id vars args ty_ret body = - let ty = Term.fun_ty (List.map extract_type args) ty_ret in - let ty = match vars with - | [] -> ty - | vars -> Term.pi ?loc vars ty in - let t = Term.lambda args (Term.colon body ty_ret) in - let t = match vars with - | [] -> t - | vars -> - 
Term.lambda (List.map (fun e -> Term.colon e (Term.tType ?loc ())) vars) t in - id, ty, t - -let fun_def ?loc id vars args ty_ret body = - let id, ty, body = fun_def_aux ?loc id vars args ty_ret body in - mk_defs ?loc ~recursive:false [def ?loc id ty body] +let fun_def ?loc id vars params ret_ty body = + mk_defs ?loc ~recursive:false [ + def ?loc id ~vars ~params ret_ty body + ] let funs_def_rec ?loc l = - let contents = List.map (fun (id, vars, args, ty_ret, body) -> - let id, ty, body = fun_def_aux ?loc id vars args ty_ret body in - def ?loc id ty body + let contents = List.map (fun (id, vars, params, ret_ty, body) -> + def ?loc id ~vars ~params ret_ty body ) l in mk_defs ?loc ~recursive:true contents (* Wrappers for Zf *) -let zf_attr ?loc = function - | None | Some [] -> None - | Some l -> Some (Term.apply ?loc (Term.and_t ()) l) - let import ?loc s = mk ?loc (Include s) let defs ?loc ?attrs l = - let attr = zf_attr ?loc attrs in - group_defs ?loc ?attr ~recursive:true l + group_defs ?loc ?attrs ~recursive:true l let rewrite ?loc ?attrs t = - let attr = zf_attr ?loc attrs in - antecedent ?loc ?attr (Term.add_attr (Term.const Id.rwrt_rule) t) + antecedent ?loc ?attrs (Term.add_attr (Term.const Id.rwrt_rule) t) let goal ?loc ?attrs t = - let attr = zf_attr ?loc attrs in - mk ?loc ?attr (Pack [ + mk ?loc ?attrs (Pack [ consequent ?loc t; prove ?loc (); ]) let assume ?loc ?attrs t = - let attr = zf_attr ?loc attrs in - antecedent ?loc ?attr t + antecedent ?loc ?attrs t let lemma ?loc ?attrs t = - let attr = zf_attr ?loc attrs in - antecedent ?loc ?attr t + antecedent ?loc ?attrs t let decl ?loc ?attrs id ty = - let attr = zf_attr ?loc attrs in - mk_decls ?loc ?attr ~recursive:true [abstract ?loc id ty] + mk_decls ?loc ?attrs ~recursive:true [abstract ?loc id ty] let definition ?loc ?attrs s ty l = - let attr = zf_attr ?loc attrs in - mk ?loc ?attr (Pack ( + mk ?loc ?attrs (Pack ( decl ?loc s ty :: List.map (assume ?loc) l )) let inductive ?loc ?attrs id vars cstrs 
= - let attr = zf_attr ?loc attrs in - mk_decls ?loc ~recursive:true [inductive ?loc ~attr id vars cstrs] + mk_decls ?loc ~recursive:true [inductive ?loc ?attrs id vars cstrs] let data ?loc ?attrs l = (* this is currently only used for mutually recursive datatypes *) - let attr = zf_attr ?loc attrs in - group_decls ?loc ?attr ~recursive:true l + group_decls ?loc ?attrs ~recursive:true l (* Wrappers for tptp *) let include_ ?loc s l = - let attr = Term.apply ?loc (Term.and_t ()) (List.map Term.const l) in - mk ?loc ~attr (Include s) + let attrs = List.map Term.const l in + mk ?loc ~attrs (Include s) -let tptp ?loc ?annot id role body = - let aux t = +let tptp ?loc ?annot kind id role body = + let attrs = + Term.apply (Term.const Id.tptp_role) [Term.const Id.(mk Attr role)] :: + Term.apply (Term.const Id.tptp_kind) [Term.const Id.(mk Attr kind)] :: match annot with - | None -> t - | Some t' -> Term.colon t t' - in - let attr = aux (Term.apply - (Term.const Id.tptp_role) - [Term.const Id.(mk Attr role)]) + | None -> [] | Some t -> [t] in let descr = match role with | "axiom" @@ -606,12 +583,12 @@ let tptp ?loc ?annot id role body = Format.eprintf "WARNING: unknown tptp formula role: '%s'@." 
role; Pack [] in - mk ~id ?loc ~attr descr + mk ~id ?loc ~attrs descr -let tpi ?loc ?annot id role t = tptp ?loc ?annot id role (`Term t) -let thf ?loc ?annot id role t = tptp ?loc ?annot id role (`Term t) -let tff ?loc ?annot id role t = tptp ?loc ?annot id role (`Term t) -let fof ?loc ?annot id role t = tptp ?loc ?annot id role (`Term t) +let tpi ?loc ?annot id role t = tptp ?loc ?annot "tpi" id role (`Term t) +let thf ?loc ?annot id role t = tptp ?loc ?annot "thf" id role (`Term t) +let tff ?loc ?annot id role t = tptp ?loc ?annot "tff" id role (`Term t) +let fof ?loc ?annot id role t = tptp ?loc ?annot "fof" id role (`Term t) let cnf ?loc ?annot id role t = let l = @@ -620,6 +597,6 @@ let cnf ?loc ?annot id role t = ({ Term.term = Term.Builtin Term.Or; _ }, l); _ } -> l | _ -> [t] in - tptp ?loc ?annot id role (`Clause (t, l)) + tptp ?loc ?annot "cnf" id role (`Clause (t, l)) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/statement.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/statement.mli index 408475136a39525aa16fe24f31efc38f0f784a04..9889f2bc0f7a3d06cd96aac4400a672a551955d5 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/statement.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/statement.mli @@ -29,7 +29,7 @@ type inductive = { vars : term list; cstrs : (Id.t * term list) list; loc : location; - attr : term option; + attrs : term list; } (** The type for inductive type declarations. The "vars" field if used to store polymorphic variables of the inductive type. For instance, @@ -46,7 +46,7 @@ type record = { vars : term list; fields : (Id.t * term) list; loc : location; - attr : term option; + attrs : term list; } (** The type of record definitions. *) @@ -58,9 +58,11 @@ type decl = type def = { id : Id.t; - ty : term; - body : term; loc : location; + vars : term list; + params : term list; + ret_ty : term; + body : term; } (** Term definition. *) @@ -143,9 +145,9 @@ type descr = (** Exit the interactive loop. 
*) and t = { - id : Id.t; + id : Id.t option; descr : descr; - attr : term option; + attrs : term list; loc : location; } (** The type of statements. Statements have optional location and attributes (or annotations). @@ -163,35 +165,31 @@ include Dolmen_intf.Stmt.Logic (** {2 Additional functions} *) val mk_decls : - ?loc:location -> ?attr:term -> recursive:bool -> decl list -> t + ?loc:location -> ?attrs:term list -> recursive:bool -> decl list -> t (** Create a group of declarations *) val mk_defs : - ?loc:location -> ?attr:term -> recursive:bool -> def list -> t + ?loc:location -> ?attrs:term list -> recursive:bool -> def list -> t (** Create a group of declarations *) val prove : ?loc:location -> unit -> t (** Emit a [Prove] statement. *) -val pack : ?id:Id.t -> ?loc:location -> ?attr:term -> t list -> t +val pack : ?id:Id.t -> ?loc:location -> ?attrs:term list -> t list -> t (** Pack a list of statements into a single one. *) (** {2 Printing functions} *) -val pp : Buffer.t -> t -> unit val print : Format.formatter -> t -> unit (** Printing functions for statements. *) -val pp_decl : Buffer.t -> decl -> unit val print_decl : Format.formatter -> decl -> unit (* Printer for declarations. *) -val pp_def : Buffer.t -> def -> unit val print_def : Format.formatter -> def -> unit (* Printer for declarations. *) -val pp_group : (Buffer.t -> 'a -> unit) -> Buffer.t -> 'a group -> unit val print_group : (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a group -> unit diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/stats.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/stats.ml new file mode 100644 index 0000000000000000000000000000000000000000..27274f8041f0bf5511a1e861dcfc36a37aa80401 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/stats.ml @@ -0,0 +1,69 @@ + +(* This file is free software, part of dolmen. 
See file "LICENSE" for more information *) + +(* Global things *) +(* ************************************************************************* *) + +let enabled = ref false + +let wrap_at_exit print t = + at_exit (fun () -> + if !enabled then Format.printf "%a@." print t + ); + t + +(* One-value statistics *) +(* ************************************************************************* *) + +module Float = struct + + type t = { + name : string; + mutable value : float; + } + + let print fmt t = + Format.fprintf fmt + "* %s: @[<hov>%f@]" + t.name t.value + + let create name = + wrap_at_exit print { name; value = nan; } + + let set t v = + t.value <- v + +end + +(* Cumulative statistics *) +(* ************************************************************************* *) + +module Floats = struct + + type t = { + name : string; + mutable count : int; + mutable total_time : float; + } + + let print fmt t = + Format.fprintf fmt + "* @[<v>%s@;\ + + count: %d@;\ + + total: %f@;\ + + mean : %f@]" + t.name + t.count + t.total_time + (t.total_time /. float t.count) + + let create name = + wrap_at_exit print { name; count = 0; total_time = 0.; } + + let add t time = + t.total_time <- t.total_time +. time; + t.count <- t.count + 1; + () + +end + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/stats.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/stats.mli new file mode 100644 index 0000000000000000000000000000000000000000..461212580af0d0f698aef4109799385f0f84c9af --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/stats.mli @@ -0,0 +1,48 @@ + +(* This file is free software, part of dolmen. 
See file "LICENSE" for more information *) + +(* {2 Global things} *) +(* ************************************************************************* *) + +val enabled : bool ref + + +(* {2 One value statistics} *) +(* ************************************************************************* *) + +module Float : sig + + type t + (** Statistics holding exactly one value *) + + val create : string -> t + (** Create a float statistics, with the given name. *) + + val print : Format.formatter -> t -> unit + (** Print the statistic's current value. *) + + val set : t -> float -> unit + (** Set the stat value. *) + +end + + +(* {2 Multiple values statistics} *) +(* ************************************************************************* *) + +module Floats : sig + + type t + (** The type for a time statistics. *) + + val create : string -> t + (** Create a time statistics, with the given name. *) + + val print : Format.formatter -> t -> unit + (** Print the statistic's current values. *) + + val add : t -> float -> unit + (** Add a time lapse to a time statistics. *) + +end + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/tag.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/tag.ml index 526d39d5eb6a5dfb5276df15aaf0aaeb680ad3b3..6f57b9f1b24c9afd025fcba6baa4bca07968f44e 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/tag.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/tag.ml @@ -41,6 +41,9 @@ module type S = sig val add : inj:'a injection -> key -> 'a -> t -> t (** Bind the key to the value, using [inj] *) + + val remove : inj:'a injection -> key -> t -> t + (** Remove the binding to the key. 
*) end module type ORD = sig @@ -62,6 +65,10 @@ module Make(X : ORD) : S with type key = X.t = struct let add ~inj x y map = M.add x (inj.set y) map + + let remove ~inj:_ x map = + M.remove x map + end @@ -77,7 +84,7 @@ type map = M.t type 'a t = { id : int; - inj : 'a list injection; + inj : 'a injection; } let equal k k' = k.id = k'.id @@ -93,18 +100,37 @@ let create () = let empty = M.empty let get m k = - match M.get ~inj:k.inj k.id m with + M.get ~inj:k.inj k.id m + +let get_list m k = + match get m k with | None -> [] | Some l -> l -let last m k = +let get_last m k = match get m k with - | x :: _ -> Some x - | [] -> None + | None -> None + | Some [] -> None + | Some (x :: _) -> Some x -let replace m k l = +let unset m k = + M.remove ~inj:k.inj k.id m + +let set m k l = M.add ~inj:k.inj k.id l m +let set_opt m k = function + | None -> m + | Some v -> set m k v + let add m k v = - replace m k (v :: get m k) + set m k (v :: get_list m k) + +let add_opt m k = function + | None -> m + | Some v -> add m k v + +let add_list m k = function + | [] -> m + | l -> set m k (List.rev_append l (get_list m k)) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/tag.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/tag.mli index c42c6c88f75eb279254759e88e1720d75519303c..c731de2d50d0a22025a6b5ec076889ec472930e0 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/tag.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/tag.mli @@ -23,18 +23,36 @@ val empty : map val create : unit -> 'a t (** Create a new tag. *) -val get : map -> 'a t -> 'a list -(** Get the list of values associated to a tag - (can be the empty list). *) +val get : map -> 'a t -> 'a option +(** Get the value associated to a tag. *) -val last : map -> 'a t -> 'a option -(** Return the last value associated to a tag (i.e. the head of the - list returned by {get} if it exists). 
*) +val get_list : map -> 'a list t -> 'a list +(** Get all the values associated with a tag list, returning + the empty list by default if the tag is not present. *) -val add : map -> 'a t -> 'a -> map -(** Add a value to a tag in a map. If some values were previously bound, - this adds the new value to the head of the value list. *) +val get_last : map -> 'a list t -> 'a option +(** Return the last value associated to a list tag (i.e. the head of the + list returned by {get_list} if it exists). *) -val replace : map -> 'a t -> 'a list -> map -(** Replace the set of values associated to a tag in a map. *) +val set : map -> 'a t -> 'a -> map +(** Set the value bound to a tag. *) + +val set_opt : map -> 'a t -> 'a option -> map +(** Convenient shorthand for an optional set. *) + +val add : map -> 'a list t -> 'a -> map +(** Add a value to a list tag in a map. The new value is enqueued + at the head of the list of values bound. *) + +val add_opt : map -> 'a list t -> 'a option -> map +(** Optionally add a value to a list tag in a map. The new value is enqueued + at the head of the list of values bound. *) + +val add_list : map -> 'a list t -> 'a list -> map +(** Add a list of values to a list tag in a map. The new values are enqueued + at the head of the list of values bound, however it is not guaranteed that + the first value of the given list is the new head of the list of bound values. *) + +val unset : map -> _ t -> map +(** Remove any binding to the given key in the map. 
*) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/term.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/term.ml index 9c0d4dee13b4259290a4b6aaaee1ac70904b3106..98fdb9c7e94cdb5c575779b6061ad609102813ba 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/term.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/term.ml @@ -26,6 +26,8 @@ type builtin = | Subtype (* Function type constructor and subtyping relation *) | Product | Union (* Product and union of types (not set theory) *) + | Pi | Sigma (* Higher-order constant to encode forall and exists quantifiers *) + | Not (* Propositional negation *) | And | Or (* Conjunction and disjunction *) | Nand | Xor | Nor (* Advanced propositional connectives *) @@ -53,7 +55,9 @@ type builtin = type binder = | All | Ex | Pi | Arrow - | Let | Fun (* Standard binders *) + | Let_seq (* sequential let-binding *) + | Let_par (* parrallel let-binding *) + | Fun (* function parameter binding *) | Choice (* Indefinite description, or epsilon terms *) | Description (* Definite description *) @@ -73,7 +77,7 @@ and t = { (* Printing info *) -let infix_builtin n = function +let infix_builtin = function | Add | Sub | Lt | Leq | Gt | Geq @@ -84,8 +88,7 @@ let infix_builtin n = function | Sequent | Subtype | Adt_check | Adt_project | Record_access -> true - | Distinct when n = 2 - -> true + | Distinct -> true | _ -> false let builtin_to_string = function @@ -118,6 +121,8 @@ let builtin_to_string = function | Subtype -> "â" | Product -> "*" | Union -> "âĒ" + | Pi -> "Î " + | Sigma -> "ÎŖ" | Not -> "ÂŦ" | And -> "â§" | Or -> "â¨" @@ -148,12 +153,36 @@ let binder_to_string = function | All -> "â" | Ex -> "â" | Pi -> "Î " - | Arrow -> "â" - | Let -> "let" + | Arrow -> "" + | Let_seq + | Let_par -> "let" | Fun -> "Îģ" | Choice -> "Îĩ" | Description -> "@" +let binder_sep_string = function + | All + | Ex + | Pi + | Let_seq + | Fun + | Choice + | Description -> "" + | Let_par -> " and" + | Arrow -> " â" + +let 
binder_end_string = function + | All + | Ex + | Pi + | Choice + | Description -> "." + | Fun -> "=>" + | Let_seq + | Let_par -> "in" + | Arrow -> "â" + +(* (* Debug printing *) let pp_builtin b builtin = @@ -166,17 +195,16 @@ let rec pp_descr b = function | Symbol id -> Id.pp b id | Builtin s -> pp_builtin b s | Colon (u, v) -> Printf.bprintf b "%a : %a" pp u pp v - | App ({ term = Builtin sep ; _ }, l) when infix_builtin (List.length l) sep -> + | App ({ term = Builtin sep ; _ }, l) when infix_builtin sep -> Misc.pp_list ~pp_sep:pp_builtin ~sep ~pp b l | App (f, l) -> Printf.bprintf b "%a(%a)" pp f (Misc.pp_list ~pp_sep:Buffer.add_string ~sep:"," ~pp) l - | Binder (Arrow as q, l, e) -> - Printf.bprintf b "%a %a %a" - (Misc.pp_list ~pp_sep:Buffer.add_string ~sep:" â " ~pp) l pp_binder q pp e | Binder (q, l, e) -> - Printf.bprintf b "%a %a. %a" pp_binder q - (Misc.pp_list ~pp_sep:Buffer.add_string ~sep:"," ~pp) l pp e + let sep = binder_sep_string q ^ " " in + Printf.bprintf b "%a %a %s %a" pp_binder q + (Misc.pp_list ~pp_sep:Buffer.add_string ~sep ~pp) l + (binder_end_string q) pp e | Match (t, l) -> Printf.bprintf b "match %a with %a" pp t (Misc.pp_list ~pp_sep:Buffer.add_string ~sep:" | " ~pp:pp_match_case) l @@ -188,6 +216,7 @@ and pp b = function | { term = (Symbol _) as d; _ } | { term = (Builtin _) as d; _ } -> pp_descr b d | e -> Printf.bprintf b "(%a)" pp_descr e.term +*) (* Pretty-printing *) @@ -201,23 +230,22 @@ let rec print_descr fmt = function | Symbol id -> Id.print fmt id | Builtin s -> print_builtin fmt s | Colon (u, v) -> Format.fprintf fmt "%a :@ %a" print u print v - | App ({ term = Builtin sep ; _ }, l) when infix_builtin (List.length l) sep -> - Misc.print_list ~print_sep:print_builtin ~sep ~print fmt l + | App ({ term = Builtin b ; _ }, l) when infix_builtin b -> + let pp_sep fmt () = Format.fprintf fmt " %a@ " print_builtin b in + Format.pp_print_list ~pp_sep print fmt l | App (f, []) -> Format.fprintf fmt "%a" print f | App (f, l) -> - 
Format.fprintf fmt "%a@ %a" print f - (Misc.print_list ~print_sep:Format.fprintf ~sep:"@ " ~print) l - | Binder (Arrow as q, l, e) -> - Format.fprintf fmt "%a %a@ %a" - (Misc.print_list ~print_sep:Format.fprintf ~sep:"â@ " ~print) l - print_binder q print e + let pp_sep = Format.pp_print_space in + Format.fprintf fmt "%a@ %a" print f (Format.pp_print_list ~pp_sep print) l | Binder (q, l, e) -> - Format.fprintf fmt "%a@ %a.@ %a" print_binder q - (Misc.print_list ~print_sep:Format.fprintf ~sep:"@ " ~print) l print e + let pp_sep fmt () = Format.fprintf fmt "%s@ " (binder_sep_string q) in + Format.fprintf fmt "%a@ %a@ %s@ %a" print_binder q + (Format.pp_print_list ~pp_sep print) l (binder_end_string q) print e | Match (t, l) -> + let pp_sep fmt () = Format.fprintf fmt " | " in Format.fprintf fmt "match %a with %a" print t - (Misc.print_list ~print_sep:Format.fprintf ~sep:" | " ~print:print_match_case) l + (Format.pp_print_list ~pp_sep print_match_case) l and print_match_case fmt (pattern, branch) = Format.fprintf fmt "%a => %a" print pattern print branch @@ -337,6 +365,9 @@ let equiv_t = builtin Equiv let implies_t = builtin Imply let implied_t = builtin Implied +let pi_t = builtin Pi +let sigma_t = builtin Sigma + let void = builtin Void let true_ = builtin True let false_ = builtin False @@ -458,7 +489,8 @@ let record_access ?loc t id = (* {2 Binders} *) let pi = mk_bind Pi -let letin = mk_bind Let +let letin = mk_bind Let_seq +let letand = mk_bind Let_par let exists = mk_bind Ex let forall = mk_bind All let lambda = mk_bind Fun @@ -476,7 +508,7 @@ let rec free_vars acc t = match t.term with | Builtin _ -> acc | Colon (t, t') -> free_vars (free_vars acc t) t' - | Symbol i -> if i.Id.ns = Id.Var then S.add i acc else acc + | Symbol i -> if i.Id.ns = Namespace.Var then S.add i acc else acc | App (t, l) -> List.fold_left free_vars (free_vars acc t) l | Binder (Arrow, l, t) -> @@ -559,12 +591,17 @@ let binary ?loc s = const ?loc Id.(mk (Value Binary) s) let sexpr ?loc l 
= apply ?loc (const Id.(mk Attr "$data")) l +let par ?loc vars t = + let vars = List.map (fun v -> colon ?loc v (tType ?loc ())) vars in + forall ?loc vars t + + (* {2 Wrappers for tptp} *) let rat ?loc s = const ?loc Id.(mk (Value Rational) s) let distinct = const -let var ?loc id = const ?loc { id with Id.ns = Id.Var } +let var ?loc id = const ?loc { id with Id.ns = Var } let ite ?loc a b c = apply ?loc (ite_t ?loc ()) [a; b; c] @@ -580,7 +617,7 @@ let subtype ?loc a b = apply ?loc (subtype_t ?loc ()) [a; b] (* {2 Wrappers for Zf} *) let quoted ?loc name = - const ?loc Id.({ name; ns = Attr}) + const ?loc (Id.mk Attr name) (* {2 Term traversal} *) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/term.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/term.mli index e76f2e3c662fb238c9cc074676116c1f9a91d6ad..2aade5677dc50095771ede7f95e40d7ccea4a1d8 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/term.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/term.mli @@ -75,6 +75,11 @@ type builtin = | Union (** Union type constructor *) + | Pi + (** Pi: higher-order encoding of the forall quantifier as a constant. *) + | Sigma + (** Sigma: higher-order envoding of the exists quantifier of a constant. *) + | Not (** Propositional negation *) | And @@ -146,7 +151,7 @@ type binder = a variable (optionnally typed using the {!Colon} constructor. *) | Arrow (** The arrow binder, for function types. Allows for curified types, if wanted. *) - | Let + | Let_seq (** Let bindings (either propositional or for terms). Term bound by a let can have many forms depending on the language, but usual shapes are: @@ -158,6 +163,14 @@ type binder = and a term/proposition (e.g. in tptp) - a variable and a term juxtaposed using the {!Colon} constructor (e.g. in smtlib) *) + | Let_par + (** Similar to [Let_seq]; except that the list of bindings should be considered all + bound at the same time/level/scope. 
+ More precisely, for [Let_seq], the list of bindings is to be understood + sequentially (i.e. [Let_seq (b1 :: b2 ...)] is semantically the same as + [Let_seq b1 (Let_seq b2 (..))]. For [Let_par], the list of bindings all + happen at the same time: the defining expressions of each binding cannot + refer to other bindings in the same parralel let-binding. *) | Fun (** Lambda, i.e function abstraction binder. Bound terms are the variables bound by the lambda, optionnally typed @@ -203,7 +216,6 @@ val equal : t -> t -> bool val compare : t -> t -> int (** Equality and comparison *) -val pp : Buffer.t -> t -> unit val print : Format.formatter -> t -> unit val print_builtin : Format.formatter -> builtin -> unit (** Printing functionson buffer and formatters. *) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/timer.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/timer.ml new file mode 100644 index 0000000000000000000000000000000000000000..1eb4f2fe23cdcf6663b18134e0c55926511f16df --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/timer.ml @@ -0,0 +1,24 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + +(* Simple timer *) +(* ************************************************************************* *) + +type t = { + mutable start : float; (* start of the timer *) +} + +let start () = + (* allocate the recors before calling gettimeofday to + avoid counting the time of the record allocation in the + timer. *) + let t = { start = 0.; } in + t.start <- Unix.gettimeofday (); + t + +let stop t = + let stop = Unix.gettimeofday () in + stop -. 
t.start + + + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/timer.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/timer.mli new file mode 100644 index 0000000000000000000000000000000000000000..4848a45b6c04a4eb0e80354b9b0998838b48d7ad --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/timer.mli @@ -0,0 +1,15 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + +(** {2 Simple timer} *) +(* ************************************************************************* *) + +type t +(** The type of a timer. *) + +val start : unit -> t +(** Start a timer. *) + +val stop : t -> float +(** Stop a timer and return the total time of the timer. *) + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/transformer.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/transformer.ml index 5ca22e28de3f6d18c9593e3e6884843073191ebf..75927f48c79f97484577f1ae83cde7a6cf24bbe7 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/transformer.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/standard/transformer.ml @@ -61,31 +61,34 @@ module Make Parser.MenhirInterpreter.current_state_number env | _ -> assert false (* this cannot happen, I promise *) - let error_message token checkpoint = + + let error_message token checkpoint = let s = state checkpoint in match token with | None -> - (fun fmt -> Format.fprintf fmt "Syntax error@ with@ missing@ token@ read,@ \ - please@ report upstream,@ ^^") + `Regular (fun fmt -> Format.fprintf fmt "Syntax error@ with@ missing@ token@ read,@ \ + please@ report upstream,@ ^^") | Some tok -> let tok_descr = Lexer.descr tok in begin match String.trim (Ty.error s) with | exception Not_found -> - (fun fmt -> Format.fprintf fmt "Missing@ syntax@ error@ message (state %d),@ \ - please@ report@ upstream,@ ^^" s) + `Regular (fun fmt -> Format.fprintf fmt "Missing@ syntax@ error@ message@ \ + (state %d),@ please@ report@ \ + upstream,@ ^^" s) | "<YOUR SYNTAX ERROR MESSAGE HERE>" -> - 
(fun fmt -> Format.fprintf fmt "Syntax error (state %d)@ while reading %a." - s Tok.print tok_descr) + `Regular (fun fmt -> Format.fprintf fmt "Syntax error (state %d)@ \ + while reading %a." s Tok.print tok_descr) | msg -> begin match Misc.split_on_char '\n' msg with - | error_no :: production :: l -> - let expected = String.concat " " l in - (fun fmt -> Format.fprintf fmt - "@[<v>@[<hv>(%s) while parsing %s,@ read %a,@]@ @[<hov>but expected %a.@]@]" - error_no production Tok.print tok_descr - Format.pp_print_text expected) + | _error_no :: production :: l -> + let prod = fun fmt -> Format.fprintf fmt "%s" production in + let lexed = fun fmt -> Format.fprintf fmt "%a" Tok.print tok_descr in + let expected = + fun fmt -> Format.fprintf fmt "%a" Format.pp_print_text (String.concat " " l) + in + `Advanced (prod, lexed, expected) | _ -> - (fun fmt -> Format.fprintf fmt "Syntax error (state %d)." s) + `Regular (fun fmt -> Format.fprintf fmt "Syntax error (state %d)." s) end end @@ -93,7 +96,7 @@ module Make (* Parsing loop ------------ *) - let parse_aux ~k_exn newline lexbuf checkpoint = + let parse_aux ~k_exn newline sync lexbuf checkpoint = (* Token supplier *) let last_token = ref None in let aux = @@ -106,8 +109,12 @@ module Make res in (* Incremental loop *) - let succeed res = res in + let succeed res = + sync lexbuf; + res + in let fail checkpoint = + sync lexbuf; let pos = Loc.of_lexbuf lexbuf in let msg = error_message !last_token checkpoint in let () = k_exn () in @@ -129,13 +136,14 @@ module Make raise (Loc.Lexing_error (pos, err)) | exception Parser.Error -> let pos = Loc.of_lexbuf lexbuf in - let msg fmt = Format.fprintf fmt "" in + let msg = `Regular (fun fmt -> Format.fprintf fmt "Syntax error") in let () = k_exn () in raise (Loc.Syntax_error (pos, msg)) | exception e -> + let bt = Printexc.get_raw_backtrace () in let pos = Loc.of_lexbuf lexbuf in let () = k_exn () in - raise (Loc.Uncaught (pos, e)) + raise (Loc.Uncaught (pos, e, bt)) end in aux @@ 
-145,27 +153,47 @@ module Make let parse_file file = let lexbuf, cleanup = Misc.mk_lexbuf (`File file) in - let newline = Loc.newline file in + let locfile = Loc.mk_file file in + let newline = Loc.newline locfile in + let sync = Loc.update_size locfile in let k_exn () = cleanup () in - let aux = parse_aux ~k_exn newline lexbuf Parser.Incremental.file in - let res = aux () in + let res = parse_aux ~k_exn newline sync lexbuf Parser.Incremental.file () in let () = cleanup () in - res + locfile, res + + let parse_file_lazy file = + let lexbuf, cleanup = Misc.mk_lexbuf (`File file) in + let locfile = Loc.mk_file file in + let newline = Loc.newline locfile in + let sync = Loc.update_size locfile in + let k_exn () = cleanup () in + let res = + lazy ( + let res = + parse_aux ~k_exn newline sync lexbuf Parser.Incremental.file () + in + let () = cleanup () in + res + ) + in + locfile, res let parse_input i = let lexbuf, cleanup = Misc.mk_lexbuf i in - let newline = Loc.newline (Misc.filename_of_input i) in + let locfile = Loc.mk_file (Misc.filename_of_input i) in + let newline = Loc.newline locfile in + let sync = Loc.update_size locfile in if not Ty.incremental then begin (* If incremental mode is not supported, raise an error rather than do weird things. 
*) - let msg fmt = Format.fprintf fmt ": @[<hov>%a@]" + let msg = fun fmt -> Format.fprintf fmt ": @[<hov>%a@]" Format.pp_print_text "Input format does not support incremental parsing" in - raise (Loc.Syntax_error (Loc.of_lexbuf lexbuf, msg)) + raise (Loc.Syntax_error (Loc.of_lexbuf lexbuf, `Regular msg)) end; - let k_exn () = Dolmen_line.consume lexbuf in - let aux = parse_aux ~k_exn newline lexbuf Parser.Incremental.input in - aux, cleanup + let k_exn () = Dolmen_line.consume ~newline ~sync lexbuf in + let aux = parse_aux ~k_exn newline sync lexbuf Parser.Incremental.input in + locfile, aux, cleanup end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arith.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arith.ml index 930690bfc3917bd03fea2ff195ced80398406daa..9b62c41b7b16f19ae1900c7eb8d8ef9c7fe5c820 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arith.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arith.ml @@ -2,6 +2,102 @@ module Id = Dolmen.Std.Id module Term = Dolmen.Std.Term +(* Ae arithmetic *) +(* ************************************************************************ *) +module Ae = struct + + module Tff + (Type : Tff_intf.S) + (Ty : Dolmen.Intf.Ty.Ae_Arith with type t := Type.Ty.t) + (T : Dolmen.Intf.Term.Ae_Arith with type t := Type.T.t + and type ty := Type.Ty.t) = struct + + type _ Type.err += + | Expected_arith_type : Type.Ty.t -> Term.t Type.err + + let dispatch1 env (mk_int, mk_real) ast t = + let ty = T.ty t in + match Ty.view ty with + | `Int -> mk_int t + | `Real -> mk_real t + | _ -> Type._error env (Ast ast) (Expected_arith_type ty) + + let dispatch2 env (mk_int, mk_real) ast a b = + let tya = T.ty a in + match Ty.view tya with + | `Int -> mk_int a b + | `Real -> mk_real a b + | _ -> Type._error env (Ast ast) (Expected_arith_type tya) + + + let parse env s = + match s with + (* Types *) + | Type.Builtin Term.Int -> + `Ty (Base.app0 (module Type) env s Ty.int) + | Type.Builtin Term.Real -> + `Ty (Base.app0 
(module Type) env s Ty.real) + + (* Literals *) + | Type.Id { Id.ns = Value Integer; name = Simple name; } -> + `Term (Base.app0 (module Type) env s (T.int name)) + | Type.Id { Id.ns = Value Real; name = Simple name; } -> + `Term (Base.app0 (module Type) env s (T.real name)) + + (* Arithmetic *) + | Type.Builtin Term.Minus -> + `Term (Base.term_app1_ast (module Type) env s + (dispatch1 env (T.Int.minus, T.Real.minus))) + | Type.Builtin Term.Add -> + `Term (Base.term_app2_ast (module Type) env s + (dispatch2 env (T.Int.add, T.Real.add))) + | Type.Builtin Term.Sub -> + `Term (Base.term_app2_ast (module Type) env s + (dispatch2 env (T.Int.sub, T.Real.sub))) + | Type.Builtin Term.Mult -> + `Term (Base.term_app2_ast (module Type) env s + (dispatch2 env (T.Int.mul, T.Real.mul))) + | Type.Builtin Term.Div -> + `Term (Base.term_app2_ast (module Type) env s + (dispatch2 env (T.Int.div_e, T.Real.div))) + | Type.Builtin Term.Mod -> + `Term (Base.term_app2 (module Type) env s T.Int.rem_e) + | Type.Builtin Term.Int_pow -> + `Term (Base.term_app2 (module Type) env s T.Int.pow) + | Type.Builtin Term.Real_pow -> + `Term (fun ast args -> + Base.term_app2 (module Type) env s + (fun a b -> + let tya = T.ty a in + let a', b' = + match Ty.view tya, Ty.view (T.ty b) with + | `Real, `Real -> a, b + | `Real, `Int -> a, T.Int.to_real b + | `Int, `Real -> T.Int.to_real a, b + | `Int, `Int -> T.Int.to_real a, T.Int.to_real b + | _ -> Type._error env (Ast ast) (Expected_arith_type tya) + in + T.Real.pow a' b') ast args + ) + | Type.Builtin Term.Lt -> + `Term (Base.term_app2_ast (module Type) env s + (dispatch2 env (T.Int.lt, T.Real.lt))) + | Type.Builtin Term.Leq -> + `Term (Base.term_app2_ast (module Type) env s + (dispatch2 env (T.Int.le, T.Real.le))) + | Type.Builtin Term.Gt -> + `Term (Base.term_app2_ast (module Type) env s + (dispatch2 env (T.Int.gt, T.Real.gt))) + | Type.Builtin Term.Geq -> + `Term (Base.term_app2_ast (module Type) env s + (dispatch2 env (T.Int.ge, T.Real.ge))) + + (* 
Catch-all *) + | _ -> `Not_found + + end +end + (* Smtlib arithmetic (integer and reals) *) (* ************************************************************************ *) @@ -92,19 +188,19 @@ module Smtlib2 = struct let rec view ~parse version env (t : Term.t) = match t.term with - | Symbol { Id.ns = Id.Value Id.Integer; name } -> Numeral name - | Symbol { Id.ns = Id.Value Id.Real; name } -> Decimal name + | Symbol { ns = Value Integer; name = Simple name } -> Numeral name + | Symbol { ns = Value Real; name = Simple name } -> Decimal name - | App ({ term = Symbol { Id.ns = Id.Term; name = "-"; }; _ }, [e]) + | App ({ term = Symbol { Id.ns = Term; name = Simple "-"; }; _ }, [e]) -> Negation e - | App ({ term = Symbol { Id.ns = Id.Term; name = "+"; }; _ }, ((_ :: _) as args)) + | App ({ term = Symbol { Id.ns = Term; name = Simple "+"; }; _ }, ((_ :: _) as args)) -> Addition args - | App ({ term = Symbol { Id.ns = Id.Term; name = "-"; }; _ }, ((_ :: _) as args)) + | App ({ term = Symbol { Id.ns = Term; name = Simple "-"; }; _ }, ((_ :: _) as args)) -> Subtraction args - | App ({ term = Symbol { Id.ns = Id.Term; name = "/"; }; _ }, [a; b]) + | App ({ term = Symbol { Id.ns = Term; name = Simple "/"; }; _ }, [a; b]) -> Division (a, b) | Symbol id -> view_id ~parse version env id [] @@ -126,7 +222,7 @@ module Smtlib2 = struct the Type.find_bound function. 
*) and view_id ~parse version env id args = match Type.find_var env id with - | `Letin (e, _, _) -> view ~parse version env e + | `Letin (env, e, _, _) -> view ~parse version env e | #Type.var -> begin match args with | [] -> Variable id @@ -549,13 +645,13 @@ module Smtlib2 = struct let rec parse ~arith version env s = match s with (* type *) - | Type.Id { Id.ns = Id.Sort; name = "Int"; } -> - `Ty (Base.app0 (module Type) env "Int" Ty.int) + | Type.Id { Id.ns = Sort; name = Simple "Int"; } -> + `Ty (Base.app0 (module Type) env s Ty.int) (* values *) - | Type.Id { Id.ns = Id.Value Id.Integer; name; } -> - `Term (Base.app0 (module Type) env name (T.mk name)) + | Type.Id { Id.ns = Value Integer; name = Simple name; } -> + `Term (Base.app0 (module Type) env s (T.mk name)) (* terms *) - | Type.Id ({ Id.ns = Id.Term; name; } as id) -> + | Type.Id { Id.ns = Term; name = Simple name; } -> begin match name with | "-" -> `Term (fun ast args -> match args with @@ -563,47 +659,49 @@ module Smtlib2 = struct check env (F.minus arith (parse ~arith) version env) ast args; T.minus (Type.parse_term env x) | _ -> - Base.term_app_left (module Type) env "-" T.sub ast args + Base.term_app_left (module Type) env s T.sub ast args ~check:(check env (F.sub arith (parse ~arith) version env)) ) | "+" -> - `Term (Base.term_app_left (module Type) env "+" T.add + `Term (Base.term_app_left (module Type) env s T.add ~check:(check env (F.add arith (parse ~arith) version env))) | "*" -> - `Term (Base.term_app_left (module Type) env "*" T.mul + `Term (Base.term_app_left (module Type) env s T.mul ~check:(check env (F.mul arith (parse ~arith) version env))) | "div" -> - `Term (Base.term_app_left (module Type) env "div" T.div + `Term (Base.term_app_left (module Type) env s T.div ~check:(check env (F.ediv arith))) | "mod" -> - `Term (Base.term_app2 (module Type) env "mod" T.rem + `Term (Base.term_app2 (module Type) env s T.rem ~check:(check2 env (F.mod_ arith))) | "abs" -> - `Term (Base.term_app1 (module 
Type) env "abs" T.abs + `Term (Base.term_app1 (module Type) env s T.abs ~check:(check1 env (F.abs arith))) | "<=" -> - `Term (Base.term_app_chain (module Type) env "<=" T.le + `Term (Base.term_app_chain (module Type) env s T.le ~check:(check env (F.comp arith (parse ~arith) version env))) | "<" -> - `Term (Base.term_app_chain (module Type) env "<" T.lt + `Term (Base.term_app_chain (module Type) env s T.lt ~check:(check env (F.comp arith (parse ~arith) version env))) | ">=" -> - `Term (Base.term_app_chain (module Type) env ">=" T.ge + `Term (Base.term_app_chain (module Type) env s T.ge ~check:(check env (F.comp arith (parse ~arith) version env))) | ">" -> - `Term (Base.term_app_chain (module Type) env ">" T.gt + `Term (Base.term_app_chain (module Type) env s T.gt ~check:(check env (F.comp arith (parse ~arith) version env))) - | _ -> Base.parse_id id - ~k:(function _ -> `Not_found) - ~err:(Base.bad_ty_index_arity (module Type) env) - [ - "divisible", `Unary (function n -> - `Term (Base.term_app1 (module Type) env - "divisible" (T.divisible n) - ~check:(check1 env (F.divisible arith))) - ); - ] + | _ -> `Not_found end + | Type.Id { Id.ns = Term; name = Indexed { basename; indexes; }; } -> + Base.parse_indexed basename indexes + ~k:(function () -> `Not_found) + ~err:(Base.bad_ty_index_arity (module Type) env) (function + | "divisible" -> + `Unary (function n -> + `Term (Base.term_app1 (module Type) env s (T.divisible n) + ~check:(check1 env (F.divisible arith))) + ) + | _ -> `Not_indexed + ) | _ -> `Not_found end @@ -633,13 +731,13 @@ module Smtlib2 = struct let rec parse ~arith version env s = match s with (* type *) - | Type.Id { Id.ns = Id.Sort; name = "Real"; } -> - `Ty (Base.app0 (module Type) env "Real" Ty.real) + | Type.Id { Id.ns = Sort; name = Simple "Real"; } -> + `Ty (Base.app0 (module Type) env s Ty.real) (* values *) - | Type.Id { Id.ns = Id.Value (Id.Integer | Id.Real); name; } -> - `Term (Base.app0 (module Type) env name (T.mk name)) + | Type.Id { Id.ns 
= Value (Integer | Real); name = Simple name; } -> + `Term (Base.app0 (module Type) env s (T.mk name)) (* terms *) - | Type.Id { Id.ns = Id.Term; name; } -> + | Type.Id { Id.ns = Term; name = Simple name; } -> begin match name with | "-" -> `Term (fun ast args -> match args with @@ -647,29 +745,29 @@ module Smtlib2 = struct check env (F.minus arith (parse ~arith) version env) ast args; T.minus (Type.parse_term env x) | _ -> - Base.term_app_left (module Type) env "-" T.sub ast args + Base.term_app_left (module Type) env s T.sub ast args ~check:(check env (F.sub arith (parse ~arith) version env)) ) | "+" -> - `Term (Base.term_app_left (module Type) env "+" T.add + `Term (Base.term_app_left (module Type) env s T.add ~check:(check env (F.add arith (parse ~arith) version env))) | "*" -> - `Term (Base.term_app_left (module Type) env "*" T.mul + `Term (Base.term_app_left (module Type) env s T.mul ~check:(check env (F.mul arith (parse ~arith) version env))) | "/" -> - `Term (Base.term_app_left (module Type) env "/" T.div + `Term (Base.term_app_left (module Type) env s T.div ~check:(check env (F.div arith (parse ~arith) version env))) | "<=" -> - `Term (Base.term_app_chain (module Type) env "<=" T.le + `Term (Base.term_app_chain (module Type) env s T.le ~check:(check env (F.comp arith (parse ~arith) version env))) | "<" -> - `Term (Base.term_app_chain (module Type) env "<" T.lt + `Term (Base.term_app_chain (module Type) env s T.lt ~check:(check env (F.comp arith (parse ~arith) version env))) | ">=" -> - `Term (Base.term_app_chain (module Type) env ">=" T.ge + `Term (Base.term_app_chain (module Type) env s T.ge ~check:(check env (F.comp arith (parse ~arith) version env))) | ">" -> - `Term (Base.term_app_chain (module Type) env ">" T.gt + `Term (Base.term_app_chain (module Type) env s T.gt ~check:(check env (F.comp arith (parse ~arith) version env))) | _ -> `Not_found end @@ -729,85 +827,88 @@ module Smtlib2 = struct match s with (* type *) - | Type.Id { Id.ns = Id.Sort; name 
= "Int"; } -> - `Ty (Base.app0 (module Type) env "Int" Ty.int) - | Type.Id { Id.ns = Id.Sort; name = "Real"; } -> - `Ty (Base.app0 (module Type) env "Real" Ty.real) + | Type.Id { Id.ns = Sort; name = Simple "Int"; } -> + `Ty (Base.app0 (module Type) env s Ty.int) + | Type.Id { Id.ns = Sort; name = Simple "Real"; } -> + `Ty (Base.app0 (module Type) env s Ty.real) (* values *) - | Type.Id { Id.ns = Id.Value Id.Integer; name; } -> - `Term (Base.app0 (module Type) env name (T.Int.mk name)) - | Type.Id { Id.ns = Id.Value Id.Real; name; } -> - `Term (Base.app0 (module Type) env name (T.Real.mk name)) + | Type.Id { Id.ns = Value Integer; name = Simple name; } -> + `Term (Base.app0 (module Type) env s (T.Int.mk name)) + | Type.Id { Id.ns = Value Real; name = Simple name; } -> + `Term (Base.app0 (module Type) env s (T.Real.mk name)) (* terms *) - | Type.Id ({ Id.ns = Id.Term; name; } as id) -> + | Type.Id { Id.ns = Term; name = Simple name; } -> begin match name with | "-" -> `Term (fun ast args -> match args with | [_] -> - Base.term_app1_ast (module Type) env "-" + Base.term_app1_ast (module Type) env s (dispatch1 env (T.Int.minus, T.Real.minus)) ast args ~check:(check1 env (F.minus arith (parse ~arith) version env)) | _ -> - Base.term_app_left_ast (module Type) env "-" + Base.term_app_left_ast (module Type) env s (dispatch2 env (T.Int.sub, T.Real.sub)) ast args ~check:(check env (F.sub arith (parse ~arith) version env)) ) | "+" -> - `Term (Base.term_app_left_ast (module Type) env "+" + `Term (Base.term_app_left_ast (module Type) env s (dispatch2 env (T.Int.add, T.Real.add)) ~check:(check env (F.add arith (parse ~arith) version env))) | "*" -> - `Term (Base.term_app_left_ast (module Type) env "*" + `Term (Base.term_app_left_ast (module Type) env s (dispatch2 env (T.Int.mul, T.Real.mul)) ~check:(check env (F.mul arith (parse ~arith) version env))) | "div" -> - `Term (Base.term_app_left (module Type) env "div" T.Int.div + `Term (Base.term_app_left (module Type) env s 
T.Int.div ~check:(check env (F.ediv arith))) | "mod" -> - `Term (Base.term_app2 (module Type) env "mod" T.Int.rem + `Term (Base.term_app2 (module Type) env s T.Int.rem ~check:(check2 env (F.mod_ arith))) | "abs" -> - `Term (Base.term_app1 (module Type) env "abs" T.Int.abs + `Term (Base.term_app1 (module Type) env s T.Int.abs ~check:(check1 env (F.abs arith))) | "/" -> - `Term (Base.term_app_left_ast (module Type) env "/" + `Term (Base.term_app_left_ast (module Type) env s (promote_int_to_real env T.Real.div) ~check:(check env (F.div arith (parse ~arith) version env))) | "<=" -> - `Term (Base.term_app_chain_ast (module Type) env "<=" + `Term (Base.term_app_chain_ast (module Type) env s (dispatch2 env (T.Int.le, T.Real.le)) ~check:(check env (F.comp arith (parse ~arith) version env))) | "<" -> - `Term (Base.term_app_chain_ast (module Type) env "<" + `Term (Base.term_app_chain_ast (module Type) env s (dispatch2 env (T.Int.lt, T.Real.lt)) ~check:(check env (F.comp arith (parse ~arith) version env))) | ">=" -> - `Term (Base.term_app_chain_ast (module Type) env ">=" + `Term (Base.term_app_chain_ast (module Type) env s (dispatch2 env (T.Int.ge, T.Real.ge)) ~check:(check env (F.comp arith (parse ~arith) version env))) | ">" -> - `Term (Base.term_app_chain_ast (module Type) env ">" + `Term (Base.term_app_chain_ast (module Type) env s (dispatch2 env (T.Int.gt, T.Real.gt)) ~check:(check env (F.comp arith (parse ~arith) version env))) | "to_real" -> - `Term (Base.term_app1 (module Type) env "to_real" T.Int.to_real) + `Term (Base.term_app1 (module Type) env s T.Int.to_real) | "to_int" -> - `Term (Base.term_app1 (module Type) env "to_int" T.Real.to_int) + `Term (Base.term_app1 (module Type) env s T.Real.floor_to_int) | "is_int" -> - `Term (Base.term_app1 (module Type) env "is_int" T.Real.is_int) - - | _ -> Base.parse_id id - ~k:(function _ -> `Not_found) - ~err:(Base.bad_ty_index_arity (module Type) env) - [ - "divisible", `Unary (function n -> - `Term (Base.term_app1 (module 
Type) env - "divisible" (T.Int.divisible n) - ~check:(check1 env (F.divisible arith)))); - ] + `Term (Base.term_app1 (module Type) env s T.Real.is_int) + + | _ -> `Not_found end + | Type.Id { Id.ns = Term; name = Indexed { basename; indexes; }; } -> + Base.parse_indexed basename indexes + ~k:(function () -> `Not_found) + ~err:(Base.bad_ty_index_arity (module Type) env) (function + | "divisible" -> + `Unary (function n -> + `Term (Base.term_app1 (module Type) env s (T.Int.divisible n) + ~check:(check1 env (F.divisible arith)))) + | _ -> `Not_indexed + ) + | _ -> `Not_found end @@ -824,7 +925,7 @@ module Tptp = struct module Tff (Type : Tff_intf.S) (Ty : Dolmen.Intf.Ty.Tptp_Arith with type t := Type.Ty.t) - (T : Dolmen.Intf.Term.Tptp_Arith with type t := Type.T.t + (T : Dolmen.Intf.Term.Tptp_Tff_Arith with type t := Type.T.t and type ty := Type.Ty.t) = struct type _ Type.err += @@ -856,94 +957,94 @@ module Tptp = struct match s with (* type *) - | Type.Id { Id.ns = Id.Term; name = "$int"; } -> - `Ty (Base.app0 (module Type) env "$int" Ty.int) - | Type.Id { Id.ns = Id.Term; name = "$rat"; } -> - `Ty (Base.app0 (module Type) env "$rat" Ty.rat) - | Type.Id { Id.ns = Id.Term; name = "$real"; } -> - `Ty (Base.app0 (module Type) env "$real" Ty.real) + | Type.Id { Id.ns = Term; name = Simple "$int"; } -> + `Ty (Base.app0 (module Type) env s Ty.int) + | Type.Id { Id.ns = Term; name = Simple "$rat"; } -> + `Ty (Base.app0 (module Type) env s Ty.rat) + | Type.Id { Id.ns = Term; name = Simple "$real"; } -> + `Ty (Base.app0 (module Type) env s Ty.real) (* Literals *) - | Type.Id { Id.ns = Id.Value Id.Integer; name; } -> - `Term (Base.app0 (module Type) env name (T.int name)) - | Type.Id { Id.ns = Id.Value Id.Rational; name; } -> - `Term (Base.app0 (module Type) env name (T.rat name)) - | Type.Id { Id.ns = Id.Value Id.Real; name; } -> - `Term (Base.app0 (module Type) env name (T.real name)) + | Type.Id { Id.ns = Value Integer; name = Simple name; } -> + `Term (Base.app0 (module 
Type) env s (T.int name)) + | Type.Id { Id.ns = Value Rational; name = Simple name; } -> + `Term (Base.app0 (module Type) env s (T.rat name)) + | Type.Id { Id.ns = Value Real; name = Simple name; } -> + `Term (Base.app0 (module Type) env s (T.real name)) (* terms *) - | Type.Id { Id.ns = Id.Term; name = "$less"; } -> - `Term (Base.term_app2_ast (module Type) env "$less" + | Type.Id { Id.ns = Term; name = Simple "$less"; } -> + `Term (Base.term_app2_ast (module Type) env s (dispatch2 env (T.Int.lt, T.Rat.lt, T.Real.lt))) - | Type.Id { Id.ns = Id.Term; name = "$lesseq"; } -> - `Term (Base.term_app2_ast (module Type) env "$lesseq" + | Type.Id { Id.ns = Term; name = Simple "$lesseq"; } -> + `Term (Base.term_app2_ast (module Type) env s (dispatch2 env (T.Int.le, T.Rat.le, T.Real.le))) - | Type.Id { Id.ns = Id.Term; name = "$greater"; } -> - `Term (Base.term_app2_ast (module Type) env "$greater" + | Type.Id { Id.ns = Term; name = Simple "$greater"; } -> + `Term (Base.term_app2_ast (module Type) env s (dispatch2 env (T.Int.gt, T.Rat.gt, T.Real.gt))) - | Type.Id { Id.ns = Id.Term; name = "$greatereq"; } -> - `Term (Base.term_app2_ast (module Type) env "$greatereq" + | Type.Id { Id.ns = Term; name = Simple "$greatereq"; } -> + `Term (Base.term_app2_ast (module Type) env s (dispatch2 env (T.Int.ge, T.Rat.ge, T.Real.ge))) - | Type.Id { Id.ns = Id.Term; name = "$uminus"; } -> - `Term (Base.term_app1_ast (module Type) env "$uminus" + | Type.Id { Id.ns = Term; name = Simple "$uminus"; } -> + `Term (Base.term_app1_ast (module Type) env s (dispatch1 env (T.Int.minus, T.Rat.minus, T.Real.minus))) - | Type.Id { Id.ns = Id.Term; name = "$sum"; } -> - `Term (Base.term_app2_ast (module Type) env "$sum" + | Type.Id { Id.ns = Term; name = Simple "$sum"; } -> + `Term (Base.term_app2_ast (module Type) env s (dispatch2 env (T.Int.add, T.Rat.add, T.Real.add))) - | Type.Id { Id.ns = Id.Term; name = "$difference"; } -> - `Term (Base.term_app2_ast (module Type) env "$difference" + | Type.Id { 
Id.ns = Term; name = Simple "$difference"; } -> + `Term (Base.term_app2_ast (module Type) env s (dispatch2 env (T.Int.sub, T.Rat.sub, T.Real.sub))) - | Type.Id { Id.ns = Id.Term; name = "$product"; } -> - `Term (Base.term_app2_ast (module Type) env "$product" + | Type.Id { Id.ns = Term; name = Simple "$product"; } -> + `Term (Base.term_app2_ast (module Type) env s (dispatch2 env (T.Int.mul, T.Rat.mul, T.Real.mul))) - | Type.Id { Id.ns = Id.Term; name = "$quotient"; } -> - `Term (Base.term_app2_ast (module Type) env "$quotient" (fun ast a b -> + | Type.Id { Id.ns = Term; name = Simple "$quotient"; } -> + `Term (Base.term_app2_ast (module Type) env s (fun ast a b -> (dispatch2 env (_invalid env ast Ty.int, T.Rat.div, T.Real.div)) ast a b )) - | Type.Id { Id.ns = Id.Term; name = "$quotient_e"; } -> - `Term (Base.term_app2_ast (module Type) env "$quotient_e" + | Type.Id { Id.ns = Term; name = Simple "$quotient_e"; } -> + `Term (Base.term_app2_ast (module Type) env s (dispatch2 env (T.Int.div_e, T.Rat.div_e, T.Real.div_e))) - | Type.Id { Id.ns = Id.Term; name = "$remainder_e"; } -> - `Term (Base.term_app2_ast (module Type) env "$remainder_e" + | Type.Id { Id.ns = Term; name = Simple "$remainder_e"; } -> + `Term (Base.term_app2_ast (module Type) env s (dispatch2 env (T.Int.rem_e, T.Rat.rem_e, T.Real.rem_e))) - | Type.Id { Id.ns = Id.Term; name = "$quotient_t"; } -> - `Term (Base.term_app2_ast (module Type) env "$quotient_t" + | Type.Id { Id.ns = Term; name = Simple "$quotient_t"; } -> + `Term (Base.term_app2_ast (module Type) env s (dispatch2 env (T.Int.div_t, T.Rat.div_t, T.Real.div_t))) - | Type.Id { Id.ns = Id.Term; name = "$remainder_t"; } -> - `Term (Base.term_app2_ast (module Type) env "$remainder_t" + | Type.Id { Id.ns = Term; name = Simple "$remainder_t"; } -> + `Term (Base.term_app2_ast (module Type) env s (dispatch2 env (T.Int.rem_t, T.Rat.rem_t, T.Real.rem_t))) - | Type.Id { Id.ns = Id.Term; name = "$quotient_f"; } -> - `Term (Base.term_app2_ast (module Type) 
env "$quotient_f" + | Type.Id { Id.ns = Term; name = Simple "$quotient_f"; } -> + `Term (Base.term_app2_ast (module Type) env s (dispatch2 env (T.Int.div_f, T.Rat.div_f, T.Real.div_f))) - | Type.Id { Id.ns = Id.Term; name = "$remainder_f"; } -> - `Term (Base.term_app2_ast (module Type) env "$remainder_f" + | Type.Id { Id.ns = Term; name = Simple "$remainder_f"; } -> + `Term (Base.term_app2_ast (module Type) env s (dispatch2 env (T.Int.rem_f, T.Rat.rem_f, T.Real.rem_f))) - | Type.Id { Id.ns = Id.Term; name = "$floor"; } -> - `Term (Base.term_app1_ast (module Type) env "$floor" + | Type.Id { Id.ns = Term; name = Simple "$floor"; } -> + `Term (Base.term_app1_ast (module Type) env s (dispatch1 env (T.Int.floor, T.Rat.floor, T.Real.floor))) - | Type.Id { Id.ns = Id.Term; name = "$ceiling"; } -> - `Term (Base.term_app1_ast (module Type) env "$ceiling" + | Type.Id { Id.ns = Term; name = Simple "$ceiling"; } -> + `Term (Base.term_app1_ast (module Type) env s (dispatch1 env (T.Int.ceiling, T.Rat.ceiling, T.Real.ceiling))) - | Type.Id { Id.ns = Id.Term; name = "$truncate"; } -> - `Term (Base.term_app1_ast (module Type) env "$truncate" + | Type.Id { Id.ns = Term; name = Simple "$truncate"; } -> + `Term (Base.term_app1_ast (module Type) env s (dispatch1 env (T.Int.truncate, T.Rat.truncate, T.Real.truncate))) - | Type.Id { Id.ns = Id.Term; name = "$round"; } -> - `Term (Base.term_app1_ast (module Type) env "$round" + | Type.Id { Id.ns = Term; name = Simple "$round"; } -> + `Term (Base.term_app1_ast (module Type) env s (dispatch1 env (T.Int.round, T.Rat.round, T.Real.round))) - | Type.Id { Id.ns = Id.Term; name = "$is_int"; } -> - `Term (Base.term_app1_ast (module Type) env "$is_int" + | Type.Id { Id.ns = Term; name = Simple "$is_int"; } -> + `Term (Base.term_app1_ast (module Type) env s (dispatch1 env (T.Int.is_int, T.Rat.is_int, T.Real.is_int))) - | Type.Id { Id.ns = Id.Term; name = "$is_rat"; } -> - `Term (Base.term_app1_ast (module Type) env "$is_rat" + | Type.Id { Id.ns = 
Term; name = Simple "$is_rat"; } -> + `Term (Base.term_app1_ast (module Type) env s (dispatch1 env (T.Int.is_rat, T.Rat.is_rat, T.Real.is_rat))) - | Type.Id { Id.ns = Id.Term; name = "$to_int"; } -> - `Term (Base.term_app1_ast (module Type) env "$to_int" + | Type.Id { Id.ns = Term; name = Simple "$to_int"; } -> + `Term (Base.term_app1_ast (module Type) env s (dispatch1 env (T.Int.to_int, T.Rat.to_int, T.Real.to_int))) - | Type.Id { Id.ns = Id.Term; name = "$to_rat"; } -> - `Term (Base.term_app1_ast (module Type) env "$to_rat" + | Type.Id { Id.ns = Term; name = Simple "$to_rat"; } -> + `Term (Base.term_app1_ast (module Type) env s (dispatch1 env (T.Int.to_rat, T.Rat.to_rat, T.Real.to_rat))) - | Type.Id { Id.ns = Id.Term; name = "$to_real"; } -> - `Term (Base.term_app1_ast (module Type) env "$to_real" + | Type.Id { Id.ns = Term; name = Simple "$to_real"; } -> + `Term (Base.term_app1_ast (module Type) env s (dispatch1 env (T.Int.to_real, T.Rat.to_real, T.Real.to_real))) (* Catch-all *) @@ -953,18 +1054,47 @@ module Tptp = struct end -(* Ae arithmetic *) +(* Zf arithmetic *) (* ************************************************************************ *) -module Ae = struct +module Zf = struct - module Tff - (Type : Tff_intf.S) - (Ty : Dolmen.Intf.Ty.Ae_Arith with type t := Type.Ty.t) - (T : Dolmen.Intf.Term.Ae_Arith with type t := Type.T.t - and type ty := Type.Ty.t) = struct + module Thf + (Type : Thf_intf.S) + (Ty : Dolmen.Intf.Ty.Zf_Arith with type t := Type.Ty.t) + (T : Dolmen.Intf.Term.Zf_Arith with type t := Type.T.t) = struct + let parse env s = + match s with + (* Types *) + | Type.Builtin Term.Int -> + `Ty (Base.app0 (module Type) env s Ty.int) + (* Literals *) + | Type.Id { Id.ns = Value Integer; name = Simple name; } -> + `Term (Base.app0 (module Type) env s (T.int name)) + + (* Arithmetic *) + | Type.Builtin Term.Minus -> + `Term (Base.term_app1 (module Type) env s T.Int.minus) + | Type.Builtin Term.Add -> + `Term (Base.term_app2 (module Type) env s 
T.Int.add) + | Type.Builtin Term.Sub -> + `Term (Base.term_app2 (module Type) env s T.Int.sub) + | Type.Builtin Term.Mult -> + `Term (Base.term_app2 (module Type) env s T.Int.mul) + | Type.Builtin Term.Lt -> + `Term (Base.term_app2 (module Type) env s T.Int.lt) + | Type.Builtin Term.Leq -> + `Term (Base.term_app2 (module Type) env s T.Int.le) + | Type.Builtin Term.Gt -> + `Term (Base.term_app2 (module Type) env s T.Int.gt) + | Type.Builtin Term.Geq -> + `Term (Base.term_app2 (module Type) env s T.Int.ge) + + (* Catch-all *) + | _ -> `Not_found end end + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arith.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arith.mli index 2c0d74396dff4648415bc8cf2d20df8bab048945..1118727ea604b62257c8cc2be95ad9c5f3b6d217 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arith.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arith.mli @@ -1,4 +1,24 @@ +(** AE Integer Arithmetic *) +module Ae: sig + + module Tff + (Type : Tff_intf.S) + (Ty : Dolmen.Intf.Ty.Ae_Arith with type t := Type.Ty.t) + (T : Dolmen.Intf.Term.Ae_Arith with type t := Type.T.t + and type ty := Type.Ty.t) : sig + + type _ Type.err += + | Expected_arith_type : Type.Ty.t -> Dolmen.Std.Term.t Type.err + (** Error raised when an arithmetic type was expected (i.e. either + int or real), but another type was found. *) + (** Additional errors specific to arithmetic typing. 
*) + + val parse : Type.builtin_symbols + end + +end + (** Smtlib Integer and Real Arithmetic *) module Smtlib2 : sig @@ -97,7 +117,7 @@ module Tptp : sig module Tff (Type : Tff_intf.S) (Ty : Dolmen.Intf.Ty.Tptp_Arith with type t := Type.Ty.t) - (T : Dolmen.Intf.Term.Tptp_Arith with type t := Type.T.t + (T : Dolmen.Intf.Term.Tptp_Tff_Arith with type t := Type.T.t and type ty := Type.Ty.t) : sig type _ Type.err += @@ -114,3 +134,16 @@ module Tptp : sig end end + +(** Zf Arithmetic *) +module Zf : sig + + module Thf + (Type : Thf_intf.S) + (Ty : Dolmen.Intf.Ty.Zf_Arith with type t := Type.Ty.t) + (T : Dolmen.Intf.Term.Zf_Arith with type t := Type.T.t) : sig + + val parse : Type.builtin_symbols + end + +end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arrays.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arrays.ml index 76ce396a8fe5406639682c170180c7b9b08637fc..88d36e1c4d7528c5c669bf0fbf1c632d248ac8e5 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arrays.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arrays.ml @@ -1,6 +1,41 @@ module Id = Dolmen.Std.Id +(* Ae arrays *) +(* ************************************************************************ *) + +module Ae = struct + + module Tff + (Type : Tff_intf.S) + (Ty : Dolmen.Intf.Ty.Ae_Array with type t := Type.Ty.t) + (T : Dolmen.Intf.Term.Ae_Array with type t := Type.T.t) = struct + + type _ Type.err += + | Bad_farray_arity : Dolmen.Std.Term.t Type.err + + let parse env s = + match s with + | Type.Id { name = Simple "farray"; ns = Term } -> + `Ty ( + fun ast args -> + match args with + | [ty] -> + Ty.array Ty.int (Type.parse_ty env ty) + | [ity; vty] -> + Ty.array (Type.parse_ty env ity) (Type.parse_ty env vty) + | _ -> Type._error env (Ast ast) Bad_farray_arity + ) + | Type.Builtin Array_get -> + `Term (Base.term_app2 (module Type) env s T.select) + | Type.Builtin Array_set -> + `Term (Base.term_app3 (module Type) env s T.store) + | _ -> `Not_found + end + +end + + (* Smtlib 
arrays *) (* ************************************************************************ *) @@ -63,12 +98,12 @@ module Smtlib2 = struct let parse ~arrays _version env s = match s with - | Type.Id { Id.name = "Array"; ns = Id.Sort } -> - `Ty (Base.ty_app2_ast (module Type) env "Array" (mk_array_ty env arrays)) - | Type.Id { Id.name = "select"; ns = Id.Term } -> - `Term (Base.term_app2 (module Type) env "select" T.select) - | Type.Id { Id.name = "store"; ns = Id.Term } -> - `Term (Base.term_app3 (module Type) env "select" T.store) + | Type.Id { name = Simple "Array"; ns = Sort } -> + `Ty (Base.ty_app2_ast (module Type) env s (mk_array_ty env arrays)) + | Type.Id { name = Simple "select"; ns = Term } -> + `Term (Base.term_app2 (module Type) env s T.select) + | Type.Id { name = Simple "store"; ns = Term } -> + `Term (Base.term_app3 (module Type) env s T.store) | _ -> `Not_found end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arrays.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arrays.mli index c1da27eac915a1175cc80e949b0f7d6a21a38628..2e3ac73ee0c30e34330acf216e92181bf31b96bc 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arrays.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/arrays.mli @@ -1,4 +1,24 @@ +(** Ae array builtins *) +module Ae : sig + + module Tff + (Type : Tff_intf.S) + (Ty : Dolmen.Intf.Ty.Ae_Array with type t := Type.Ty.t) + (T : Dolmen.Intf.Term.Ae_Array with type t := Type.T.t) : sig + + type _ Type.err += + | Bad_farray_arity : Dolmen.Std.Term.t Type.err + (** Raised when an array is parametrized + with other than one or two parameters. *) + (** Errors for array type-checking. 
*) + + val parse : Type.builtin_symbols + end + +end + + (** Smtlib array builtins *) module Smtlib2 : sig diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/base.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/base.ml index 8edd31cddbea359ae9bfb2fa7d3e95162258769f..a3f42e0a5945375409ee18d1cb4996f2882d7eb7 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/base.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/base.ml @@ -17,37 +17,34 @@ let rec merge l env s = (* ************************************************************************ *) type 'ret indexed = [ + | `Not_indexed | `Unary of (string -> 'ret) | `Binary of (string -> string -> 'ret) | `Ternary of (string -> string -> string -> 'ret) | `Nary of int * (string list -> 'ret) ] -let parse_id id l ~err ~k = - let rec aux h r r_l = function - | [] -> k (h :: r) - | (s, `Unary f) :: _ when String.equal s h -> - begin match r with - | [x] -> f x - | _ -> err s 1 r_l - end - | (s, `Binary f) :: _ when String.equal s h -> - begin match r with - | [x; y] -> f x y - | _ -> err s 2 r_l - end - | (s, `Ternary f) :: _ when String.equal s h -> - begin match r with - | [x; y; z] -> f x y z - | _ -> err s 3 r_l - end - | (s, `Nary (n, f)) :: _ when String.equal s h -> - if r_l = n then f r else err s n r_l - | _ :: l' -> aux h r r_l l' - in - match Dolmen.Std.Id.split id with - | h :: r -> aux h r (List.length r) l - | r -> k r +let parse_indexed h r f ~err ~k = + let r_l = List.length r in + match f h with + | `Not_indexed -> k () + | `Unary f -> + begin match r with + | [x] -> f x + | _ -> err h 1 r_l + end + | `Binary f -> + begin match r with + | [x; y] -> f x y + | _ -> err h 2 r_l + end + | `Ternary f -> + begin match r with + | [x; y; z] -> f x y z + | _ -> err h 3 r_l + end + | `Nary (n, f') -> + if n = r_l then f' r else err h n r_l let bad_ty_index_arity (type env ty) (module Type: Tff_intf.S with type env = env and type Ty.t = ty) @@ -71,7 +68,7 @@ let bad_term_index_arity (type env 
term) type ('env, 'args, 'ret) helper = (module Tff_intf.S with type env = 'env) -> - 'env -> string -> (Dolmen.Std.Term.t -> 'args -> 'ret) -> + 'env -> Intf.symbol -> (Dolmen.Std.Term.t -> 'args -> 'ret) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'ret) let make_op0 @@ -169,13 +166,13 @@ let map_chain let app0 (type env) (module Type : Tff_intf.S with type env = env) - ?(check=(fun _ -> ())) env name ret = - make_op0 (module Type) env name (fun ast () -> check ast; ret) + ?(check=(fun _ -> ())) env symbol ret = + make_op0 (module Type) env symbol (fun ast () -> check ast; ret) let app0_ast (type env) (module Type : Tff_intf.S with type env = env) - ?(check=(fun _ -> ())) env name mk = - make_op0 (module Type) env name (fun ast () -> check ast; mk ast) + ?(check=(fun _ -> ())) env symbol mk = + make_op0 (module Type) env symbol (fun ast () -> check ast; mk ast) (* Unary applications *) @@ -183,29 +180,29 @@ let app0_ast let ty_app1 (type env) (type ty) (module Type : Tff_intf.S with type env = env and type Ty.t = ty) - ?(check=(fun _ _ -> ())) env name mk = - make_op1 (module Type) env name + ?(check=(fun _ _ -> ())) env symbol mk = + make_op1 (module Type) env symbol (fun ast t -> check ast t; mk (Type.parse_ty env t)) let ty_app1_ast (type env) (type ty) (module Type : Tff_intf.S with type env = env and type Ty.t = ty) - ?(check=(fun _ _ -> ())) env name mk = - make_op1 (module Type) env name + ?(check=(fun _ _ -> ())) env symbol mk = + make_op1 (module Type) env symbol (fun ast t -> check ast t; mk ast (Type.parse_ty env t)) let term_app1 (type env) (type term) (module Type : Tff_intf.S with type env = env and type T.t = term) - ?(check=(fun _ _ -> ())) env name mk = - make_op1 (module Type) env name + ?(check=(fun _ _ -> ())) env symbol mk = + make_op1 (module Type) env symbol (fun ast t -> check ast t; mk (Type.parse_term env t)) let term_app1_ast (type env) (type term) (module Type : Tff_intf.S with type env = env and type T.t = term) - ?(check=(fun _ 
_ -> ())) env name mk = - make_op1 (module Type) env name + ?(check=(fun _ _ -> ())) env symbol mk = + make_op1 (module Type) env symbol (fun ast t -> check ast t; mk ast (Type.parse_term env t)) @@ -214,29 +211,29 @@ let term_app1_ast let ty_app2 (type env) (type ty) (module Type : Tff_intf.S with type env = env and type Ty.t = ty) - ?(check=(fun _ _ _ -> ())) env name mk = - make_op2 (module Type) env name (fun ast (a, b) -> + ?(check=(fun _ _ _ -> ())) env symbol mk = + make_op2 (module Type) env symbol (fun ast (a, b) -> check ast a b; mk (Type.parse_ty env a) (Type.parse_ty env b)) let ty_app2_ast (type env) (type ty) (module Type : Tff_intf.S with type env = env and type Ty.t = ty) - ?(check=(fun _ _ _ -> ())) env name mk = - make_op2 (module Type) env name (fun ast (a, b) -> + ?(check=(fun _ _ _ -> ())) env symbol mk = + make_op2 (module Type) env symbol (fun ast (a, b) -> check ast a b; mk ast (Type.parse_ty env a) (Type.parse_ty env b)) let term_app2 (type env) (type term) (module Type : Tff_intf.S with type env = env and type T.t = term) - ?(check=(fun _ _ _ -> ())) env name mk = - make_op2 (module Type) env name (fun ast (a, b) -> + ?(check=(fun _ _ _ -> ())) env symbol mk = + make_op2 (module Type) env symbol (fun ast (a, b) -> check ast a b; mk (Type.parse_term env a) (Type.parse_term env b)) let term_app2_ast (type env) (type term) (module Type : Tff_intf.S with type env = env and type T.t = term) - ?(check=(fun _ _ _ -> ())) env name mk = - make_op2 (module Type) env name (fun ast (a, b) -> + ?(check=(fun _ _ _ -> ())) env symbol mk = + make_op2 (module Type) env symbol (fun ast (a, b) -> check ast a b; mk ast (Type.parse_term env a) (Type.parse_term env b)) @@ -245,32 +242,32 @@ let term_app2_ast let ty_app3 (type env) (type ty) (module Type : Tff_intf.S with type env = env and type Ty.t = ty) - ?(check=(fun _ _ _ _ -> ())) env name mk = - make_op3 (module Type) env name (fun ast (a, b, c) -> + ?(check=(fun _ _ _ _ -> ())) env symbol mk = + make_op3 
(module Type) env symbol (fun ast (a, b, c) -> check ast a b c; mk (Type.parse_ty env a) (Type.parse_ty env b) (Type.parse_ty env c)) let ty_app3_ast (type env) (type ty) (module Type : Tff_intf.S with type env = env and type Ty.t = ty) - ?(check=(fun _ _ _ _ -> ())) env name mk = - make_op3 (module Type) env name (fun ast (a, b, c) -> + ?(check=(fun _ _ _ _ -> ())) env symbol mk = + make_op3 (module Type) env symbol (fun ast (a, b, c) -> check ast a b c; mk ast (Type.parse_ty env a) (Type.parse_ty env b) (Type.parse_ty env c)) let term_app3 (type env) (type term) (module Type : Tff_intf.S with type env = env and type T.t = term) - ?(check=(fun _ _ _ _ -> ())) env name mk = - make_op3 (module Type) env name (fun ast (a, b, c) -> + ?(check=(fun _ _ _ _ -> ())) env symbol mk = + make_op3 (module Type) env symbol (fun ast (a, b, c) -> check ast a b c; mk (Type.parse_term env a) (Type.parse_term env b) (Type.parse_term env c)) let term_app3_ast (type env) (type term) (module Type : Tff_intf.S with type env = env and type T.t = term) - ?(check=(fun _ _ _ _ -> ())) env name mk = - make_op3 (module Type) env name (fun ast (a, b, c) -> + ?(check=(fun _ _ _ _ -> ())) env symbol mk = + make_op3 (module Type) env symbol (fun ast (a, b, c) -> check ast a b c; mk ast (Type.parse_term env a) (Type.parse_term env b) (Type.parse_term env c)) @@ -280,8 +277,8 @@ let term_app3_ast let ty_app4 (type env) (type ty) (module Type : Tff_intf.S with type env = env and type Ty.t = ty) - ?(check=(fun _ _ _ _ _ -> ())) env name mk = - make_op4 (module Type) env name (fun ast (a, b, c, d) -> + ?(check=(fun _ _ _ _ _ -> ())) env symbol mk = + make_op4 (module Type) env symbol (fun ast (a, b, c, d) -> check ast a b c d; mk (Type.parse_ty env a) (Type.parse_ty env b) (Type.parse_ty env c) (Type.parse_ty env d)) @@ -289,8 +286,8 @@ let ty_app4 let ty_app4_ast (type env) (type ty) (module Type : Tff_intf.S with type env = env and type Ty.t = ty) - ?(check=(fun _ _ _ _ _ -> ())) env name mk = - 
make_op4 (module Type) env name (fun ast (a, b, c, d) -> + ?(check=(fun _ _ _ _ _ -> ())) env symbol mk = + make_op4 (module Type) env symbol (fun ast (a, b, c, d) -> check ast a b c d; mk ast (Type.parse_ty env a) (Type.parse_ty env b) (Type.parse_ty env c) (Type.parse_ty env d)) @@ -298,8 +295,8 @@ let ty_app4_ast let term_app4 (type env) (type term) (module Type : Tff_intf.S with type env = env and type T.t = term) - ?(check=(fun _ _ _ _ _ -> ())) env name mk = - make_op4 (module Type) env name (fun ast (a, b, c, d) -> + ?(check=(fun _ _ _ _ _ -> ())) env symbol mk = + make_op4 (module Type) env symbol (fun ast (a, b, c, d) -> check ast a b c d; mk (Type.parse_term env a) (Type.parse_term env b) (Type.parse_term env c) (Type.parse_term env d)) @@ -307,28 +304,46 @@ let term_app4 let term_app4_ast (type env) (type term) (module Type : Tff_intf.S with type env = env and type T.t = term) - ?(check=(fun _ _ _ _ _ -> ())) env name mk = - make_op4 (module Type) env name (fun ast (a, b, c, d) -> + ?(check=(fun _ _ _ _ _ -> ())) env symbol mk = + make_op4 (module Type) env symbol (fun ast (a, b, c, d) -> check ast a b c d; mk ast (Type.parse_term env a) (Type.parse_term env b) (Type.parse_term env c) (Type.parse_term env d)) +(* N-ary application *) + +let term_app_list + (type env) (type term) + (module Type : Tff_intf.S with type env = env and type T.t = term) + ?(check=(fun _ -> ())) env _symbol mk = (fun _ast args -> + List.iter check args; + mk (List.map (Type.parse_term env) args) + ) + +let term_app_list_ast + (type env) (type term) + (module Type : Tff_intf.S with type env = env and type T.t = term) + ?(check=(fun _ -> ())) env _symbol mk = (fun ast args -> + List.iter check args; + mk ast (List.map (Type.parse_term env) args) + ) + (* Left associative applications *) let term_app_left (type env) (type term) (module Type : Tff_intf.S with type env = env and type T.t = term) - ?(check=(fun _ _ -> ())) env name mk = - make_assoc (module Type) env name (fun ast l 
-> + ?(check=(fun _ _ -> ())) env symbol mk = + make_assoc (module Type) env symbol (fun ast l -> check ast l; fold_left_assoc mk (List.map (Type.parse_term env) l)) let term_app_left_ast (type env) (type term) (module Type : Tff_intf.S with type env = env and type T.t = term) - ?(check=(fun _ _ -> ())) env name mk = - make_assoc (module Type) env name (fun ast l -> + ?(check=(fun _ _ -> ())) env symbol mk = + make_assoc (module Type) env symbol (fun ast l -> check ast l; fold_left_assoc (mk ast) (List.map (Type.parse_term env) l)) @@ -338,16 +353,16 @@ let term_app_left_ast let term_app_right (type env) (type term) (module Type : Tff_intf.S with type env = env and type T.t = term) - ?(check=(fun _ _ -> ())) env name mk = - make_assoc (module Type) env name (fun ast l -> + ?(check=(fun _ _ -> ())) env symbol mk = + make_assoc (module Type) env symbol (fun ast l -> check ast l; fold_right_assoc mk (List.map (Type.parse_term env) l)) let term_app_right_ast (type env) (type term) (module Type : Tff_intf.S with type env = env and type T.t = term) - ?(check=(fun _ _ -> ())) env name mk = - make_assoc (module Type) env name (fun ast l -> + ?(check=(fun _ _ -> ())) env symbol mk = + make_assoc (module Type) env symbol (fun ast l -> check ast l; fold_right_assoc (mk ast) (List.map (Type.parse_term env) l)) @@ -357,8 +372,8 @@ let term_app_right_ast let term_app_chain (type env) (type term) (module Type : Tff_intf.S with type env = env and type T.t = term) - ?(check=(fun _ _ -> ())) env name mk = - make_chain (module Type) env name (fun ast l -> + ?(check=(fun _ _ -> ())) env symbol mk = + make_chain (module Type) env symbol (fun ast l -> check ast l; let l' = List.map (Type.parse_term env) l in map_chain (module Type) mk l' @@ -367,11 +382,31 @@ let term_app_chain let term_app_chain_ast (type env) (type term) (module Type : Tff_intf.S with type env = env and type T.t = term) - ?(check=(fun _ _ -> ())) env name mk = - make_chain (module Type) env name (fun ast l -> + 
?(check=(fun _ _ -> ())) env symbol mk = + make_chain (module Type) env symbol (fun ast l -> check ast l; let l' = List.map (Type.parse_term env) l in map_chain (module Type) (mk ast) l' ) +(* Higher-order application *) + +let term_app_cst + (type env) (type term) (type cst) + (module Type : Tff_intf.S with type env = env and type T.t = term and type T.Const.t = cst) + env cst = fun ast args -> + Type.unwrap_term env ast (Type.parse_app_term_cst env ast cst args) + +let term_app_ho + (type env) (type term) + (module Type : Thf_intf.S with type env = env and type T.t = term) + env f = fun ast args -> + Type.unwrap_term env ast (Type.parse_app_ho_term env ast f args) + +let term_app_ho_ast + (type env) (type term) + (module Type : Thf_intf.S with type env = env and type T.t = term) + env f = fun ast args -> + Type.unwrap_term env ast (Type.parse_app_ho_term env ast (f ast) args) + diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/base.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/base.mli index c6e991500f7c9ef547bdde49983d0f33af4793a1..fec33a819b9f4d8fbd9cb4be6850d62fa5c94ba9 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/base.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/base.mli @@ -13,6 +13,7 @@ val merge : (** {2 Smtlib Indexed id helpers} *) type 'ret indexed = [ + | `Not_indexed | `Unary of (string -> 'ret) | `Binary of (string -> string -> 'ret) | `Ternary of (string -> string -> string -> 'ret) @@ -20,20 +21,15 @@ type 'ret indexed = [ ] (** The type of indexed family of operators. *) -val parse_id : - Dolmen.Std.Id.t -> - (string * 'ret indexed) list -> +val parse_indexed : + string -> string list -> + (string -> 'ret indexed) -> err:(string -> int -> int -> 'ret) -> - k:(string list -> 'ret) -> + k:(unit -> 'ret) -> 'ret -(** [parse_id id l ~err ~k] splits [id] (using {split_id}) - into a list. If the list has a head [s] and a tail [l], it tries and find - in the list [l] a pair (s', indexed) such that [s = s']. 
- If the length of [l] matches the arity of [indexed], the provided function - is called, else [err] is called with [s], the arity of [indexed], - and the lenght of [l]. - If no match is found or the split list does not contain a head and a - tail, [k] is called wiht the split list. *) +(** [parse_id basename indexes f ~err ~k] uses the function [f] to + get an expected arity for the indexed identifier, and then tries + and parse the index list according to the returned specification. *) val bad_ty_index_arity : (module Tff_intf.S with type env = 'env and type Ty.t = 'ty) -> @@ -53,7 +49,7 @@ val bad_term_index_arity : type ('env, 'args, 'ret) helper = (module Tff_intf.S with type env = 'env) -> - 'env -> string -> (Dolmen.Std.Term.t -> 'args -> 'ret) -> + 'env -> Intf.symbol -> (Dolmen.Std.Term.t -> 'args -> 'ret) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'ret) val make_op0: (_, unit, _) helper @@ -117,63 +113,63 @@ val map_chain : val app0 : (module Tff_intf.S with type env = 'env) -> ?check:(Dolmen.Std.Term.t -> unit) -> - 'env -> string -> ('ret) -> + 'env -> Intf.symbol -> ('ret) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'ret) val app0_ast : (module Tff_intf.S with type env = 'env) -> ?check:(Dolmen.Std.Term.t -> unit) -> - 'env -> string -> (Dolmen.Std.Term.t -> 'ret) -> + 'env -> Intf.symbol -> (Dolmen.Std.Term.t -> 'ret) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'ret) val ty_app1 : (module Tff_intf.S with type env = 'env and type Ty.t = 'ty) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> ('ty -> 'ty) -> + 'env -> Intf.symbol -> ('ty -> 'ty) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'ty) val ty_app1_ast : (module Tff_intf.S with type env = 'env and type Ty.t = 'ty) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> (Dolmen.Std.Term.t -> 'ty -> 'ty) -> + 'env -> Intf.symbol -> (Dolmen.Std.Term.t -> 'ty -> 'ty) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list 
-> 'ty) val term_app1 : (module Tff_intf.S with type env = 'env and type T.t = 'term) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> ('term -> 'term) -> + 'env -> Intf.symbol -> ('term -> 'term) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) val term_app1_ast : (module Tff_intf.S with type env = 'env and type T.t = 'term) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> (Dolmen.Std.Term.t -> 'term -> 'term) -> + 'env -> Intf.symbol -> (Dolmen.Std.Term.t -> 'term -> 'term) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) val ty_app2 : (module Tff_intf.S with type env = 'env and type Ty.t = 'ty) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> ('ty -> 'ty -> 'ty) -> + 'env -> Intf.symbol -> ('ty -> 'ty -> 'ty) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'ty) val ty_app2_ast : (module Tff_intf.S with type env = 'env and type Ty.t = 'ty) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> (Dolmen.Std.Term.t -> 'ty -> 'ty -> 'ty) -> + 'env -> Intf.symbol -> (Dolmen.Std.Term.t -> 'ty -> 'ty -> 'ty) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'ty) val term_app2 : (module Tff_intf.S with type env = 'env and type T.t = 'term) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> ('term -> 'term -> 'term) -> + 'env -> Intf.symbol -> ('term -> 'term -> 'term) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) val term_app2_ast : (module Tff_intf.S with type env = 'env and type T.t = 'term) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> (Dolmen.Std.Term.t -> 'term -> 'term -> 'term) -> + 'env -> Intf.symbol -> (Dolmen.Std.Term.t -> 'term -> 'term -> 'term) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) @@ -181,28 +177,28 @@ val ty_app3 : (module 
Tff_intf.S with type env = 'env and type Ty.t = 'ty) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> ('ty -> 'ty -> 'ty -> 'ty) -> + 'env -> Intf.symbol -> ('ty -> 'ty -> 'ty -> 'ty) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'ty) val ty_app3_ast : (module Tff_intf.S with type env = 'env and type Ty.t = 'ty) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> (Dolmen.Std.Term.t -> 'ty -> 'ty -> 'ty -> 'ty) -> + 'env -> Intf.symbol -> (Dolmen.Std.Term.t -> 'ty -> 'ty -> 'ty -> 'ty) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'ty) val term_app3 : (module Tff_intf.S with type env = 'env and type T.t = 'term) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> ('term -> 'term -> 'term -> 'term) -> + 'env -> Intf.symbol -> ('term -> 'term -> 'term -> 'term) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) val term_app3_ast : (module Tff_intf.S with type env = 'env and type T.t = 'term) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> (Dolmen.Std.Term.t -> 'term -> 'term -> 'term -> 'term) -> + 'env -> Intf.symbol -> (Dolmen.Std.Term.t -> 'term -> 'term -> 'term -> 'term) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) @@ -210,66 +206,89 @@ val ty_app4 : (module Tff_intf.S with type env = 'env and type Ty.t = 'ty) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> ('ty -> 'ty -> 'ty -> 'ty -> 'ty) -> + 'env -> Intf.symbol -> ('ty -> 'ty -> 'ty -> 'ty -> 'ty) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'ty) val ty_app4_ast : (module Tff_intf.S with type env = 'env and type Ty.t = 'ty) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> 
Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> (Dolmen.Std.Term.t -> 'ty -> 'ty -> 'ty -> 'ty -> 'ty) -> + 'env -> Intf.symbol -> (Dolmen.Std.Term.t -> 'ty -> 'ty -> 'ty -> 'ty -> 'ty) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'ty) val term_app4 : (module Tff_intf.S with type env = 'env and type T.t = 'term) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> ('term -> 'term -> 'term -> 'term -> 'term) -> + 'env -> Intf.symbol -> ('term -> 'term -> 'term -> 'term -> 'term) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) val term_app4_ast : (module Tff_intf.S with type env = 'env and type T.t = 'term) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t -> unit) -> - 'env -> string -> (Dolmen.Std.Term.t -> 'term -> 'term -> 'term -> 'term -> 'term) -> + 'env -> Intf.symbol -> (Dolmen.Std.Term.t -> 'term -> 'term -> 'term -> 'term -> 'term) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) +val term_app_list : + (module Tff_intf.S with type env = 'env and type T.t = 'term) -> + ?check:(Dolmen.Std.Term.t -> unit) -> + 'env -> Intf.symbol -> ('term list -> 'term) -> + (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) + +val term_app_list_ast : + (module Tff_intf.S with type env = 'env and type T.t = 'term) -> + ?check:(Dolmen.Std.Term.t -> unit) -> + 'env -> Intf.symbol -> (Dolmen.Std.Term.t -> 'term list -> 'term) -> + (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) val term_app_left : (module Tff_intf.S with type env = 'env and type T.t = 'term) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> unit) -> - 'env -> string -> ('term -> 'term -> 'term) -> + 'env -> Intf.symbol -> ('term -> 'term -> 'term) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) val term_app_left_ast : (module Tff_intf.S with type env = 'env and 
type T.t = 'term) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> unit) -> - 'env -> string -> (Dolmen.Std.Term.t -> 'term -> 'term -> 'term) -> + 'env -> Intf.symbol -> (Dolmen.Std.Term.t -> 'term -> 'term -> 'term) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) val term_app_right : (module Tff_intf.S with type env = 'env and type T.t = 'term) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> unit) -> - 'env -> string -> ('term -> 'term -> 'term) -> + 'env -> Intf.symbol -> ('term -> 'term -> 'term) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) val term_app_right_ast : (module Tff_intf.S with type env = 'env and type T.t = 'term) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> unit) -> - 'env -> string -> (Dolmen.Std.Term.t -> 'term -> 'term -> 'term) -> + 'env -> Intf.symbol -> (Dolmen.Std.Term.t -> 'term -> 'term -> 'term) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) - val term_app_chain : (module Tff_intf.S with type env = 'env and type T.t = 'term) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> unit) -> - 'env -> string -> ('term -> 'term -> 'term) -> + 'env -> Intf.symbol -> ('term -> 'term -> 'term) -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) val term_app_chain_ast : (module Tff_intf.S with type env = 'env and type T.t = 'term) -> ?check:(Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> unit) -> - 'env -> string -> (Dolmen.Std.Term.t -> 'term -> 'term -> 'term) -> + 'env -> Intf.symbol -> (Dolmen.Std.Term.t -> 'term -> 'term -> 'term) -> + (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) + +val term_app_cst : + (module Tff_intf.S with type env = 'env and type T.t = 'term and type T.Const.t = 'cst) -> + 'env -> 'cst -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) + +val term_app_ho : + (module Thf_intf.S with type env = 'env and type T.t = 'term) -> + 'env -> 'term -> (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) +val term_app_ho_ast : + (module 
Thf_intf.S with type env = 'env and type T.t = 'term) -> + 'env -> (Dolmen.Std.Term.t -> 'term) -> + (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> 'term) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/bitv.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/bitv.ml index 56d4b8a5f001ca0df962605df53e50dfff38ee8b..5a168411c5652629aeebbd3c8b037c1657cbe59a 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/bitv.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/bitv.ml @@ -1,6 +1,46 @@ module Id = Dolmen.Std.Id +(* Ae Bitvector *) +(* ************************************************************************ *) +module Ae = struct + + module Tff + (Type : Tff_intf.S) + (Ty : Dolmen.Intf.Ty.Ae_Bitv with type t := Type.Ty.t) + (T : Dolmen.Intf.Term.Ae_Bitv with type t := Type.T.t) = struct + + type _ Type.err += + | Invalid_bin_char : char -> Dolmen.Std.Term.t Type.err + + let parse_binary env s ast = + match String.iter Misc.Bitv.check_bin s with + | () -> T.mk s + | exception Misc.Bitv.Invalid_char c -> + Type._error env (Ast ast) (Invalid_bin_char c) + + let parse env s = + match s with + + (* Bitvector sort *) + | Type.Builtin (Bitv n) -> + `Ty (Base.app0 (module Type) env s (Ty.bitv n)) + + (* Bitvector litterals *) + | Type.Id { ns = Value Bitvector; name = Simple name; } -> + `Term (Base.app0_ast (module Type) env s (parse_binary env name)) + + (* Bitvector operators *) + | Type.Builtin Bitv_concat -> + `Term (Base.term_app2 (module Type) env s T.concat) + | Type.Builtin (Bitv_extract (l, r)) -> + `Term (Base.term_app1 (module Type) env s (T.extract r l)) + + | _ -> `Not_found + + end +end + (* Smtlib Bitvector *) (* ************************************************************************ *) @@ -38,8 +78,8 @@ module Smtlib2 = struct | exception Misc.Bitv.Invalid_char c -> Type._error env (Ast ast) (Invalid_hex_char c) - let parse_extended_lit env s n = - Base.make_op0 (module Type) env s (fun ast () -> + let parse_extended_lit env symbol 
s n = + Base.make_op0 (module Type) env symbol (fun ast () -> assert (String.length s >= 2); let n = parse_int env ast n in match Misc.Bitv.parse_decimal s n with @@ -61,115 +101,116 @@ module Smtlib2 = struct match s with (* Bitvector sort *) - | Type.Id ({ Id.ns = Id.Sort; _ } as id) -> - Base.parse_id id [ - "BitVec", `Unary (function n_s -> - `Ty (Base.app0_ast (module Type) env "BitVec" (fun ast -> - Ty.bitv (parse_int env ast n_s)))); - ] ~err:(Base.bad_ty_index_arity (module Type) env) - ~k:(fun _ -> `Not_found) + | Type.Id { Id.ns = Sort; name = Indexed { basename; indexes; }; } as symbol -> + Base.parse_indexed basename indexes (function + | "BitVec" -> `Unary (function n_s -> + `Ty (Base.app0_ast (module Type) env symbol (fun ast -> + Ty.bitv (parse_int env ast n_s)))) + | _ -> `Not_indexed + ) ~k:(fun _ -> `Not_found) + ~err:(Base.bad_ty_index_arity (module Type) env) (* Bitvector litterals *) - | Type.Id { Id.ns = Id.Value Id.Binary; name; } -> - `Term (Base.app0_ast (module Type) env name (parse_binary env name)) - | Type.Id { Id.ns = Id.Value Id.Hexadecimal; name; } -> - `Term (Base.app0_ast (module Type) env name (parse_hexa env name)) + | Type.Id { ns = Value Binary; name = Simple name; } -> + `Term (Base.app0_ast (module Type) env s (parse_binary env name)) + | Type.Id { ns = Value Hexadecimal; name = Simple name; } -> + `Term (Base.app0_ast (module Type) env s (parse_hexa env name)) (* terms *) - | Type.Id ({ Id.ns = Id.Term; _ } as id) -> - Base.parse_id id [ - "repeat", `Unary (function i_s -> - `Term (Base.term_app1_ast (module Type) env "repeat" - (indexed1 env T.repeat i_s))); - "zero_extend", `Unary (function i_s -> - `Term (Base.term_app1_ast (module Type) env "zero_extend" - (indexed1 env T.zero_extend i_s))); - "sign_extend", `Unary (function i_s -> - `Term (Base.term_app1_ast (module Type) env "sign_extend" - (indexed1 env T.sign_extend i_s))); - "rotate_right", `Unary (function i_s -> - `Term (Base.term_app1_ast (module Type) env 
"rotate_right" - (indexed1 env T.rotate_right i_s))); - "rotate_left", `Unary (function i_s -> - `Term (Base.term_app1_ast (module Type) env "rotate_left" - (indexed1 env T.rotate_left i_s))); - "extract", `Binary (fun i_s j_s -> - `Term (Base.term_app1_ast (module Type) env "extract" - (indexed2 env T.extract i_s j_s))); - ] ~err:(Base.bad_term_index_arity (module Type) env) - ~k:(function - | [s; n] when (String.length s >= 2 && - s.[0] = 'b' && s.[1] = 'v') -> - `Term (parse_extended_lit env s n) - - | ["bvnot"] -> - `Term (Base.term_app1 (module Type) env "bvnot" T.not) - | ["bvand"] -> - `Term (Base.term_app_left (module Type) env "bvand" T.and_) - | ["bvor"] -> - `Term (Base.term_app_left (module Type) env "bvor" T.or_) - | ["bvnand"] -> - `Term (Base.term_app2 (module Type) env "bvnand" T.nand) - | ["bvnor"] -> - `Term (Base.term_app2 (module Type) env "bvnor" T.nor) - | ["bvxor"] -> - `Term (Base.term_app_left (module Type) env "bvxor" T.xor) - | ["bvxnor"] -> - `Term (Base.term_app_left (module Type) env "bvxnor" T.xnor) - - | ["bvcomp"] -> - `Term (Base.term_app2 (module Type) env "bvcomp" T.comp) - - | ["bvneg"] -> - `Term (Base.term_app1 (module Type) env "bvneg" T.neg) - | ["bvadd"] -> - `Term (Base.term_app_left (module Type) env "bvadd" T.add) - | ["bvsub"] -> - `Term (Base.term_app2 (module Type) env "bvsub" T.sub) - | ["bvmul"] -> - `Term (Base.term_app_left (module Type) env "bvmul" T.mul) - - | ["bvudiv"] -> - `Term (Base.term_app2 (module Type) env "bvudiv" T.udiv) - | ["bvurem"] -> - `Term (Base.term_app2 (module Type) env "bvurem" T.urem) - - | ["bvsdiv"] -> - `Term (Base.term_app2 (module Type) env "bvsdiv" T.sdiv) - | ["bvsrem"] -> - `Term (Base.term_app2 (module Type) env "bvsrem" T.srem) - | ["bvsmod"] -> - `Term (Base.term_app2 (module Type) env "bvsmod" T.smod) - - | ["bvshl"] -> - `Term (Base.term_app2 (module Type) env "bvshl" T.shl) - | ["bvlshr"] -> - `Term (Base.term_app2 (module Type) env "bvlshr" T.lshr) - | ["bvashr"] -> - `Term 
(Base.term_app2 (module Type) env "bvashr" T.ashr) - - | ["bvult"] -> - `Term (Base.term_app2 (module Type) env "bvult" T.ult) - | ["bvule"] -> - `Term (Base.term_app2 (module Type) env "bvule" T.ule) - | ["bvugt"] -> - `Term (Base.term_app2 (module Type) env "bvugt" T.ugt) - | ["bvuge"] -> - `Term (Base.term_app2 (module Type) env "bvuge" T.uge) - - | ["bvslt"] -> - `Term (Base.term_app2 (module Type) env "bvslt" T.slt) - | ["bvsle"] -> - `Term (Base.term_app2 (module Type) env "bvsle" T.sle) - | ["bvsgt"] -> - `Term (Base.term_app2 (module Type) env "bvsgt" T.sgt) - | ["bvsge"] -> - `Term (Base.term_app2 (module Type) env "bvsge" T.sge) - - | ["concat"] -> - `Term (Base.term_app2 (module Type) env "concat" T.concat) - | _ -> `Not_found - ) + | Type.Id { ns = Term; name = Simple "bvnot"; } -> + `Term (Base.term_app1 (module Type) env s T.not) + | Type.Id { ns = Term; name = Simple "bvand"; } -> + `Term (Base.term_app_left (module Type) env s T.and_) + | Type.Id { ns = Term; name = Simple "bvor"; } -> + `Term (Base.term_app_left (module Type) env s T.or_) + | Type.Id { ns = Term; name = Simple "bvnand"; } -> + `Term (Base.term_app2 (module Type) env s T.nand) + | Type.Id { ns = Term; name = Simple "bvnor"; } -> + `Term (Base.term_app2 (module Type) env s T.nor) + | Type.Id { ns = Term; name = Simple "bvxor"; } -> + `Term (Base.term_app_left (module Type) env s T.xor) + | Type.Id { ns = Term; name = Simple "bvxnor"; } -> + `Term (Base.term_app_left (module Type) env s T.xnor) + + | Type.Id { ns = Term; name = Simple "bvcomp"; } -> + `Term (Base.term_app2 (module Type) env s T.comp) + + | Type.Id { ns = Term; name = Simple "bvneg"; } -> + `Term (Base.term_app1 (module Type) env s T.neg) + | Type.Id { ns = Term; name = Simple "bvadd"; } -> + `Term (Base.term_app_left (module Type) env s T.add) + | Type.Id { ns = Term; name = Simple "bvsub"; } -> + `Term (Base.term_app2 (module Type) env s T.sub) + | Type.Id { ns = Term; name = Simple "bvmul"; } -> + `Term 
(Base.term_app_left (module Type) env s T.mul) + + | Type.Id { ns = Term; name = Simple "bvudiv"; } -> + `Term (Base.term_app2 (module Type) env s T.udiv) + | Type.Id { ns = Term; name = Simple "bvurem"; } -> + `Term (Base.term_app2 (module Type) env s T.urem) + + | Type.Id { ns = Term; name = Simple "bvsdiv"; } -> + `Term (Base.term_app2 (module Type) env s T.sdiv) + | Type.Id { ns = Term; name = Simple "bvsrem"; } -> + `Term (Base.term_app2 (module Type) env s T.srem) + | Type.Id { ns = Term; name = Simple "bvsmod"; } -> + `Term (Base.term_app2 (module Type) env s T.smod) + + | Type.Id { ns = Term; name = Simple "bvshl"; } -> + `Term (Base.term_app2 (module Type) env s T.shl) + | Type.Id { ns = Term; name = Simple "bvlshr"; } -> + `Term (Base.term_app2 (module Type) env s T.lshr) + | Type.Id { ns = Term; name = Simple "bvashr"; } -> + `Term (Base.term_app2 (module Type) env s T.ashr) + + | Type.Id { ns = Term; name = Simple "bvult"; } -> + `Term (Base.term_app2 (module Type) env s T.ult) + | Type.Id { ns = Term; name = Simple "bvule"; } -> + `Term (Base.term_app2 (module Type) env s T.ule) + | Type.Id { ns = Term; name = Simple "bvugt"; } -> + `Term (Base.term_app2 (module Type) env s T.ugt) + | Type.Id { ns = Term; name = Simple "bvuge"; } -> + `Term (Base.term_app2 (module Type) env s T.uge) + + | Type.Id { ns = Term; name = Simple "bvslt"; } -> + `Term (Base.term_app2 (module Type) env s T.slt) + | Type.Id { ns = Term; name = Simple "bvsle"; } -> + `Term (Base.term_app2 (module Type) env s T.sle) + | Type.Id { ns = Term; name = Simple "bvsgt"; } -> + `Term (Base.term_app2 (module Type) env s T.sgt) + | Type.Id { ns = Term; name = Simple "bvsge"; } -> + `Term (Base.term_app2 (module Type) env s T.sge) + + | Type.Id { ns = Term; name = Simple "concat"; } -> + `Term (Base.term_app2 (module Type) env s T.concat) + + (* indexed terms *) + | Type.Id { ns = Term; name = Indexed { basename; indexes; } } as symbol -> + Base.parse_indexed basename indexes (function + | 
s when (String.length s >= 2 && s.[0] = 'b' && s.[1] = 'v') -> + `Unary (fun n -> `Term (parse_extended_lit env symbol s n)) + | "repeat" -> `Unary (function i_s -> + `Term (Base.term_app1_ast (module Type) env symbol + (indexed1 env T.repeat i_s))) + | "zero_extend" -> `Unary (function i_s -> + `Term (Base.term_app1_ast (module Type) env symbol + (indexed1 env T.zero_extend i_s))) + | "sign_extend" -> `Unary (function i_s -> + `Term (Base.term_app1_ast (module Type) env symbol + (indexed1 env T.sign_extend i_s))) + | "rotate_right" -> `Unary (function i_s -> + `Term (Base.term_app1_ast (module Type) env symbol + (indexed1 env T.rotate_right i_s))) + | "rotate_left" -> `Unary (function i_s -> + `Term (Base.term_app1_ast (module Type) env symbol + (indexed1 env T.rotate_left i_s))) + | "extract" -> `Binary (fun i_s j_s -> + `Term (Base.term_app1_ast (module Type) env symbol + (indexed2 env T.extract i_s j_s))) + | _ -> `Not_indexed) + ~err:(Base.bad_term_index_arity (module Type) env) + ~k:(function () -> `Not_found) + | _ -> `Not_found end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/bitv.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/bitv.mli index d07e1112830c8647306101c96efbf9917a8f504d..0df18e518b364df438c95790419e592baa8d2e00 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/bitv.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/bitv.mli @@ -1,5 +1,26 @@ + +(** Alt-Ergo bitvector builtins *) +module Ae : sig + + module Tff + (Type : Tff_intf.S) + (Ty : Dolmen.Intf.Ty.Ae_Bitv with type t := Type.Ty.t) + (T : Dolmen.Intf.Term.Ae_Bitv with type t := Type.T.t) : sig + + type _ Type.err += + | Invalid_bin_char : char -> Dolmen.Std.Term.t Type.err + (** Error raised when a character that isn't '0' or '1' is + used inside a bitvector string *) + (** Additional errors specific to Alt-Ergo's bitvectors' typing. 
*) + + val parse : Type.builtin_symbols + + end + +end + (** Smtlib bitvector builtins *) module Smtlib2 : sig diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/core.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/core.ml index e716d2ddafc4ddad7bd6175fdd7ead6c3178e9ae..99bf412b3b073478c65be9874cd93eeba81a084a 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/core.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/core.ml @@ -9,23 +9,117 @@ module Ae = struct module Tff (Type : Tff_intf.S) + (Tag : Dolmen.Intf.Tag.Ae_Base with type 'a t = 'a Type.Tag.t + and type term := Type.T.t) (Ty : Dolmen.Intf.Ty.Ae_Base with type t = Type.Ty.t) (T : Dolmen.Intf.Term.Ae_Base with type t = Type.T.t) = struct + let mk_or a b = T._or [a; b] + let mk_and a b = T._and [a; b] + + let parse_trigger env ast = function + | { Ast.term = Ast.App ( + { Ast.term = Ast.Builtin And; _ }, l + ); _} -> + List.map (Type.parse_term env) l + | _ -> + Type._error env (Ast ast) + (Type.Expected ("A multi-trigger (i.e. 
a list of term patterns)", None)) + let parse env s = match s with + + (* Types *) | Type.Builtin Ast.Bool -> - `Ty (Base.app0 (module Type) env "bool" Ty.bool) + `Ty (Base.app0 (module Type) env s Ty.bool) + | Type.Builtin Ast.Prop -> + `Ty (Base.app0 (module Type) env s Ty.bool) | Type.Builtin Ast.Unit -> - `Ty (Base.app0 (module Type) env "unit" Ty.unit) + `Ty (Base.app0 (module Type) env s Ty.unit) + + (* Constants *) | Type.Builtin Ast.Void -> - `Term (Base.app0 (module Type) env "void" T.void) + `Term (Base.app0 (module Type) env s T.void) + | Type.Builtin Ast.True -> + `Term (Base.app0 (module Type) env s T._true) + | Type.Builtin Ast.False -> + `Term (Base.app0 (module Type) env s T._false) + + (* Boolean operators *) + | Type.Builtin Ast.Not -> + `Term (Base.term_app1 (module Type) env s T.neg) + | Type.Builtin Ast.And -> + `Term (Base.term_app_left (module Type) env s mk_and) + | Type.Builtin Ast.Or -> + `Term (Base.term_app_left (module Type) env s mk_or) + | Type.Builtin Ast.Xor -> + `Term (Base.term_app_left (module Type) env s T.xor) + | Type.Builtin Ast.Imply -> + `Term (Base.term_app_right (module Type) env s T.imply) + | Type.Builtin Ast.Equiv -> + `Term (Base.term_app_right (module Type) env s T.equiv) + + (* If-then-else *) + | Type.Builtin Ast.Ite -> + `Term ( + Base.make_op3 (module Type) env s (fun _ (a, b, c) -> + let cond = Type.parse_prop env a in + let then_ = Type.parse_term env b in + let else_ = Type.parse_term env c in + T.ite cond then_ else_ + ) + ) + + (* Equality *) + | Type.Builtin Ast.Eq -> + `Term (Base.term_app2 (module Type) env s T.eq) + | Type.Builtin Ast.Distinct -> + `Term (Base.term_app_list (module Type) env s T.distinct) + + (* AC (Associative Commutative) symbol *) + | Type.Id { name = Simple "ac"; ns = Attr; }-> + `Tags (fun _ _ -> [Type.Set (Tag.ac, ())]) + + (* Triggers *) + | Type.Id { name = Simple "triggers"; ns = Attr; } -> + `Tags (fun ast l -> + let l = List.map (parse_trigger env ast) l in + [Type.Set 
(Tag.triggers, l)] + ) + + (* Filters *) + | Type.Id { name = Simple "filters"; ns = Attr; } -> + `Tags (fun _ l -> + let l = List.map (Type.parse_prop env) l in + [Type.Set (Tag.filters, l)] + ) + | _ -> `Not_found end end +(* Dimacs builtins *) +(* ************************************************************************ *) + +module Dimacs = struct + + module Tff + (Type : Tff_intf.S) + (T : Dolmen.Intf.Term.Dimacs with type t = Type.T.t) = struct + + let parse env s = + match s with + | Type.Builtin Ast.Not -> + `Term (Base.term_app1 (module Type) env s T.neg) + | _ -> `Not_found + + end + +end + + (* TPTP builtins ($i, $o, etc..) *) (* ************************************************************************ *) @@ -34,26 +128,142 @@ module Tptp = struct module Tff (Type : Tff_intf.S) (Ty : Dolmen.Intf.Ty.Tptp_Base with type t = Type.Ty.t) - (T : Dolmen.Intf.Term.Tptp_Base with type t = Type.T.t) = struct + (T : Dolmen.Intf.Term.Tptp_Tff_Core with type t = Type.T.t) = struct + + let mk_or a b = T._or [a; b] + let mk_and a b = T._and [a; b] let parse _version env s = match s with - (* - | Type.Id ({ Id.name = "$_"; ns = Id.Term } as id) -> - Some (Type.wildcard env ast id args) - *) - | Type.Id { Id.name = "$tType"; ns = Id.Term } -> - `Ttype (Base.app0 (module Type) env "$tType" ()) - | Type.Id { Id.name = "$o"; ns = Id.Term } -> - `Ty (Base.app0 (module Type) env "$o" Ty.prop) - | Type.Id { Id.name = "$i"; ns = Id.Term } -> - `Ty (Base.app0 (module Type) env "$i" Ty.base) - | Type.Id { Id.name = "$true"; ns = Id.Term } -> - `Term (Base.app0 (module Type) env "$true" T._true) - | Type.Id { Id.name = "$false"; ns = Id.Term } -> - `Term (Base.app0 (module Type) env "$false" T._false) + + (* Predefined symbols *) + | Type.Id { name = Simple "$tType"; ns = Term } -> + `Ttype (Base.app0 (module Type) env s ()) + | Type.Id { name = Simple "$o"; ns = Term } -> + `Ty (Base.app0 (module Type) env s Ty.prop) + | Type.Id { name = Simple "$i"; ns = Term } -> + `Ty (Base.app0 
(module Type) env s Ty.base) + | Type.Id { name = Simple "$true"; ns = Term } -> + `Term (Base.app0 (module Type) env s T._true) + | Type.Id { name = Simple "$false"; ns = Term } -> + `Term (Base.app0 (module Type) env s T._false) + + (* Predefined connectives *) + | Type.Builtin Ast.Eq -> + `Term (Base.term_app2 (module Type) env s T.eq) + | Type.Builtin Ast.Distinct -> + `Term (Base.term_app_list (module Type) env s T.distinct) + | Type.Id { name = Simple "$distinct"; ns = Term; } -> + `Term (Base.term_app_list (module Type) env s T.distinct) + + | Type.Builtin Ast.Not -> + `Term (Base.term_app1 (module Type) env s T.neg) + | Type.Builtin Ast.Or -> + `Term (Base.term_app2 (module Type) env s mk_or) + | Type.Builtin Ast.And -> + `Term (Base.term_app2 (module Type) env s mk_and) + | Type.Builtin Ast.Xor -> + `Term (Base.term_app2 (module Type) env s T.xor) + | Type.Builtin Ast.Nor -> + `Term (Base.term_app2 (module Type) env s T.nor) + | Type.Builtin Ast.Nand -> + `Term (Base.term_app2 (module Type) env s T.nand) + | Type.Builtin Ast.Equiv -> + `Term (Base.term_app2 (module Type) env s T.equiv) + | Type.Builtin Ast.Imply -> + `Term (Base.term_app2 (module Type) env s T.imply) + | Type.Builtin Ast.Implied -> + `Term (Base.term_app2 (module Type) env s T.implied) + + (* Ite *) + | Type.Builtin Ast.Ite -> + `Term ( + Base.make_op3 (module Type) env s (fun _ (a, b, c) -> + let cond = Type.parse_prop env a in + let then_ = Type.parse_term env b in + let else_ = Type.parse_term env c in + T.ite cond then_ else_ + ) + ) + + (* Ignore the role and kind attributes *) | Type.Id id when Id.equal id Id.tptp_role -> `Tags (fun _ast _args -> []) + | Type.Id id when Id.equal id Id.tptp_kind -> + `Tags (fun _ast _args -> []) + | _ -> `Not_found + + end + + module Thf + (Type : Thf_intf.S) + (Ty : Dolmen.Intf.Ty.Tptp_Base with type t = Type.Ty.t) + (T : Dolmen.Intf.Term.Tptp_Thf_Core with type t = Type.T.t + and type Const.t = Type.T.Const.t) = struct + + let parse _version env s = 
+ match s with + + (* Ttype and types *) + | Type.Id { name = Simple "$tType"; ns = Term } -> + `Ttype (Base.app0 (module Type) env s ()) + | Type.Id { name = Simple "$o"; ns = Term } -> + `Ty (Base.app0 (module Type) env s Ty.prop) + | Type.Id { name = Simple "$i"; ns = Term } -> + `Ty (Base.app0 (module Type) env s Ty.base) + + (* Predefined symbols *) + | Type.Id { name = Simple "$true"; ns = Term } -> + `Term (Base.term_app_cst (module Type) env T.Const._true) + | Type.Id { name = Simple "$false"; ns = Term } -> + `Term (Base.term_app_cst (module Type) env T.Const._false) + + (* Predefined connectives *) + | Type.Builtin Ast.Eq -> + `Term (Base.term_app_ho_ast (module Type) env + (fun ast -> Type.monomorphize env ast (T.of_cst T.Const.eq))) + | Type.Builtin Ast.Distinct -> + `Term (Base.term_app_list (module Type) env s T.distinct) + | Type.Id { name = Simple "$distinct"; ns = Term; } -> + `Term (Base.term_app_list (module Type) env s T.distinct) + + | Type.Builtin Ast.Not -> + `Term (Base.term_app_cst (module Type) env T.Const.neg) + | Type.Builtin Ast.Or -> + `Term (Base.term_app_cst (module Type) env T.Const.or_) + | Type.Builtin Ast.And -> + `Term (Base.term_app_cst (module Type) env T.Const.and_) + | Type.Builtin Ast.Xor -> + `Term (Base.term_app_cst (module Type) env T.Const.xor) + | Type.Builtin Ast.Nor -> + `Term (Base.term_app_cst (module Type) env T.Const.nor) + | Type.Builtin Ast.Nand -> + `Term (Base.term_app_cst (module Type) env T.Const.nand) + | Type.Builtin Ast.Equiv -> + `Term (Base.term_app_cst (module Type) env T.Const.equiv) + | Type.Builtin Ast.Imply -> + `Term (Base.term_app_cst (module Type) env T.Const.imply) + | Type.Builtin Ast.Implied -> + `Term (Base.term_app_cst (module Type) env T.Const.implied) + + (* Ite *) + | Type.Builtin Ast.Ite -> + `Term (Base.term_app_ho_ast (module Type) env + (fun ast -> Type.monomorphize env ast (T.of_cst T.Const.ite))) + + (* Pi & Sigma *) + | Type.Builtin Ast.Pi -> + `Term (Base.term_app_ho_ast (module 
Type) env + (fun ast -> Type.monomorphize env ast (T.of_cst T.Const.pi))) + | Type.Builtin Ast.Sigma -> + `Term (Base.term_app_ho_ast (module Type) env + (fun ast -> Type.monomorphize env ast (T.of_cst T.Const.sigma))) + + (* Ignore the role and kind attributes *) + | Type.Id id when Id.equal id Id.tptp_role -> + `Tags (fun _ast _args -> []) + | Type.Id id when Id.equal id Id.tptp_kind -> + `Tags (fun _ast _args -> []) | _ -> `Not_found end @@ -73,16 +283,19 @@ module Smtlib2 = struct (T : Dolmen.Intf.Term.Smtlib_Base with type t = Type.T.t and type cstr := Type.T.Cstr.t) = struct - let parse_symbol env = function - | { Ast.term = Ast.Symbol s; _ } - | { Ast.term = Ast.App ({ Ast.term = Ast.Symbol s; _ }, []); _ } -> - Id.full_name s + let parse_name env = function + | ({ Ast.term = Ast.Symbol s; _ } as ast) + | ({ Ast.term = Ast.App ({ Ast.term = Ast.Symbol s; _ }, []); _ } as ast) -> + begin match Dolmen.Std.Id.name s with + | Simple s -> s + | _ -> Type._error env (Ast ast) (Type.Expected ("simple name", None)) + end | ast -> Type._error env (Ast ast) (Type.Expected ("symbol", None)) let parse_sexpr_list env = function | { Ast.term = Ast.App ( - { Ast.term = Ast.Symbol { Id.name = "$data"; ns = Id.Attr }; _ }, + { Ast.term = Ast.Symbol { name = Simple "$data"; ns = Attr }; _ }, l); _} -> l | ast -> @@ -97,52 +310,61 @@ module Smtlib2 = struct let parse _version env s = match s with (* Bool sort and constants *) - | Type.Id { Id.name = "Bool"; ns = Id.Sort } -> - `Ty (Base.app0 (module Type) env "Bool" Ty.prop) - | Type.Id { Id.name = "true"; ns = Id.Term } -> - `Term (Base.app0 (module Type) env "true" Type.T._true) - | Type.Id { Id.name = "false"; ns = Id.Term } -> - `Term (Base.app0 (module Type) env "false" Type.T._false) + | Type.Id { name = Simple "Bool"; ns = Sort } -> + `Ty (Base.app0 (module Type) env s Ty.prop) + | Type.Id { name = Simple "true"; ns = Term } -> + `Term (Base.app0 (module Type) env s T._true) + | Type.Id { name = Simple "false"; ns = 
Term } -> + `Term (Base.app0 (module Type) env s T._false) (* Boolean operators *) - | Type.Id { Id.name = "not"; ns = Id.Term } -> - `Term (Base.term_app1 (module Type) env "not" Type.T.neg) - | Type.Id { Id.name = "and"; ns = Id.Term } -> - `Term (fun ast args -> parse_f env ast (Ast.and_t ()) args) - | Type.Id { Id.name = "or"; ns = Id.Term } -> - `Term (fun ast args -> parse_f env ast (Ast.or_t ()) args) - | Type.Id { Id.name = "xor"; ns = Id.Term } -> - `Term (Base.term_app_left (module Type) env "xor" Type.T.xor) - | Type.Id { Id.name = "=>"; ns = Id.Term } -> - `Term (Base.term_app_right (module Type) env "=>" Type.T.imply) + | Type.Id { name = Simple "not"; ns = Term } -> + `Term (Base.term_app1 (module Type) env s T.neg) + | Type.Id { name = Simple "and"; ns = Term } -> + `Term (Base.term_app_list (module Type) env s T._and) + | Type.Id { name = Simple "or"; ns = Term } -> + `Term (Base.term_app_list (module Type) env s T._or) + | Type.Id { name = Simple "xor"; ns = Term } -> + `Term (Base.term_app_left (module Type) env s T.xor) + | Type.Id { name = Simple "=>"; ns = Term } -> + `Term (Base.term_app_right (module Type) env s T.imply) (* If-then-else *) - | Type.Id { Id.name = "ite"; ns = Id.Term } -> - `Term (fun ast args -> parse_f env ast (Ast.ite_t ()) args) + | Type.Id { name = Simple "ite"; ns = Term } -> + `Term ( + Base.make_op3 (module Type) env s (fun _ (a, b, c) -> + let cond = Type.parse_prop env a in + let then_ = Type.parse_term env b in + let else_ = Type.parse_term env c in + T.ite cond then_ else_ + ) + ) (* Equality *) - | Type.Id { Id.name = "distinct"; ns = Id.Term } -> - `Term (fun ast args -> parse_f env ast (Ast.neq_t ()) args) - | Type.Id { Id.name = "="; ns = Id.Term } -> - `Term (fun _ast args -> T.eqs (List.map (Type.parse_term env) args)) + | Type.Id { name = Simple "distinct"; ns = Term } -> + `Term (fun _ast args -> + let args = List.map (Type.parse_term env) args in + T.distinct args) + | Type.Id { name = Simple "="; ns = 
Term } -> + `Term (Base.term_app_chain (module Type) env s T.eq) (* Named formulas *) - | Type.Id { Id.name = ":named"; ns = Id.Attr } -> - `Tags (Base.make_op1 (module Type) env ":named" (fun _ t -> - let name = parse_symbol env t in - [Type.Any (Tag.named, name)] + | Type.Id { name = Simple ":named"; ns = Attr } -> + `Tags (Base.make_op1 (module Type) env s (fun _ t -> + let name = parse_name env t in + [Type.Set (Tag.named, name)] )) (* Trigger annotations *) - | Type.Id { Id.name = ":pattern"; ns = Id.Attr } -> - `Tags (Base.make_op1 (module Type) env ":pattern" (fun _ t -> + | Type.Id { name = Simple ":pattern"; ns = Attr } -> + `Tags (Base.make_op1 (module Type) env s (fun _ t -> let l = parse_sexpr_list env t in let l = List.map (Type.parse_term env) l in - [Type.Any (Tag.triggers, l)] + [Type.Add (Tag.triggers, l)] )) (* N-ary s-expressions in attributes *) - | Type.Id { Id.name = "$data"; ns = Id.Attr } -> + | Type.Id { name = Simple "$data"; ns = Attr } -> `Term (fun ast args -> begin match args with | f :: r -> parse_f env ast f r @@ -152,20 +374,22 @@ module Smtlib2 = struct (* Rewrite rules *) | Type.Id id when Id.equal id Id.rwrt_rule -> - `Tags (fun _ast _args -> [Type.Any (Tag.rwrt, ())]) + `Tags (fun _ast _args -> [Type.Set (Tag.rwrt, ())]) (* ADT testers *) - | Type.Id ({ Id.ns = Id.Term; _ } as id) -> - Base.parse_id id [ - "is", `Unary (function s -> - let id = Id.mk Id.Term s in - begin match Type.find_bound env id with - | `Cstr c -> - `Term (Base.term_app1 (module Type) env "is" (T.cstr_tester c)) - | _ -> `Not_found - end); - ] ~err:(fun _ _ _ -> `Not_found) + | Type.Id { Id.ns = Term; name = Indexed { basename; indexes; } } as symbol -> + Base.parse_indexed basename indexes ~k:(fun _ -> `Not_found) + ~err:(fun _ _ _ -> `Not_found) (function + | "is" -> `Unary (function s -> + let id = Id.mk Term s in + begin match Type.find_bound env id with + | `Cstr c -> + `Term (Base.term_app1 (module Type) env symbol (T.cstr_tester c)) + | _ -> 
`Not_found + end) + | _ -> `Not_indexed + ) | _ -> `Not_found @@ -180,27 +404,68 @@ module Zf = struct module Tff (Type : Tff_intf.S) - (Tag : Dolmen.Intf.Tag.Zf_Base with type 'a t = 'a Type.Tag.t) = struct + (Tag : Dolmen.Intf.Tag.Zf_Base with type 'a t = 'a Type.Tag.t) + (Ty : Dolmen.Intf.Ty.Zf_Base with type t = Type.Ty.t) + (T : Dolmen.Intf.Term.Zf_Base with type t = Type.T.t) = struct + + let mk_or a b = T._or [a; b] + let mk_and a b = T._and [a; b] let parse env s = match s with + (* Types *) + | Type.Builtin Ast.Prop -> + `Ty (Base.app0 (module Type) env s Ty.prop) + + (* Terms *) + | Type.Builtin Ast.True -> + `Term (Base.app0 (module Type) env s T._true) + | Type.Builtin Ast.False -> + `Term (Base.app0 (module Type) env s T._false) + | Type.Builtin Ast.Not -> + `Term (Base.term_app1 (module Type) env s T.neg) + | Type.Builtin Ast.Or -> + `Term (Base.term_app2 (module Type) env s mk_or) + | Type.Builtin Ast.And -> + `Term (Base.term_app2 (module Type) env s mk_and) + | Type.Builtin Ast.Imply -> + `Term (Base.term_app2 (module Type) env s T.imply) + | Type.Builtin Ast.Equiv -> + `Term (Base.term_app2 (module Type) env s T.equiv) + | Type.Builtin Ast.Eq -> + `Term (Base.term_app2 (module Type) env s T.eq) + | Type.Builtin Ast.Distinct -> + `Term (Base.term_app2 (module Type) env s T.neq) + + (* Ite *) + | Type.Builtin Ast.Ite -> + `Term ( + Base.make_op3 (module Type) env s (fun _ (a, b, c) -> + let cond = Type.parse_prop env a in + let then_ = Type.parse_term env b in + let else_ = Type.parse_term env c in + T.ite cond then_ else_ + ) + ) + + (* Tags *) | Type.Id id when Id.equal id Id.rwrt_rule -> - `Tags (fun _ast _args -> [Type.Any (Tag.rwrt, ())]) - | Type.Id { Id.name = "infix"; ns = Id.Term } -> + `Tags (fun _ast _args -> [Type.Set (Tag.rwrt, ())]) + | Type.Id { name = Simple "infix"; ns = Term } -> `Tags (fun ast args -> match args with - | [ { Ast.term = Ast.Symbol { Id.name; _ }; _ } ] -> [ - Type.Any (Tag.name, Tag.exact name); - Type.Any 
(Tag.pos, Tag.infix); + | [ { Ast.term = Ast.Symbol { name = Simple name; _ }; _ } ] -> [ + Type.Set (Tag.name, Tag.exact name); + Type.Set (Tag.pos, Tag.infix); ] | _ -> Type._error env (Ast ast) (Type.Expected ("a symbol", None)) ) - | Type.Id { Id.name = "prefix"; ns = Id.Term } -> + | Type.Id { name = Simple "prefix"; ns = Term } -> `Tags (fun ast args -> match args with - | [ { Ast.term = Ast.Symbol { Id.name; _ }; _ } ] -> [ - Type.Any (Tag.name, Tag.exact name); - Type.Any (Tag.pos, Tag.prefix); + | [ { Ast.term = Ast.Symbol { name = Simple name; _ }; _ } ] -> [ + Type.Set (Tag.name, Tag.exact name); + Type.Set (Tag.pos, Tag.prefix); ] | _ -> Type._error env (Ast ast) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/core.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/core.mli index dac8d0e53d0ab949651a7ee121b58463cf32874d..87d0a3f9163928d0e4c0850985831c4d004968cd 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/core.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/core.mli @@ -7,6 +7,8 @@ module Ae : sig (** Builtin symbols for tptp's tff *) module Tff (Type : Tff_intf.S) + (Tag : Dolmen.Intf.Tag.Ae_Base with type 'a t = 'a Type.Tag.t + and type term := Type.T.t) (Ty : Dolmen.Intf.Ty.Ae_Base with type t = Type.Ty.t) (T : Dolmen.Intf.Term.Ae_Base with type t = Type.T.t) : sig @@ -15,6 +17,19 @@ module Ae : sig end end +(** Dimacs builtins *) +module Dimacs : sig + + (** Builtin symbols for tptp's tff *) + module Tff + (Type : Tff_intf.S) + (T : Dolmen.Intf.Term.Dimacs with type t = Type.T.t) : sig + + val parse : Type.builtin_symbols + + end +end + (** TPTP builtins ($i, $o, etc..) 
*) module Tptp : sig @@ -22,7 +37,18 @@ module Tptp : sig module Tff (Type : Tff_intf.S) (Ty : Dolmen.Intf.Ty.Tptp_Base with type t = Type.Ty.t) - (T : Dolmen.Intf.Term.Tptp_Base with type t = Type.T.t) : sig + (T : Dolmen.Intf.Term.Tptp_Tff_Core with type t = Type.T.t) : sig + + val parse : Dolmen.Tptp.version -> Type.builtin_symbols + + end + + (** Builtin symbols for tptp's tff *) + module Thf + (Type : Thf_intf.S) + (Ty : Dolmen.Intf.Ty.Tptp_Base with type t = Type.Ty.t) + (T : Dolmen.Intf.Term.Tptp_Thf_Core with type t = Type.T.t + and type Const.t = Type.T.Const.t) : sig val parse : Dolmen.Tptp.version -> Type.builtin_symbols @@ -52,7 +78,9 @@ module Zf : sig (** Builtins for smtlib's core theory *) module Tff (Type : Tff_intf.S) - (Tag : Dolmen.Intf.Tag.Zf_Base with type 'a t = 'a Type.Tag.t) : sig + (Tag : Dolmen.Intf.Tag.Zf_Base with type 'a t = 'a Type.Tag.t) + (Ty : Dolmen.Intf.Ty.Zf_Base with type t = Type.Ty.t) + (T : Dolmen.Intf.Term.Zf_Base with type t = Type.T.t) : sig val parse : Type.builtin_symbols end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/def.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/def.ml index 611d8a7e8f76e5e118d3159d6555c2c68f7cbd85..b58716c1ad2320c2b02e2848a35074790c93d7fe 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/def.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/def.ml @@ -1,4 +1,6 @@ +module M = Map.Make(Dolmen.Std.Id) + (* Definitions by Substitution *) (* ************************************************************************ *) @@ -24,8 +26,6 @@ module Subst and type term := Type.T.t and type term_var := Type.T.Var.t) = struct - module H = Hashtbl.Make(Dolmen.Std.Id) - let take_drop n l = let rec aux acc n = function | r when n <= 0 -> List.rev acc, r @@ -34,21 +34,30 @@ module Subst in aux [] n l - let definitions = H.create 13 + let key = Dolmen.Std.Tag.create () - let define_ty id vars body = - H.add definitions id (`Ty (vars, body)) + let get_defs env = + match 
Type.get_global_custom env key with + | None -> M.empty + | Some m -> m - let define_term id vars args body = - H.add definitions id (`Term (vars, args, body)) + let define_ty env id vars body = + let map = get_defs env in + let m = M.add id (`Ty (vars, body)) map in + Type.set_global_custom env key m + + let define_term env id vars args body = + let map = get_defs env in + let m = M.add id (`Term (vars, args, body)) map in + Type.set_global_custom env key m let parse env symbol = match (symbol : Type.symbol) with | Id id -> - begin match H.find definitions id with + begin match M.find id (get_defs env) with | `Ty (vars, body) -> `Ty (Base.make_opn (List.length vars) - (module Type) env (Dolmen.Std.Id.full_name id) (fun _ args -> + (module Type) env symbol (fun _ args -> let ty_args = List.map (Type.parse_ty env) args in let l = List.map2 (fun x y -> x, y) vars ty_args in T.ty_subst l body @@ -63,8 +72,7 @@ module Subst take_drop n_ty args else begin Type._error env (Ast ast) - (Type.Bad_op_arity (Dolmen.Std.Id.full_name id, - [n_ty + n_t], n_args)) + (Type.Bad_op_arity (symbol, [n_ty + n_t], n_args)) end in let ty_l = List.map2 (fun x y -> x, y) ty_vars @@ -75,7 +83,7 @@ module Subst ) | exception Not_found -> `Not_found end - | Builtin _ -> `Not_found + | _ -> `Not_found end @@ -85,33 +93,41 @@ end module Declare(Type : Tff_intf.S) = struct - module H = Hashtbl.Make(Dolmen.Std.Id) + let key = Dolmen.Std.Tag.create () - let definitions = H.create 13 + let get_defs env = + match Type.get_global_custom env key with + | None -> M.empty + | Some m -> m - let add_definition id def = - H.add definitions id def + let add_definition env id def = + let map = get_defs env in + let m = M.add id def map in + Type.set_global_custom env key m - let define_ty id vars _body = - let c = Type.Ty.Const.mk (Dolmen.Std.Id.full_name id) (List.length vars) in - let () = add_definition id (`Ty c) in + let define_ty env id vars _body = + let path = Type.cst_path env (Dolmen.Std.Id.name 
id) in + let c = Type.Ty.Const.mk path (List.length vars) in + let () = add_definition env id (`Ty c) in c - let define_term id vars args body = + let define_term env id vars args body = let ret_ty = Type.T.ty body in let args_ty = List.map Type.T.Var.ty args in - let c = Type.T.Const.mk (Dolmen.Std.Id.full_name id) vars args_ty ret_ty in - let () = add_definition id (`Term c) in + let path = Type.cst_path env (Dolmen.Std.Id.name id) in + let ty = Type.Ty.pi vars (Type.Ty.arrow args_ty ret_ty) in + let c = Type.T.Const.mk path ty in + let () = add_definition env id (`Term c) in c let parse env symbol = match (symbol : Type.symbol) with | Id id -> - begin match H.find definitions id with + begin match M.find id (get_defs env) with | `Ty c -> `Ty (fun ast args -> - Type.unwrap_ty env ast (Type.parse_app_ty env ast c args)) + Type.unwrap_ty env ast (Type.parse_app_ty_cst env ast c args)) | `Term c -> `Term (fun ast args -> - Type.unwrap_term env ast (Type.parse_app_term env ast c args)) + Type.unwrap_term env ast (Type.parse_app_term_cst env ast c args)) | exception Not_found -> `Not_found end | Builtin _ -> `Not_found diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/def.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/def.mli index 973672b50d6c57bf4c952a14a60911e6ac3795ee..e46ae6bba8b303c16cf95400d8926c3bdf59e40f 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/def.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/def.mli @@ -11,15 +11,18 @@ module Declare(Type : Tff_intf.S) : sig val add_definition : - Dolmen.Std.Id.t -> [ `Ty of Type.Ty.Const.t | `Term of Type.T.Const.t ] -> unit + Type.env -> Dolmen.Std.Id.t -> + [ `Ty of Type.Ty.Const.t | `Term of Type.T.Const.t ] -> unit (** Add a declaration binding. *) val define_ty : - Dolmen.Std.Id.t -> Type.Ty.Var.t list -> Type.Ty.t -> Type.Ty.Const.t + Type.env -> Dolmen.Std.Id.t -> + Type.Ty.Var.t list -> Type.Ty.t -> Type.Ty.Const.t (** Define a type constant. 
*) val define_term : - Dolmen.Std.Id.t -> Type.Ty.Var.t list -> Type.T.Var.t list -> Type.T.t -> Type.T.Const.t + Type.env -> Dolmen.Std.Id.t -> + Type.Ty.Var.t list -> Type.T.Var.t list -> Type.T.t -> Type.T.Const.t (** Define a term constant. *) val parse : Type.builtin_symbols @@ -53,11 +56,13 @@ module Subst(Type : Tff_intf.S) and type term_var := Type.T.Var.t) : sig val define_ty : - Dolmen.Std.Id.t -> Type.Ty.Var.t list -> Type.Ty.t -> unit + Type.env -> Dolmen.Std.Id.t -> + Type.Ty.Var.t list -> Type.Ty.t -> unit (** Define a type constant. *) val define_term : - Dolmen.Std.Id.t -> Type.Ty.Var.t list -> Type.T.Var.t list -> Type.T.t -> unit + Type.env -> Dolmen.Std.Id.t -> + Type.Ty.Var.t list -> Type.T.Var.t list -> Type.T.t -> unit (** Define a term constant. *) val parse : Type.builtin_symbols diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/dune b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/dune index 14d8d2769255e41e7ca4bbf575a5e01a4c492ed4..8546d937b0f86441ea8c8078b76a57df07c363ae 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/dune +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/dune @@ -1,6 +1,7 @@ (library (name dolmen_type) (public_name dolmen_type) + (instrumentation (backend bisect_ppx)) (libraries ; external deps spelll uutf @@ -10,8 +11,8 @@ dolmen.smtlib2 dolmen.tptp ) (modules - ; TFF typechecking - Tff_intf Tff + ; TFF and THF typechecking + Intf Tff_intf Tff Thf_intf Thf ; Builtins Core Def Arith Arrays Bitv Float Strings ; Helpers diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/float.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/float.ml index a1e0b253441c4507031581b604d7053b60f07365..cb6c3d04040358c11caa5548b175cfdc7a38b0b6 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/float.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/float.ml @@ -17,10 +17,6 @@ module Smtlib2 = struct module R = T.Real module F = T.Float - type _ Type.warn += - | Real_lit : Ast.t Type.warn - | 
Bitv_extended_lit : Ast.t Type.warn - type _ Type.err += | Invalid_bin_char : char -> Ast.t Type.err | Invalid_hex_char : char -> Ast.t Type.err @@ -48,8 +44,8 @@ module Smtlib2 = struct | exception Misc.Bitv.Invalid_char c -> Type._error env (Ast ast) (Invalid_hex_char c) - let parse_extended_lit env s n = - Base.make_op0 (module Type) env s (fun ast () -> + let parse_extended_lit env symbol s n = + Base.make_op0 (module Type) env symbol (fun ast () -> assert (String.length s >= 2); let n = parse_int env ast n in match Misc.Bitv.parse_decimal s n with @@ -67,7 +63,7 @@ module Smtlib2 = struct let j = parse_int env ast j_s in mk i j - let to_fp env e s = `Term (fun ast args -> + let to_fp env symbol e s = `Term (fun ast args -> let e = parse_int env ast e in let s = parse_int env ast s in let args = List.map (Type.parse_term env) args in @@ -82,149 +78,151 @@ module Smtlib2 = struct Type.Expected ("a real, bitvector or float", Some (Term b))) end | _ -> Type._error env (Ast ast) - (Type.Bad_op_arity ("to_fp", [1; 2], List.length args)) + (Type.Bad_op_arity (symbol, [1; 2], List.length args)) ) let parse _version env s = match s with - (* sort *) - | Type.Id ({ Id.ns = Id.Sort; _ } as id) -> - Base.parse_id id [ - "BitVec", `Unary (function n_s -> - `Ty (Base.app0_ast (module Type) env "BitVec" - (indexed1 env Ty.bitv n_s))); - "FloatingPoint", `Binary (fun n_e n_s -> - `Ty (Base.app0_ast (module Type) env "FloatingPoint" - (indexed2 env Ty.float n_e n_s))); - ] ~err:(Base.bad_ty_index_arity (module Type) env) - ~k:(function - | ["Float16"] -> - `Ty (Base.app0 (module Type) env "Float16" (Ty.float 5 11)) - | ["Float32"] -> - `Ty (Base.app0 (module Type) env "Float32" (Ty.float 8 24)) - | ["Float64"] -> - `Ty (Base.app0 (module Type) env "Float64" (Ty.float 11 53)) - | ["Float128"] -> - `Ty (Base.app0 (module Type) env "Float128" (Ty.float 15 113)) - | ["RoundingMode"] -> - `Ty (Base.app0 (module Type) env "RoundingMode" Ty.roundingMode) - | _ -> - `Not_found) + (* 
sorts *) + | Type.Id { ns = Sort; name = Simple "Float16"; } -> + `Ty (Base.app0 (module Type) env s (Ty.float 5 11)) + | Type.Id { ns = Sort; name = Simple "Float32"; } -> + `Ty (Base.app0 (module Type) env s (Ty.float 8 24)) + | Type.Id { ns = Sort; name = Simple "Float64"; } -> + `Ty (Base.app0 (module Type) env s (Ty.float 11 53)) + | Type.Id { ns = Sort; name = Simple "Float128"; } -> + `Ty (Base.app0 (module Type) env s (Ty.float 15 113)) + | Type.Id { ns = Sort; name = Simple "RoundingMode"; } -> + `Ty (Base.app0 (module Type) env s Ty.roundingMode) + + (* indexed sorts *) + | Type.Id { ns = Sort; name = Indexed { basename; indexes; } } -> + Base.parse_indexed basename indexes (function + | "BitVec" -> `Unary (function n_s -> + `Ty (Base.app0_ast (module Type) env s + (indexed1 env Ty.bitv n_s))) + | "FloatingPoint" -> `Binary (fun n_e n_s -> + `Ty (Base.app0_ast (module Type) env s + (indexed2 env Ty.float n_e n_s))) + | _ -> `Not_indexed) + ~err:(Base.bad_ty_index_arity (module Type) env) + ~k:(function () -> `Not_found) + (* Bitvector litterals *) - | Type.Id { Id.ns = Id.Value Id.Binary; name; } -> - `Term (Base.app0_ast (module Type) env name (parse_binary env name)) - | Type.Id { Id.ns = Id.Value Id.Hexadecimal; name; } -> - `Term (Base.app0_ast (module Type) env name (parse_hexa env name)) - (* Added with a warning for compatibility *) - | Type.Id { Id.ns = Id.Value Id.Real; name; } -> - `Term (fun ast args -> - Type._warn env (Ast ast) Real_lit; - Base.app0 (module Type) env name (R.mk name) ast args) + | Type.Id { ns = Value Binary; name = Simple name; } -> + `Term (Base.app0_ast (module Type) env s (parse_binary env name)) + | Type.Id { ns = Value Hexadecimal; name = Simple name; } -> + `Term (Base.app0_ast (module Type) env s (parse_hexa env name)) + (* Added for compatibility *) + | Type.Id { ns = Value Real; name = Simple name; } -> + `Term (Base.app0 (module Type) env s (R.mk name)) (* terms *) - | Type.Id ({ Id.ns = Id.Term; name; } as id) -> 
+ | Type.Id { ns = Term; name = Simple name; } -> begin match name with | "fp" -> - `Term (Base.term_app3 (module Type) env name F.fp) + `Term (Base.term_app3 (module Type) env s F.fp) | "RNE" | "roundNearestTiesToEven" -> - `Term (Base.app0 (module Type) env name F.roundNearestTiesToEven) + `Term (Base.app0 (module Type) env s F.roundNearestTiesToEven) | "RNA" | "roundNearestTiesToAway" -> - `Term (Base.app0 (module Type) env name F.roundNearestTiesToAway) + `Term (Base.app0 (module Type) env s F.roundNearestTiesToAway) | "RTP" | "roundTowardPositive" -> - `Term (Base.app0 (module Type) env name F.roundTowardPositive) + `Term (Base.app0 (module Type) env s F.roundTowardPositive) | "RTN" | "roundTowardNegative" -> - `Term (Base.app0 (module Type) env name F.roundTowardNegative) + `Term (Base.app0 (module Type) env s F.roundTowardNegative) | "RTZ" | "roundTowardZero" -> - `Term (Base.app0 (module Type) env name F.roundTowardZero) + `Term (Base.app0 (module Type) env s F.roundTowardZero) | "fp.abs" -> - `Term (Base.term_app1 (module Type) env name F.abs) + `Term (Base.term_app1 (module Type) env s F.abs) | "fp.neg" -> - `Term (Base.term_app1 (module Type) env name F.neg) + `Term (Base.term_app1 (module Type) env s F.neg) | "fp.add" -> - `Term (Base.term_app3 (module Type) env name F.add) + `Term (Base.term_app3 (module Type) env s F.add) | "fp.sub" -> - `Term (Base.term_app3 (module Type) env name F.sub) + `Term (Base.term_app3 (module Type) env s F.sub) | "fp.mul" -> - `Term (Base.term_app3 (module Type) env name F.mul) + `Term (Base.term_app3 (module Type) env s F.mul) | "fp.div" -> - `Term (Base.term_app3 (module Type) env name F.div) + `Term (Base.term_app3 (module Type) env s F.div) | "fp.fma" -> - `Term (Base.term_app4 (module Type) env name F.fma) + `Term (Base.term_app4 (module Type) env s F.fma) | "fp.sqrt" -> - `Term (Base.term_app2 (module Type) env name F.sqrt) + `Term (Base.term_app2 (module Type) env s F.sqrt) | "fp.rem" -> - `Term (Base.term_app2 
(module Type) env name F.rem) + `Term (Base.term_app2 (module Type) env s F.rem) | "fp.roundToIntegral" -> - `Term (Base.term_app2 (module Type) env name F.roundToIntegral) + `Term (Base.term_app2 (module Type) env s F.roundToIntegral) | "fp.min" -> - `Term (Base.term_app2 (module Type) env name F.min) + `Term (Base.term_app2 (module Type) env s F.min) | "fp.max" -> - `Term (Base.term_app2 (module Type) env name F.max) + `Term (Base.term_app2 (module Type) env s F.max) | "fp.leq" -> - `Term (Base.term_app_chain (module Type) env name F.leq) + `Term (Base.term_app_chain (module Type) env s F.leq) | "fp.lt" -> - `Term (Base.term_app_chain (module Type) env name F.lt) + `Term (Base.term_app_chain (module Type) env s F.lt) | "fp.geq" -> - `Term (Base.term_app_chain (module Type) env name F.geq) + `Term (Base.term_app_chain (module Type) env s F.geq) | "fp.gt" -> - `Term (Base.term_app_chain (module Type) env name F.gt) + `Term (Base.term_app_chain (module Type) env s F.gt) | "fp.eq" -> - `Term (Base.term_app_chain (module Type) env name F.eq) + `Term (Base.term_app_chain (module Type) env s F.eq) | "fp.isNormal" -> - `Term (Base.term_app1 (module Type) env name F.isNormal) + `Term (Base.term_app1 (module Type) env s F.isNormal) | "fp.isSubnormal" -> - `Term (Base.term_app1 (module Type) env name F.isSubnormal) + `Term (Base.term_app1 (module Type) env s F.isSubnormal) | "fp.isZero" -> - `Term (Base.term_app1 (module Type) env name F.isZero) + `Term (Base.term_app1 (module Type) env s F.isZero) | "fp.isInfinite" -> - `Term (Base.term_app1 (module Type) env name F.isInfinite) + `Term (Base.term_app1 (module Type) env s F.isInfinite) | "fp.isNaN" -> - `Term (Base.term_app1 (module Type) env name F.isNaN) + `Term (Base.term_app1 (module Type) env s F.isNaN) | "fp.isNegative" -> - `Term (Base.term_app1 (module Type) env name F.isNegative) + `Term (Base.term_app1 (module Type) env s F.isNegative) | "fp.isPositive" -> - `Term (Base.term_app1 (module Type) env name 
F.isPositive) + `Term (Base.term_app1 (module Type) env s F.isPositive) | "fp.to_real" -> - `Term (Base.term_app1 (module Type) env name F.to_real) - | _ -> Base.parse_id id [ - "+oo", `Binary (fun e s -> - `Term (Base.app0_ast (module Type) env "plus_infinity" - (indexed2 env F.plus_infinity e s))); - "-oo", `Binary (fun e s -> - `Term (Base.app0_ast (module Type) env "minus_infinity" - (indexed2 env F.minus_infinity e s))); - "+zero", `Binary (fun e s -> - `Term (Base.app0_ast (module Type) env "plus_zero" - (indexed2 env F.plus_zero e s))); - "-zero", `Binary (fun e s -> - `Term (Base.app0_ast (module Type) env "minus_zero" - (indexed2 env F.minus_zero e s))); - "NaN", `Binary (fun e s -> - `Term (Base.app0_ast (module Type) env "nan" - (indexed2 env F.nan e s))); - "to_fp", `Binary (to_fp env); - "to_fp_unsigned", `Binary (fun e s -> - `Term (Base.term_app2_ast (module Type) env "ubv_to_fp" - (indexed2 env F.ubv_to_fp e s))); - "fp.to_ubv", `Unary (fun n -> - `Term (Base.term_app2_ast (module Type) env "to_ubv" - (indexed1 env F.to_ubv n))); - "fp.to_sbv", `Unary (fun n -> - `Term (Base.term_app2_ast (module Type) env "to_sbv" - (indexed1 env F.to_sbv n))); - ] ~err:(Base.bad_term_index_arity (module Type) env) - ~k:(function - | [s; n] when (String.length s >= 2 && - s.[0] = 'b' && s.[1] = 'v') -> - `Term (fun ast args -> - Type._warn env (Ast ast) Bitv_extended_lit; - parse_extended_lit env s n ast args) - | _ -> `Not_found) + `Term (Base.term_app1 (module Type) env s F.to_real) + | _ -> `Not_found end + | Type.Id { ns = Term; name = Indexed { basename; indexes; } } as symbol -> + Base.parse_indexed basename indexes (function + | "+oo" -> `Binary (fun e s -> + `Term (Base.app0_ast (module Type) env symbol + (indexed2 env F.plus_infinity e s))) + | "-oo" -> `Binary (fun e s -> + `Term (Base.app0_ast (module Type) env symbol + (indexed2 env F.minus_infinity e s))) + | "+zero" -> `Binary (fun e s -> + `Term (Base.app0_ast (module Type) env symbol + (indexed2 env 
F.plus_zero e s))) + | "-zero" -> `Binary (fun e s -> + `Term (Base.app0_ast (module Type) env symbol + (indexed2 env F.minus_zero e s))) + | "NaN" -> `Binary (fun e s -> + `Term (Base.app0_ast (module Type) env symbol + (indexed2 env F.nan e s))) + | "to_fp" -> `Binary (to_fp env symbol) + | "to_fp_unsigned" -> `Binary (fun e s -> + `Term (Base.term_app2_ast (module Type) env symbol + (indexed2 env F.ubv_to_fp e s))) + | "fp.to_ubv" -> `Unary (fun n -> + `Term (Base.term_app2_ast (module Type) env symbol + (indexed1 env F.to_ubv n))) + | "fp.to_sbv" -> `Unary (fun n -> + `Term (Base.term_app2_ast (module Type) env symbol + (indexed1 env F.to_sbv n))) + | s when (String.length s >= 2 && + s.[0] = 'b' && s.[1] = 'v') -> + `Unary (fun n -> + `Term (parse_extended_lit env symbol s n)) + | _ -> `Not_indexed) + ~err:(Base.bad_term_index_arity (module Type) env) + ~k:(fun () -> `Not_found) + | _ -> `Not_found - end +end end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/float.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/float.mli index 5316efd5edcbc58ab9466cfbc1aa8479ad50650a..74777eec8a353d5fca079e8e235569f754301a9b 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/float.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/float.mli @@ -8,10 +8,6 @@ module Smtlib2 : sig (T : Dolmen.Intf.Term.Smtlib_Float with type t := Type.T.t and type ty := Type.Ty.t) : sig - type _ Type.warn += - | Real_lit : Dolmen.Std.Term.t Type.warn - | Bitv_extended_lit : Dolmen.Std.Term.t Type.warn - type _ Type.err += | Invalid_bin_char : char -> Dolmen.Std.Term.t Type.err | Invalid_hex_char : char -> Dolmen.Std.Term.t Type.err diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/intf.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/intf.ml new file mode 100644 index 0000000000000000000000000000000000000000..0d2f6380d10c9879761214c42748a9def9961b4b --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/intf.ml @@ -0,0 +1,531 @@ + +(* 
This file is free software, part of dolmen. See file "LICENSE" for more information *) + +(** External Typechecker interface + + This module defines the external typechcker interface, that is, + the interface of an instantiated typechecker. *) + +(** {1 Typechecker interface} *) + +type symbol = + | Id of Dolmen.Std.Id.t + | Builtin of Dolmen.Std.Term.builtin (**) +(** Wrapper around potential function symbols from the Dolmen AST. *) + + +(** Typechecker interface *) +module type Formulas = sig + + (** {2 types} *) + type ty + type ty_var + type ty_cst + + type term + type term_var + type term_cst + type term_cstr + type term_field + + type 'a ast_tag + + (** {2 Type definitions} *) + + type order = + | First_order (** First-oreder typechecking *) + | Higher_order (** Higher-order typechecking *) + (** Control whether the typechecker should type *) + + + type poly = + | Explicit + (** Type arguments must be explicitly given in funciton applications *) + | Implicit + (** Type arguments are not given in funciton applications, and instead + type annotations/coercions are used to disambiguate applications + of polymorphic symbols. *) + | Flexible + (** Mix between explicit and implicit: depending on the arity of a + symbol and the number of arguments provided, either the provided + type arguments are used, or wildcards are generated for all of them, + and later instantiated when needed. 
 *) + (** The various polymorphism modes for the typechecker *) + + type sym_inference_source = { + symbol : Dolmen.Std.Id.t; + symbol_loc : Dolmen.Std.Loc.t; + mutable inferred_ty : ty; + } + (** *) + + type var_inference_source = { + variable : Dolmen.Std.Id.t; + variable_loc : Dolmen.Std.Loc.t; + mutable inferred_ty : ty; + } + (** *) + + type wildcard_source = + | Arg_of of wildcard_source + | Ret_of of wildcard_source + | From_source of Dolmen.Std.Term.t + | Added_type_argument of Dolmen.Std.Term.t + | Symbol_inference of sym_inference_source + | Variable_inference of var_inference_source (**) + (** *) + + type wildcard_shape = + | Forbidden + | Any_in_scope + | Any_base of { + allowed : ty list; + preferred : ty; + } + | Arrow of { + arg_shape : wildcard_shape; + ret_shape : wildcard_shape; + } (**) + (** *) + + type infer_unbound_var_scheme = + | No_inference + | Unification_type_variable (**) + (** *) + + type infer_term_scheme = + | No_inference + | Wildcard of wildcard_shape (**) + (** *) + + type var_infer = { + infer_unbound_vars : infer_unbound_var_scheme; + infer_type_vars_in_binding_pos : bool; + infer_term_vars_in_binding_pos : infer_term_scheme; + } + (** Specification of how to infer variables. *) + + type sym_infer = { + infer_type_csts : bool; + infer_term_csts : infer_term_scheme; + } + (** Specification of how to infer symbols. *) + + type free_wildcards = + | Forbidden + | Implicitly_universally_quantified (**) + (** *) + + type expect = + | Type + | Term + | Anything (**) + (** *) + + type tag = + | Set : 'a ast_tag * 'a -> tag + | Add : 'a list ast_tag * 'a -> tag + (** Existential wrapper around tags *) + + type res = + | Ttype + | Ty of ty + | Term of term + | Tags of tag list (**) + (** The results of parsing an untyped term. 
*) + + type builtin_res = [ + | `Ttype of (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> unit) + | `Ty of (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> ty) + | `Term of (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> term) + | `Tags of (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> tag list) + ] + (** The result of parsing a symbol by the theory *) + + type not_found = [ `Not_found ] + (** Not bound bindings *) + + type inferred = + | Ty_fun of ty_cst + | Term_fun of term_cst (**) + (** The type for inferred symbols. *) + + type reason = + | Builtin + | Bound of Dolmen.Std.Loc.file * Dolmen.Std.Term.t + | Inferred of Dolmen.Std.Loc.file * Dolmen.Std.Term.t + | Defined of Dolmen.Std.Loc.file * Dolmen.Std.Statement.def + | Declared of Dolmen.Std.Loc.file * Dolmen.Std.Statement.decl + (** The type of reasons for constant typing *) + + type binding = [ + | `Not_found + | `Builtin of [ + | `Ttype + | `Ty + | `Term + | `Tag + ] + | `Variable of [ + | `Ty of ty_var * reason option + | `Term of term_var * reason option + ] + | `Constant of [ + | `Ty of ty_cst * reason option + | `Cstr of term_cstr * reason option + | `Term of term_cst * reason option + | `Field of term_field * reason option + ] + ] + (** The bindings that can occur. *) + + type nonrec symbol = symbol = + | Id of Dolmen.Std.Id.t + | Builtin of Dolmen.Std.Term.builtin (**) + (** Wrapper around potential function symbols from the Dolmen AST. 
 *) + + + (** {2 Errors and warnings} *) + + type _ fragment = + | Ast : Dolmen.Std.Term.t -> Dolmen.Std.Term.t fragment + | Def : Dolmen.Std.Statement.def -> Dolmen.Std.Statement.def fragment + | Defs : Dolmen.Std.Statement.defs -> Dolmen.Std.Statement.defs fragment + | Decl : Dolmen.Std.Statement.decl -> Dolmen.Std.Statement.decl fragment + | Decls : Dolmen.Std.Statement.decls -> Dolmen.Std.Statement.decls fragment + | Located : Dolmen.Std.Loc.t -> Dolmen.Std.Loc.t fragment (**) + (** Fragments of input that represent the sources of warnings/errors *) + + type _ warn = .. + (** The type of warnings, parameterized by the type of fragment they can + trigger on *) + + type _ warn += + | Unused_type_variable : + [ `Quantified | `Letbound ] * ty_var -> Dolmen.Std.Term.t warn + (** Unused quantified type variable *) + | Unused_term_variable : + [ `Quantified | `Letbound ] * term_var -> Dolmen.Std.Term.t warn + (** Unused quantified term variable *) + | Error_in_attribute : exn -> Dolmen.Std.Term.t warn + (** An error occurred while parsing an attribute *) + | Superfluous_destructor : + Dolmen.Std.Id.t * Dolmen.Std.Id.t * term_cst -> Dolmen.Std.Term.t warn + (** The user implementation of typed terms returned a destructor where + none was asked for. This warning can very safely be ignored. *) + (** Warnings that can trigger on regular parsed terms. *) + + type _ warn += + | Shadowing : Dolmen.Std.Id.t * binding * binding -> _ warn + (** Shadowing of the given identifier, + together with the old and current binding. *) + (** Special case of warnings for shadowing, as it can happen both from a + term but also a declaration, hence why the type variable of [warn] is + left wild. *) + + type _ err = .. + (** The type of errors, parameterized by the type of fragment they can + trigger on *) + + type _ err += + | Not_well_founded_datatypes : + Dolmen.Std.Statement.decl list -> Dolmen.Std.Statement.decls err + (** Not well-founded datatypes definitions. 
*) + (** Errors that occur on declaration(s) *) + + type _ err += + | Expected : string * res option -> Dolmen.Std.Term.t err + (** The parsed term didn't match the expected shape *) + | Bad_index_arity : string * int * int -> Dolmen.Std.Term.t err + (** [Bad_index_arity (name, expected, actual)] denotes an error where + an indexed family of operators (based on [name]) expect to be indexed + by [expected] arguments but got [actual] instead. *) + | Bad_ty_arity : ty_cst * int -> Dolmen.Std.Term.t err + (** [Bad_ty_arity (cst, actual)] denotes a type constant that was applied + to [actual] arguments, but which has a different arity (which should + be accessible by getting its type/sort/arity). *) + | Bad_op_arity : symbol * int list * int -> Dolmen.Std.Term.t err + (** [Bad_op_arity (symbol, expected, actual)] denotes a named operator + (which may be a builtin operator, a top-level defined constant which + is being substituted, etc...) expecting a number of arguments among + the [expected] list, but instead got [actual] number of arguments. *) + | Bad_cstr_arity : term_cstr * int list * int -> Dolmen.Std.Term.t err + (** [Bad_cstr_arity (cstr, expected, actual)] denotes an ADT constructor, + which was expecting one of [expected] arguments, but which was applied + to [actual] arguments. *) + | Bad_term_arity : term_cst * int list * int -> Dolmen.Std.Term.t err + (** [Bad_term_arity (func, expected, actual)] denotes a function symbol, + which was expecting one of [expected] arguments, but which was applied + to [actual] arguments. *) + | Bad_poly_arity : ty_var list * ty list -> Dolmen.Std.Term.t err + (** [Bad_poly_arity (ty_vars, ty_args) denotes a polymorphic term + application, where the function term being applied was provided with + the type arguments [ty_args], but the function type expected + a number of arguments that is the length of [ty_vars], and the + two lengths differ. 
Under application is allowed, so in the cases + where there are less provided arguments than expected type arguments, + the presence of term arguments after the type arguments forced + the raising of this exception. *) + | Over_application : term list -> Dolmen.Std.Term.t err + (** [Over_application over_args] denotes an application where after applying + the provided arguments, the application resulted in a term with a + non-function type, but that term was still provided with [over_args]. *) + | Repeated_record_field : term_field -> Dolmen.Std.Term.t err + (** [Repeated_record_field f] denotes an error within an expression + that builds a record by giving values to all fields, but where the + field [f] appears more than once. *) + | Missing_record_field : term_field -> Dolmen.Std.Term.t err + (** [Missing_record_field f] denotes an error within an expression + that builds a record by giving values to all fields, but where the + field [f] does not appear. *) + | Mismatch_record_type : term_field * ty_cst -> Dolmen.Std.Term.t err + (** [Mismatch_record_type (f, r)] denotes an error where while building + a record expression for a record of type [c], a field [f] belonging + to another record type was used. *) + | Mismatch_sum_type : term_cstr * ty -> Dolmen.Std.Term.t err + (** *) + | Var_application : term_var -> Dolmen.Std.Term.t err + (** [Var_application v] denotes a variable which was applied to other + terms, which is forbidden in first-order formulas. *) + | Ty_var_application : ty_var -> Dolmen.Std.Term.t err + (** [Ty_var_application v] denotes a type variable which was applied to + other terms, which is forbidden in first-order formulas. *) + | Type_mismatch : term * ty -> Dolmen.Std.Term.t err + (** [Type_mismatch (term, expected)] denotes a context where [term] was + expected to have type [expected], but it is not the case. *) + | Var_in_binding_pos_underspecified : Dolmen.Std.Term.t err + (** Variable in a binding pos (e.g. 
quantifier) without a type, + and no configured way to infer its type. *) + | Unhandled_builtin : Dolmen.Std.Term.builtin -> Dolmen.Std.Term.t err + (** *) + | Cannot_tag_tag : Dolmen.Std.Term.t err + (** *) + | Cannot_tag_ttype : Dolmen.Std.Term.t err + (** *) + | Cannot_find : Dolmen.Std.Id.t * string -> Dolmen.Std.Term.t err + (** *) + | Forbidden_quantifier : Dolmen.Std.Term.t err + (** *) + | Missing_destructor : Dolmen.Std.Id.t -> Dolmen.Std.Term.t err + (** *) + | Type_def_rec : Dolmen.Std.Statement.def -> Dolmen.Std.Statement.defs err + (** *) + | Higher_order_application : Dolmen.Std.Term.t err + (** *) + | Higher_order_type : Dolmen.Std.Term.t err + (** *) + | Higher_order_env_in_tff_typechecker : Dolmen.Std.Loc.t err + (** Programmer error *) + | Polymorphic_function_argument : Dolmen.Std.Term.t err + (** *) + | Non_prenex_polymorphism : ty -> Dolmen.Std.Term.t err + (** *) + | Inference_forbidden : + ty_var * wildcard_source * ty -> Dolmen.Std.Term.t err + (** *) + | Inference_conflict : + ty_var * wildcard_source * ty * ty list -> Dolmen.Std.Term.t err + (** *) + | Inference_scope_escape : + ty_var * wildcard_source * ty_var * reason option -> Dolmen.Std.Term.t err + (** [Inference_scope_escape (w, w_src, v, reason)] denotes a situation where + the wildcard variable [w] (which comes from [w_src]), was instantiated + with a type that would lead to the variable [v] from escaping its scope; + [reason] is the reason of the binding for [v]. *) + | Unbound_type_wildcards : + (ty_var * wildcard_source list) list -> Dolmen.Std.Term.t err + (** *) + | Uncaught_exn : exn * Printexc.raw_backtrace -> Dolmen.Std.Term.t err + (** *) + | Unhandled_ast : Dolmen.Std.Term.t err + (** *) + (** Errors that occur on regular parsed terms. *) + + + (** {2 Global State} *) + + type state + (** The type of mutable state for typechecking. *) + + val new_state : unit -> state + (** Create a new state. 
 *) + + val copy_state : state -> state + (** Make a copy of the global state included in the env *) + + + (** {2 Typing Environment} *) + + type env + (** The type of environments for typechecking. *) + + type 'a typer = env -> Dolmen.Std.Term.t -> 'a + (** A general type for typers. Takes a local environment and the current untyped term, + and returns a value. The typer may need additional information for parsing, + in which case the return value will be a function. + @raise Typing_error *) + + type builtin_symbols = env -> symbol -> [ builtin_res | not_found ] + (** The type of a typer for builtin symbols. Given the environment and a symbol, + the theory should return a typing function if the symbol belongs to the + theory. This typing function takes first the ast term of the whole + application that is being typechecked, and the list of arguments to the + symbol. *) + + type warning = + | Warning : env * 'a fragment * 'a warn -> warning (**) + (** Existential wrapper around warnings *) + + type error = + | Error : env * 'a fragment * 'a err -> error (**) + (** Existential wrapper around errors *) + + exception Typing_error of error + (** Exception for typing errors *) + + val empty_env : + ?st:state -> + ?expect:expect -> + ?var_infer:var_infer -> + ?sym_infer:sym_infer -> + ?order:order -> + ?poly:poly -> + ?quants:bool -> + ?free_wildcards:free_wildcards -> + warnings:(warning -> unit) -> + file:Dolmen.Std.Loc.file -> + builtin_symbols -> env + (** Create a new environment. *) + + + (** {2 Location helpers} *) + + val loc : env -> Dolmen.Std.Loc.t -> Dolmen.Std.Loc.full + (** Completes the location with the file name from the env. *) + + val fragment_loc : env -> _ fragment -> Dolmen.Std.Loc.full + (** Convenient function to get the location of a fragment. 
 *) + + val binding_reason : binding -> reason option + (** Extract the reason from a binding + @raise Invalid_argument if the binding is [`Not_found] *) + + + (** {2 Name/Path helpers} *) + + val var_name : env -> Dolmen.Std.Name.t -> string + (** Extract a variable name from a standard name. *) + + val cst_path : env -> Dolmen.Std.Name.t -> Dolmen.Std.Path.t + (** Build a path from a standard name. *) + + + (** {2 Bindings helpers} *) + + type var = [ + | `Ty_var of ty_var + | `Term_var of term_var + | `Letin of env * Dolmen.Std.Term.t * term_var * term + ] + (** Variable bindings *) + + type cst = [ + | `Cstr of term_cstr + | `Field of term_field + | `Ty_cst of ty_cst + | `Term_cst of term_cst + ] + (** Constant bindings *) + + type builtin = [ + | `Builtin of builtin_res + ] + (** Builtin binding *) + + type bound = [ var | cst | builtin ] + (* All internal bindings *) + + val find_var : env -> Dolmen.Std.Id.t -> [ var | not_found ] + (** Try and find the given id in the set of locally bound variables. *) + + val find_global : env -> Dolmen.Std.Id.t -> [ cst | not_found ] + (** Try and find the given id in the set of globally bound constants. *) + + val find_builtin : env -> Dolmen.Std.Id.t -> [ builtin | not_found ] + (** Try and find the given id in the set of bound builtin symbols. *) + + val find_bound : env -> Dolmen.Std.Id.t -> [ bound | not_found ] + (** Try and find a bound identifier in the env, whether it be locally bound + (such as bound variables), constants bound at top-level, or builtin + symbols bound by the builtin theory. *) + + val get_global_custom : env -> 'a Dolmen.Std.Tag.t -> 'a option + (** Get a custom value from the global environment. *) + + val set_global_custom : env -> 'a Dolmen.Std.Tag.t -> 'a -> unit + (** Set a custom value in the global environment. 
*) + + + (** {2 Errors & Warnings} *) + + val _warn : env -> 'a fragment -> 'a warn -> unit + (** Emit a warning *) + + val _error : env -> 'a fragment -> 'a err -> _ + (** Raise an error *) + + val suggest : limit:int -> env -> Dolmen.Std.Id.t -> Dolmen.Std.Id.t list + (** From a dolmen identifier, return a list of existing bound identifiers + in the env that are up to [~limit] in terms of distance of edition. *) + + + (** {2 Parsing functions} *) + + val monomorphize : env -> Dolmen.Std.Term.t -> term -> term + (** Monomorphize a term. *) + + val parse_expr : res typer + (** Main parsing function. *) + + val parse_ty : ty typer + val parse_term : term typer + val parse_prop : term typer + (** Wrappers around {parse_expr} to set the expect field of the env, + and unwrap an expected return value. *) + + val parse_app_ty_cst : (ty_cst -> Dolmen.Std.Term.t list -> res) typer + val parse_app_term_cst : (term_cst -> Dolmen.Std.Term.t list -> res) typer + (** Function used for parsing applications. The first dolmen term given + is the application term being parsed (used for reporting errors). *) + + val parse_app_ho_term : (term -> Dolmen.Std.Term.t list -> res) typer + (** Function used for parsing an higher-order application. *) + + val unwrap_ty : env -> Dolmen.Std.Term.t -> res -> ty + val unwrap_term : env -> Dolmen.Std.Term.t -> res -> term + (** Unwrap a result, raising the adequate typing error + if the result if not as expected. *) + + + (** {2 High-level functions} *) + + val decls : + env -> ?attrs:Dolmen.Std.Term.t list -> + Dolmen.Std.Statement.decls -> [ + | `Type_decl of ty_cst + | `Term_decl of term_cst + ] list + (** Parse a list of potentially mutually recursive declarations. 
*) + + val defs : + env -> ?attrs:Dolmen.Std.Term.t list -> + Dolmen.Std.Statement.defs -> [ + | `Type_def of Dolmen.Std.Id.t * ty_cst * ty_var list * ty + | `Term_def of Dolmen.Std.Id.t * term_cst * ty_var list * term_var list * term + ] list + (** Parse a definition *) + + val parse : term typer + (** Parse a formula *) + +end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/logic.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/logic.ml index 5c5018f190a87aa4560bb0ac1e2773f8c96131ee..89279e0c1ca8bc3a13fdd9bc6b30fe3ff84565f2 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/logic.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/logic.ml @@ -106,12 +106,18 @@ module Smtlib2 = struct (* After the QF, Array and UF theories have been specified, BV can be specified *) and parse_bv c = function - | 'B'::'V'::l -> parse_dt (add_theory `Bitvectors c) l - | l -> parse_dt c l + | 'B'::'V'::l -> parse_dt_or_fp (add_theory `Bitvectors c) l + | l -> parse_dt_or_fp c l + (* DT and FP do not have clear ordering, so we allow to specify them in + any order. 
*) + and parse_dt_or_fp c = function + | 'D'::'T'::l -> parse_fp (set_dt c) l + | 'F'::'P'::l -> parse_dt (add_theory `Floats c) l + | l -> parse_str c l (* DT *) and parse_dt c = function - | 'D'::'T'::l -> parse_fp (set_dt c) l - | l -> parse_fp c l + | 'D'::'T'::l -> parse_str (set_dt c) l + | l -> parse_str c l (* FP theory *) and parse_fp c = function | 'F'::'P'::l -> parse_str (add_theory `Floats c) l @@ -134,7 +140,9 @@ module Smtlib2 = struct (* End of list *) and parse_end c = function | [] -> Some c - | _ -> None + | l -> + Format.eprintf "%a" (Format.pp_print_list ~pp_sep:(fun fmt () -> Format.fprintf fmt "; ") Format.pp_print_char) l; + None in (* Parse the logic name *) let res = parse_logic default (Misc.Strings.to_list s) in diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/misc.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/misc.ml index 9967ea23d2e281df85659ef372b721fb44fa73b7..34cc1606dfcdddc1f2d7c133721fcfb7a668dfc3 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/misc.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/misc.ml @@ -36,6 +36,18 @@ module Lists = struct in aux [] n l + let rec iter3 f l1 l2 l3 = + match l1, l2, l3 with + | [], [], [] -> () + | a :: r1, b :: r2, c :: r3 -> f a b c; iter3 f r1 r2 r3 + | _ -> raise (Invalid_argument "Misc.Lists.iter3") + + let rec map3 f l1 l2 l3 = + match l1, l2, l3 with + | [], [], [] -> [] + | a :: r1, b :: r2, c :: r3 -> (f a b c) :: (map3 f r1 r2 r3) + | _ -> raise (Invalid_argument "Misc.Lists.map3") + end (* String manipulation *) @@ -68,8 +80,6 @@ module Bitv = struct exception Invalid_char of char - - (* Bitv in binary forms *) let check_bin = function @@ -113,9 +123,6 @@ module Bitv = struct ) s; Bytes.to_string b - - - (* bitv in decimal form *) let int_of_char = function @@ -215,75 +222,3 @@ module Bitv = struct end -(* Fuzzy search maps *) -(* ************************************************************************ *) - -module Fuzzy_Map = struct - - module S 
= Spelll - module I = S.Index - - (** We use fuzzy maps in order to give suggestions in case of typos. - Since such maps are not trivial to extend to Dolmen identifiers, - we map strings (identifier names) to list of associations. *) - type 'a t = (Dolmen.Std.Id.t * 'a) list I.t - - let eq id (x, _) = Dolmen.Std.Id.equal id x - - let empty = I.empty - - let rec seq_to_list_ s = match s() with - | Seq.Nil -> [] - | Seq.Cons (x,y) -> x :: seq_to_list_ y - - let get t id = - let s = Dolmen.Std.Id.(id.name) in - match seq_to_list_ (I.retrieve ~limit:0 t s) with - | [l] -> l - | [] -> [] - | _ -> assert false - - let mem t id = - List.exists (eq id) (get t id) - - let find t id = - snd @@ List.find (eq id) (get t id) - - let add t id v = - let l = get t id in - let l' = - if List.exists (eq id) (get t id) then l - else (id, v) :: l - in - I.add t Dolmen.Std.Id.(id.name) l' - - (** Return a list of suggestions for an identifier. *) - let suggest t ~limit id = - let s = Dolmen.Std.Id.(id.name) in - let l = seq_to_list_ (I.retrieve ~limit t s) in - List.flatten @@ List.map (List.map fst) l - -end - -(* Fuzzy search hashtables *) -(* ************************************************************************ *) - -module Fuzzy_Hashtbl = struct - - (** Fuzzy hashtables are just references to fuzzy maps. *) - type 'a t = 'a Fuzzy_Map.t ref - - let create () = - ref Fuzzy_Map.empty - - let find r id = - Fuzzy_Map.find !r id - - let add r id v = - r := Fuzzy_Map.add !r id v - - let suggest r ~limit id = - Fuzzy_Map.suggest !r ~limit id - -end - diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/misc.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/misc.mli index bed5b44751c7d8356f50e4bd03b5e1204d9c5760..b1b75d8aec3ae802a36ed5ff78295fe8f5fcd14b 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/misc.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/misc.mli @@ -35,6 +35,12 @@ module Lists : sig the rest. 
@raise Invalid_argument if [l] has less than [n] elements. *) + val iter3 : ('a -> 'b -> 'c -> unit) -> 'a list -> 'b list -> 'c list -> unit + (** Same as {!List.iter2} but for 3 lists. *) + + val map3 : ('a -> 'b -> 'c -> 'd) -> 'a list -> 'b list -> 'c list -> 'd list + (** Same as {!List.map2} but for 3 lists. *) + end (** String helper *) @@ -48,56 +54,6 @@ module Strings : sig end -(** Fuzzy search maps *) -module Fuzzy_Map : sig - - type 'a t - (** The type of Fuzzy maps from Dolmen identifiers - to values of type ['a] *) - - val empty : _ t - (** The empty fuzzy search map. *) - - val mem : 'a t -> Dolmen.Std.Id.t -> bool - (** Test whether the given id is bound in the map. *) - - val find : 'a t -> Dolmen.Std.Id.t -> 'a - (** Find the value bound to an id. - @raise Not_found if the id is not bound. *) - - val add : 'a t -> Dolmen.Std.Id.t -> 'a -> 'a t - (** Add a new binding to the map. *) - - val suggest : 'a t -> limit:int -> Dolmen.Std.Id.t -> Dolmen.Std.Id.t list - (** Return a list of bound identifiers in the map that are close - (up to [limit] in terms of edition distance) to the given id. *) - -end - - -(** Fuzzy search Hashtables *) -module Fuzzy_Hashtbl : sig - - type 'a t - (** The type of fuzzy hashtables from Dolmen identifiers - to values of type ['a]. *) - - val create : unit -> _ t - (** Create a new fuzzy hashtable. *) - - val find : 'a t -> Dolmen.Std.Id.t -> 'a - (** Find the value bound to the given Identifier - @raise Not_found if the id is not bound. *) - - val add : 'a t -> Dolmen.Std.Id.t -> 'a -> unit - (** Add a new binding to the hashtable. *) - - val suggest : 'a t -> limit:int -> Dolmen.Std.Id.t -> Dolmen.Std.Id.t list - (** Return a list of bound identifiers in the hashtbl that are close - (up to [limit] in terms of edition distance) to the given id. 
*) - -end - (** Bitvector helpers *) module Bitv : sig @@ -105,6 +61,10 @@ module Bitv : sig (** Excpetion raised by functions in this module when a non-valid character is encountered in the parsing functions. *) + val check_bin : char -> unit + (** [check_bin c] Checks if [c] is '0' or '1', if it's neither, + raises [Invalid_char c].*) + val parse_binary : string -> string (** Parse a string of the form "#bXXXXXXX" (with X a binary character, i.e. either '1' or '0'), into a binary diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/strings.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/strings.ml index 8348d2a216b9dd1ead21443e7edc2015a77c2f5b..2fc758a28504ad59e3ca87df6080e011fa5e75a1 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/strings.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/strings.ml @@ -95,120 +95,124 @@ module Smtlib2 = struct let parse _version env s = match s with (* Types *) - | Type.Id { Id.ns = Id.Sort; name = "Int"; } -> - `Ty (Base.app0 (module Type) env "String" Ty.int) - | Type.Id { Id.ns = Id.Sort; name = "String"; } -> - `Ty (Base.app0 (module Type) env "String" Ty.string) - | Type.Id { Id.ns = Id.Sort; name = "RegLan"; } -> - `Ty (Base.app0 (module Type) env "RegLan" Ty.string_reg_lang) + | Type.Id { ns = Sort; name = Simple "Int"; } -> + `Ty (Base.app0 (module Type) env s Ty.int) + | Type.Id { ns = Sort; name = Simple "String"; } -> + `Ty (Base.app0 (module Type) env s Ty.string) + | Type.Id { ns = Sort; name = Simple "RegLan"; } -> + `Ty (Base.app0 (module Type) env s Ty.string_reg_lang) (* String literals *) - | Type.Id { Id.ns = Value String; name; } -> + | Type.Id { Id.ns = Value String; name = Simple name; } as symbol -> `Term (fun ast args -> let s = parse_ustring env ast name in - Base.app0 (module Type) env name (T.String.of_ustring s) ast args + Base.app0 (module Type) env symbol (T.String.of_ustring s) ast args ) (* Terms *) - | Type.Id ({ Id.ns = Id.Term; name; } as id) -> + | Type.Id { ns = 
Term; name = Simple name; } -> begin match name with (* String Functions *) | "str.len" -> - `Term (Base.term_app1 (module Type) env name T.String.length) + `Term (Base.term_app1 (module Type) env s T.String.length) | "str.at" -> - `Term (Base.term_app2 (module Type) env name T.String.at) + `Term (Base.term_app2 (module Type) env s T.String.at) | "str.is_digit" -> - `Term (Base.term_app1 (module Type) env name T.String.is_digit) + `Term (Base.term_app1 (module Type) env s T.String.is_digit) | "str._to_code" -> - `Term (Base.term_app1 (module Type) env name T.String.to_code) + `Term (Base.term_app1 (module Type) env s T.String.to_code) | "str.from_code" -> - `Term (Base.term_app1 (module Type) env name T.String.of_code) + `Term (Base.term_app1 (module Type) env s T.String.of_code) | "str.to_int" -> - `Term (Base.term_app1 (module Type) env name T.String.to_int) + `Term (Base.term_app1 (module Type) env s T.String.to_int) | "str.from_int" -> - `Term (Base.term_app1 (module Type) env name T.String.of_int) + `Term (Base.term_app1 (module Type) env s T.String.of_int) | "str.++" -> - `Term (Base.term_app_left (module Type) env name T.String.concat) + `Term (Base.term_app_left (module Type) env s T.String.concat) | "str.substr" -> - `Term (Base.term_app3 (module Type) env name T.String.sub) + `Term (Base.term_app3 (module Type) env s T.String.sub) | "str.indexof" -> - `Term (Base.term_app3 (module Type) env name T.String.index_of) + `Term (Base.term_app3 (module Type) env s T.String.index_of) | "str.replace" -> - `Term (Base.term_app3 (module Type) env name T.String.replace) + `Term (Base.term_app3 (module Type) env s T.String.replace) | "str.replace_all" -> - `Term (Base.term_app3 (module Type) env name T.String.replace_all) + `Term (Base.term_app3 (module Type) env s T.String.replace_all) | "str.replace_re" -> - `Term (Base.term_app3 (module Type) env name T.String.replace_re) + `Term (Base.term_app3 (module Type) env s T.String.replace_re) | "str.replace_re_all" -> - 
`Term (Base.term_app3 (module Type) env name T.String.replace_re_all) + `Term (Base.term_app3 (module Type) env s T.String.replace_re_all) | "str.prefixof" -> - `Term (Base.term_app2 (module Type) env name T.String.is_prefix) + `Term (Base.term_app2 (module Type) env s T.String.is_prefix) | "str.suffixof" -> - `Term (Base.term_app2 (module Type) env name T.String.is_suffix) + `Term (Base.term_app2 (module Type) env s T.String.is_suffix) | "str.contains" -> - `Term (Base.term_app2 (module Type) env name T.String.contains) + `Term (Base.term_app2 (module Type) env s T.String.contains) | "str.<" -> - `Term (Base.term_app2 (module Type) env name T.String.lt) + `Term (Base.term_app2 (module Type) env s T.String.lt) | "str.<=" -> - `Term (Base.term_app2 (module Type) env name T.String.leq) + `Term (Base.term_app2 (module Type) env s T.String.leq) (* String/RegLan functions *) | "str.to_re" -> - `Term (Base.term_app1 (module Type) env name T.String.RegLan.of_string) + `Term (Base.term_app1 (module Type) env s T.String.RegLan.of_string) | "re.range" -> - `Term (Base.term_app2 (module Type) env name T.String.RegLan.range) + `Term (Base.term_app2 (module Type) env s T.String.RegLan.range) | "str.in_re" -> - `Term (Base.term_app2 (module Type) env name T.String.in_re) + `Term (Base.term_app2 (module Type) env s T.String.in_re) (* RegLan functions *) | "re.none" -> - `Term (Base.app0 (module Type) env name T.String.RegLan.empty) + `Term (Base.app0 (module Type) env s T.String.RegLan.empty) | "re.all" -> - `Term (Base.app0 (module Type) env name T.String.RegLan.all) + `Term (Base.app0 (module Type) env s T.String.RegLan.all) | "re.allchar" -> - `Term (Base.app0 (module Type) env name T.String.RegLan.allchar) + `Term (Base.app0 (module Type) env s T.String.RegLan.allchar) | "re.++" -> - `Term (Base.term_app_left (module Type) env name T.String.RegLan.concat) + `Term (Base.term_app_left (module Type) env s T.String.RegLan.concat) | "re.union" -> - `Term (Base.term_app_left 
(module Type) env name T.String.RegLan.union) + `Term (Base.term_app_left (module Type) env s T.String.RegLan.union) | "re.inter" -> - `Term (Base.term_app_left (module Type) env name T.String.RegLan.inter) + `Term (Base.term_app_left (module Type) env s T.String.RegLan.inter) | "re.*" -> - `Term (Base.term_app1 (module Type) env name T.String.RegLan.star) + `Term (Base.term_app1 (module Type) env s T.String.RegLan.star) | "re.comp" -> - `Term (Base.term_app1 (module Type) env name T.String.RegLan.complement) + `Term (Base.term_app1 (module Type) env s T.String.RegLan.complement) | "re.diff" -> - `Term (Base.term_app2 (module Type) env name T.String.RegLan.diff) + `Term (Base.term_app2 (module Type) env s T.String.RegLan.diff) | "re.+" -> - `Term (Base.term_app1 (module Type) env name T.String.RegLan.cross) + `Term (Base.term_app1 (module Type) env s T.String.RegLan.cross) | "re.opt" -> - `Term (Base.term_app1 (module Type) env name T.String.RegLan.option) - - (* Indexed identifiers *) - | _ -> Base.parse_id id [ - "char", `Unary (fun s -> `Term (fun ast args -> - let s' = parse_uchar_hexa env ast s in - Base.app0 (module Type) env name (T.String.of_ustring s') ast args - )); - "re.^", `Unary (fun s -> - let n = int_of_string s in - `Term (Base.term_app1 (module Type) env name (T.String.RegLan.power n)) - ); - "re.loop", `Binary (fun s s' -> - let n1 = int_of_string s in - let n2 = int_of_string s' in - `Term (Base.term_app1 (module Type) env name (T.String.RegLan.loop n1 n2)) - ); - ] ~err:(Base.bad_term_index_arity (module Type) env) - ~k:(fun _ -> `Not_found) + `Term (Base.term_app1 (module Type) env s T.String.RegLan.option) + + | _ -> `Not_found end + (* Indexed identifiers *) + | Type.Id { ns = Term; name = Indexed { basename; indexes; }; } as symbol -> + Base.parse_indexed basename indexes (function + | "char" -> `Unary (fun s -> `Term (fun ast args -> + let s' = parse_uchar_hexa env ast s in + Base.app0 (module Type) env symbol (T.String.of_ustring s') ast 
args + )) + | "re.^" -> `Unary (fun s -> + let n = int_of_string s in + `Term (Base.term_app1 (module Type) env symbol (T.String.RegLan.power n)) + ) + | "re.loop" -> `Binary (fun s s' -> + let n1 = int_of_string s in + let n2 = int_of_string s' in + `Term (Base.term_app1 (module Type) env symbol (T.String.RegLan.loop n1 n2)) + ) + | _ -> `Not_indexed + ) ~err:(Base.bad_term_index_arity (module Type) env) + ~k:(fun _ -> `Not_found) + | _ -> `Not_found end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/tff.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/tff.ml index 4d96855238f88aa01701da0581f5d9610c7dcd17..070b204eb2bf9e47974a7dd037de91c89e5b7fb8 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/tff.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/tff.ml @@ -9,1512 +9,40 @@ module type S = Tff_intf.S module Make (Tag: Dolmen.Intf.Tag.S) (Ty: Dolmen.Intf.Ty.Tff - with type 'a tag := 'a Tag.t) + with type 'a tag := 'a Tag.t + and type path := Dolmen.Std.Path.t) (T: Dolmen.Intf.Term.Tff with type ty := Ty.t and type ty_var := Ty.Var.t and type ty_const := Ty.Const.t - and type 'a tag := 'a Tag.t) + and type 'a tag := 'a Tag.t + and type path := Dolmen.Std.Path.t) = struct - (* Module aliases *) - (* ************************************************************************ *) + include Thf.Make(Tag) + (struct + include Ty + let arrow _ _ = assert false + let pi _ _ = assert false + end) + (struct + include T + let apply _ _ _ = assert false + end) - (* These are exported *) - module T = T - module Ty = Ty - module Tag = Tag - - (* Non-exported module alias to avoid confusing - untyped Terms and typed terms *) - module Id = Dolmen.Std.Id - module Ast = Dolmen.Std.Term - module Stmt = Dolmen.Std.Statement - module Loc = Dolmen.Std.Loc - - (* Types *) - (* ************************************************************************ *) - - (* Different behavior of polymorphism *) - type poly = - | Explicit - | Implicit - | Flexible - - (* The 
type of potentially expected result type for parsing an expression *) - type expect = - | Nothing - | Type - | Typed of Ty.t - - (* The type returned after parsing an expression. *) - type tag = - | Any : 'a Tag.t * 'a -> tag - - (* Result of parsing an expression *) - type res = - | Ttype - | Ty of Ty.t - | Term of T.t - | Tags of tag list - - - (* Things that can be inferred *) - type inferred = - | Ty_fun of Ty.Const.t - | Term_fun of T.Const.t - - (* Wrapper around potential function symbols in Dolmen *) - type symbol = - | Id of Id.t - | Builtin of Ast.builtin - - (* Not found result *) - type not_found = [ `Not_found ] - - (* Variable that can be bound to a dolmen identifier *) - type var = [ - | `Ty_var of Ty.Var.t - | `Term_var of T.Var.t - | `Letin of Ast.t * T.Var.t * T.t - ] - - (* Constants that can be bound to a dolmen identifier. *) - type cst = [ - | `Cstr of T.Cstr.t - | `Field of T.Field.t - | `Ty_cst of Ty.Const.t - | `Term_cst of T.Const.t - ] - - (* Result of parsing a symbol by the theory *) - type builtin_res = [ - | `Ttype of (Ast.t -> Ast.t list -> unit) - | `Ty of (Ast.t -> Ast.t list -> Ty.t) - | `Term of (Ast.t -> Ast.t list -> T.t) - | `Tags of (Ast.t -> Ast.t list -> tag list) - ] - - (* Names that are bound to a dolmen identifier by the builtins *) - type builtin = [ - | `Builtin of builtin_res - ] - - (* Either a bound variable or a bound constant *) - type bound = [ var | cst | builtin ] - - type reason = - | Builtin - | Bound of Loc.file * Ast.t - | Inferred of Loc.file * Ast.t - | Defined of Loc.file * Stmt.def - | Declared of Loc.file * Stmt.decl - (** The type of reasons for constant typing *) - - type binding = [ - | `Not_found - | `Builtin of [ - | `Ttype - | `Ty - | `Term - | `Tag - ] - | `Variable of [ - | `Ty of Ty.Var.t * reason option - | `Term of T.Var.t * reason option - ] - | `Constant of [ - | `Ty of Ty.Const.t * reason option - | `Cstr of T.Cstr.t * reason option - | `Term of T.Const.t * reason option - | `Field of 
T.Field.t * reason option - ] - ] - (** The bindings that can occur. *) - - (* Maps & Hashtbls *) - (* ************************************************************************ *) - - module M = Map.Make(Id) - - module E = Map.Make(Ty.Var) - module F = Map.Make(T.Var) - module R = Map.Make(Ty.Const) - module S = Map.Make(T.Const) - module U = Map.Make(T.Cstr) - module V = Map.Make(T.Field) - - (* Warnings & Errors *) - (* ************************************************************************ *) - - (* Fragments of input that represent the sources of warnings/errors *) - type _ fragment = - | Ast : Ast.t -> Ast.t fragment - | Def : Stmt.def -> Stmt.def fragment - | Defs : Stmt.defs -> Stmt.defs fragment - | Decl : Stmt.decl -> Stmt.decl fragment - | Decls : Stmt.decls -> Stmt.decls fragment - | Located : Loc.t -> Loc.t fragment - - let decl_loc d = - match (d : Stmt.decl) with - | Record { loc; _ } - | Abstract { loc; _ } - | Inductive { loc; _ } -> loc - - (* Warnings *) - (* ******** *) - - (* Warnings, parameterized by the type of fragment they can trigger on *) - type _ warn = .. - - type _ warn += - | Unused_type_variable : Ty.Var.t -> Ast.t warn - (* Unused quantified type variable *) - | Unused_term_variable : T.Var.t -> Ast.t warn - (* Unused quantified term variable *) - | Error_in_attribute : exn -> Ast.t warn - (* An error occurred wile parsing an attribute *) - | Superfluous_destructor : Id.t * Id.t * T.Const.t -> Ast.t warn - (* The user implementation of typed terms returned a destructor where - was asked for. This warning can very safely be ignored. *) - - (* Special case for shadowing, as it can happen both from a term but also - a declaration, hence why the type variable of [warn] is left wild. *) - type _ warn += - | Shadowing : Id.t * binding * binding -> _ warn - (* Shadowing of the given identifier, together with the old and current - binding. 
*) - - - (* Errors *) - (* ****** *) - - (* Errors, parameterized by the kind of fragment they can trigger on *) - type _ err = .. - - (* Errors that occur on declaration(s) *) - type _ err += - | Not_well_founded_datatypes : Stmt.decl list -> Stmt.decls err - (* Not well-dounded datatypes definitions. *) - - (* Errors that occur on term fragments, i.e. Ast.t fragments *) - type _ err += - | Infer_type_variable : Ast.t err - | Expected : string * res option -> Ast.t err - | Bad_index_arity : string * int * int -> Ast.t err - | Bad_ty_arity : Ty.Const.t * int -> Ast.t err - | Bad_op_arity : string * int list * int -> Ast.t err - | Bad_cstr_arity : T.Cstr.t * int list * int -> Ast.t err - | Bad_term_arity : T.Const.t * int list * int -> Ast.t err - | Repeated_record_field : T.Field.t -> Ast.t err - | Missing_record_field : T.Field.t -> Ast.t err - | Mismatch_record_type : T.Field.t * Ty.Const.t -> Ast.t err - | Var_application : T.Var.t -> Ast.t err - | Ty_var_application : Ty.Var.t -> Ast.t err - | Type_mismatch : T.t * Ty.t -> Ast.t err - | Quantified_var_inference : Ast.t err - | Unhandled_builtin : Ast.builtin -> Ast.t err - | Cannot_tag_tag : Ast.t err - | Cannot_tag_ttype : Ast.t err - | Cannot_find : Id.t -> Ast.t err - | Type_var_in_type_constructor : Ast.t err - | Forbidden_quantifier : Ast.t err - | Missing_destructor : Id.t -> Ast.t err - | Type_def_rec : Stmt.def -> Stmt.defs err - | Higher_order_application : Ast.t err - | Higher_order_type : Ast.t err - | Unbound_variables : Ty.Var.t list * T.Var.t list * T.t -> Ast.t err - | Uncaught_exn : exn * Printexc.raw_backtrace -> Ast.t err - | Unhandled_ast : Ast.t err - - - (* State & Environment *) - (* ************************************************************************ *) - - (* Global, mutable state. *) - type state = { - - mutable csts : cst M.t; - (* association between dolmen ids and types/terms constants. 
*) - - mutable ttype_locs : reason R.t; - (* stores reasons for typing of type constructors *) - mutable const_locs : reason S.t; - (* stores reasons for typing of constants *) - mutable cstrs_locs : reason U.t; - (* stores reasons for typing constructors *) - mutable field_locs : reason V.t; - (* stores reasons for typing record fields *) - } - - (* The local environments used for type-checking. *) - type env = { - - (* current file *) - file : Loc.file; - - (* global state *) - st : state; - - (* Map from term variables to the reason of its type *) - type_locs : reason E.t; - term_locs : reason F.t; - - (* bound variables *) - vars : var M.t; - - (* The current builtin symbols *) - builtins : builtin_symbols; - - (* warnings *) - warnings : warning -> unit; - - (* Additional typing info *) - poly : poly; - quants : bool; - - expect : expect; - infer_base : Ty.t option; - infer_hook : env -> inferred -> unit; - - } - - (* Builtin symbols, i.e symbols understood by some theories, - but which do not have specific syntax, so end up as special - cases of application. *) - and builtin_symbols = env -> symbol -> [ builtin_res | not_found ] - - (* Existencial wrapper for wranings. *) - and warning = - | Warning : env * 'a fragment * 'a warn -> warning - - (* Exitencial wrapper around errors *) - and error = - | Error : env * 'a fragment * 'a err -> error - - (* Convenient alias *) - type 'a typer = env -> Ast.t -> 'a - - - (* Exceptions *) - (* ************************************************************************ *) - - (* Internal exception *) - exception Found of Ast.t * res - - (* Exception for typing errors *) - exception Typing_error of error - - - (* Warnings/Error helpers *) - (* ************************************************************************ *) - - let _warn env fragment w = - env.warnings (Warning (env, fragment, w)) - - let _error env fragment e = - raise (Typing_error (Error (env, fragment, e))) - - let fragment_loc : - type a. 
env -> a fragment -> Loc.full = fun env fg -> - let loc = - match fg with - | Ast { loc; _ } -> loc - | Def d -> d.loc - | Defs { contents = []; _ } -> Loc.no_loc - | Defs { contents = d :: _; _ } -> d.loc - | Decl d -> decl_loc d - | Decls { contents = []; _ } -> Loc.no_loc - | Decls { contents = d :: _; _ } -> decl_loc d - | Located l -> l - in - { file = env.file; - loc = loc; } - - - (* Convenience functions *) - (* ************************************************************************ *) - - let _expected env s t res = - _error env (Ast t) (Expected (s, res)) - - let _bad_op_arity env s n m t = - _error env (Ast t) (Bad_op_arity (s, [n], m)) - - let _bad_ty_arity env f n t = - _error env (Ast t) (Bad_ty_arity (f, n)) - - let _bad_term_arity env f expected actual t = - _error env (Ast t) (Bad_term_arity (f, expected, actual)) - - let _bad_cstr_arity env c expected actual t = - _error env (Ast t) (Bad_cstr_arity (c, expected, actual)) - - let _ty_var_app env v t = - _error env (Ast t) (Ty_var_application v) - - let _var_app env v t = - _error env (Ast t) (Var_application v) - - let _type_mismatch env t ty ast = - _error env (Ast ast) (Type_mismatch (t, ty)) - - let _record_type_mismatch env f ty_c ast = - _error env (Ast ast) (Mismatch_record_type (f, ty_c)) - - let _field_repeated env f ast = - _error env (Ast ast) (Repeated_record_field f) - - let _field_missing env f ast = - _error env (Ast ast) (Missing_record_field f) - - let _cannot_infer_quant_var env t = - _error env (Ast t) (Quantified_var_inference) - - let _unknown_builtin env ast b = - _error env (Ast ast) (Unhandled_builtin b) - - let _uncaught_exn env ast exn bt = - _error env (Ast ast) (Uncaught_exn (exn, bt)) - - let _cannot_find env ast s = - _error env (Ast ast) (Cannot_find s) - - let _wrap env ast f arg = - try f arg - with - | T.Wrong_type (t, ty) -> - _type_mismatch env t ty ast - | T.Wrong_record_type (f, c) -> - _record_type_mismatch env f c ast - | T.Field_repeated f -> - 
_field_repeated env f ast - | T.Field_missing f -> - _field_missing env f ast - | (Typing_error _) as exn -> - raise exn - | exn -> - let bt = Printexc.get_raw_backtrace () in - _uncaught_exn env ast exn bt - - let _wrap2 env ast f a b = - _wrap env ast (fun () -> f a b) () - - let _wrap3 env ast f a b c = - _wrap env ast (fun () -> f a b c) () - - (* Binding lookups *) - (* ************************************************************************ *) - - let find_reason env (v : bound) = - try - let r = - match v with - | `Builtin _ -> Builtin - | `Ty_var v -> E.find v env.type_locs - | `Term_var v -> F.find v env.term_locs - | `Letin (_, v, _) -> F.find v env.term_locs - | `Ty_cst c -> R.find c env.st.ttype_locs - | `Term_cst c -> S.find c env.st.const_locs - | `Cstr c -> U.find c env.st.cstrs_locs - | `Field f -> V.find f env.st.field_locs - in - Some r - with Not_found -> assert false - - let with_reason reason bound : binding = - match (bound : [ bound | not_found ]) with - | `Not_found -> `Not_found - | `Builtin `Ttype _ -> `Builtin `Ttype - | `Builtin `Ty _ -> `Builtin `Ty - | `Builtin `Term _ -> `Builtin `Term - | `Builtin `Tags _ -> `Builtin `Tag - | `Ty_var v -> `Variable (`Ty (v, reason)) - | `Term_var v -> `Variable (`Term (v, reason)) - | `Letin (_, v, _) -> `Variable (`Term (v, reason)) - | `Ty_cst c -> `Constant (`Ty (c, reason)) - | `Term_cst c -> `Constant (`Term (c, reason)) - | `Cstr c -> `Constant (`Cstr (c, reason)) - | `Field f -> `Constant (`Field (f, reason)) - - let binding_reason binding : reason option = - match (binding : binding) with - | `Not_found -> assert false - | `Builtin _ -> Some Builtin - | `Variable `Ty (_, reason) - | `Variable `Term (_, reason) - | `Constant `Ty (_, reason) - | `Constant `Term (_, reason) - | `Constant `Cstr (_, reason) - | `Constant `Field (_, reason) - -> reason - - let _shadow env fragment id - (old : bound) reason (bound : [< bound]) = - let old_binding = - with_reason (find_reason env old) (old :> [bound 
| not_found]) - in - let new_binding = with_reason (Some reason) (bound :> [bound | not_found]) in - _warn env fragment (Shadowing (id, old_binding, new_binding)) - - - let find_var env name : [var | not_found] = - match M.find name env.vars with - | #var as res -> res - | exception Not_found -> `Not_found - - let find_global env id : [cst | not_found] = - try (M.find id env.st.csts :> [cst | not_found]) - with Not_found -> `Not_found - - let find_builtin env id : [builtin | not_found] = - match env.builtins env (Id id) with - | `Not_found -> `Not_found - | #builtin_res as res -> `Builtin res - - let find_bound env id : [ bound | not_found ] = - match find_var env id with - | #var as res -> (res :> [ bound | not_found ]) - | `Not_found -> - begin match find_global env id with - | #cst as res -> (res :> [ bound | not_found ]) - | `Not_found -> - (find_builtin env id :> [ bound | not_found ]) - end - - - (* Global Environment *) - (* ************************************************************************ *) - - let new_state () = { - csts = M.empty; - ttype_locs = R.empty; - const_locs = S.empty; - cstrs_locs = U.empty; - field_locs = V.empty; - } - - let copy_state st = { - csts = st.csts; - ttype_locs = st.ttype_locs; - const_locs = st.const_locs; - cstrs_locs = st.cstrs_locs; - field_locs = st.field_locs; - } - - - let add_global env fragment id reason (v : cst) = - begin match find_bound env id with - | `Not_found -> () - | #bound as old -> _shadow env fragment id old reason v - end; - env.st.csts <- M.add id v env.st.csts - - (* Symbol declarations *) - let decl_ty_const env fg id c reason = - add_global env fg id reason (`Ty_cst c); - env.st.ttype_locs <- R.add c reason env.st.ttype_locs - - let decl_term_const env fg id c reason = - add_global env fg id reason (`Term_cst c); - env.st.const_locs <- S.add c reason env.st.const_locs - - let decl_term_cstr env fg id c reason = - add_global env fg id reason (`Cstr c); - env.st.cstrs_locs <- U.add c reason 
env.st.cstrs_locs - - let decl_term_field env fg id f reason = - add_global env fg id reason (`Field f); - env.st.field_locs <- V.add f reason env.st.field_locs - - - (* Local Environment *) - (* ************************************************************************ *) - - let global = new_state () - - (* Make a new empty environment *) let empty_env - ?(st=global) - ?(expect=Nothing) - ?(infer_hook=(fun _ _ -> ())) - ?infer_base - ?(poly=Flexible) - ?(quants=true) - ~warnings ~file - builtins = { - file; st; builtins; warnings; - poly; quants; expect; infer_hook; infer_base; - vars = M.empty; - type_locs = E.empty; - term_locs = F.empty; - } - - let expect ?(force=false) env expect = - if env.expect = Nothing && not force then env - else { env with expect = expect } - - (* Generate new fresh names for shadowed variables *) - let new_name pre = - let i = ref 0 in - (fun () -> incr i; pre ^ (string_of_int !i)) - - let new_ty_name = new_name "ty#" - let new_term_name = new_name "term#" - - (* Add local variables to environment *) - let add_type_var env id v ast = - let reason = Bound (env.file, ast) in - let v' = - match find_bound env id with - | `Not_found -> v - | #bound as old -> - let v' = Ty.Var.mk (new_ty_name ()) in - _shadow env (Ast ast) id old reason (`Ty_var v'); - v' - in - v', { env with - vars = M.add id (`Ty_var v') env.vars; - type_locs = E.add v' reason env.type_locs; - } - - let add_type_vars env l = - let l', env' = List.fold_left (fun (l, acc) (id, v, ast) -> - let v', acc' = add_type_var acc id v ast in - v' :: l, acc') ([], env) l in - List.rev l', env' - - let add_term_var env id v ast = - let reason = Bound (env.file, ast) in - let v' = - match find_bound env id with - | `Not_found -> v - | #bound as old -> - let v' = T.Var.mk (new_term_name ()) (T.Var.ty v) in - _shadow env (Ast ast) id old reason (`Term_var v'); - v' - in - v', { env with - vars = M.add id (`Term_var v') env.vars; - term_locs = F.add v' reason env.term_locs; - } - - let 
bind_term_var env id e v t ast = - let reason = Bound (env.file, ast) in - let v' = - match find_bound env id with - | `Not_found -> v - | #bound as old -> - let v' = T.Var.mk (new_term_name ()) (T.Var.ty v) in - _shadow env (Ast ast) id old reason (`Term_var v'); - v' - in - let t' = T.bind v' t in - v', { env with - vars = M.add id (`Letin (e, v', t')) env.vars; - term_locs = F.add v' reason env.term_locs; - } - - - (* Typo suggestion *) - (* ************************************************************************ *) - - let suggest ~limit env id = - let automaton = Spelll.of_string ~limit Id.(id.name) in - let aux id _ acc = - if Spelll.match_with automaton Id.(id.name) - then id :: acc - else acc - in - M.fold aux env.st.csts (M.fold aux env.vars []) - - - (* Typing explanation *) - (* ************************************************************************ *) - - let _unused_type env v = - match E.find v env.type_locs with - (* Variable bound or inferred *) - | Bound (_, t) | Inferred (_, t) -> - _warn env (Ast t) (Unused_type_variable v) - (* variables should not be declare-able nor builtin *) - | Builtin | Declared _ | Defined _ -> - assert false - - let _unused_term env v = - match F.find v env.term_locs with - (* Variable bound or inferred *) - | Bound (_, t) | Inferred (_, t) -> - _warn env (Ast t) (Unused_term_variable v) - (* variables should not be declare-able nor builtin *) - | Builtin | Declared _ | Defined _ -> - assert false - - - (* Wrappers for expression building *) - (* ************************************************************************ *) - - (* unwrap results *) - let unwrap_ty env ast = function - | Ty ty -> ty - | res -> _expected env "type" ast (Some res) - - let unwrap_term env ast = function - | Term t -> t - | res -> _expected env "term" ast (Some res) - - (* Split arguments of a function/constructor application *) - let split_args env n_ty n_t args = - let n_args = List.length args in - match env.poly with - | Explicit -> - if 
n_args = n_ty + n_t then - `Ok (Misc.Lists.take_drop n_ty args) - else - `Bad_arity ([n_ty + n_t], n_args) - | Implicit -> - if n_args = n_t then - `Ok (Misc.Lists.init n_ty (fun _ -> Ast.wildcard ()), args) - else - `Bad_arity ([n_t], n_args) - | Flexible -> - if n_args = n_ty + n_t then - `Ok (Misc.Lists.take_drop n_ty args) - else if n_args = n_t then - `Ok (Misc.Lists.init n_ty (fun _ -> Ast.wildcard ()), args) - else - `Bad_arity ([n_t; n_ty + n_t], n_args) - - - (* wrapper for builtin application *) - let builtin_apply env b ast args : res = - match (b : builtin_res) with - | `Ttype f -> _wrap2 env ast f ast args; Ttype - | `Ty f -> Ty (_wrap2 env ast f ast args) - | `Term f -> Term (_wrap2 env ast f ast args) - | `Tags f -> Tags (_wrap2 env ast f ast args) - - (* Wrapper around record creation *) - let create_record env ast l = - _wrap env ast T.record l - - let create_record_with env ast t l = - _wrap2 env ast T.record_with t l - - let create_record_access env ast t field = - _wrap2 env ast T.apply_field field t - - let make_eq env ast_term a b = - _wrap2 env ast_term T.eq a b - - let ty_var_equal v v' = Ty.Var.compare v v' = 0 - let t_var_equal v v' = T.Var.compare v v' = 0 - - let mk_quant env ast mk (ty_vars, t_vars) body = - if not env.quants then - _error env (Ast ast) Forbidden_quantifier - else begin - let fv_ty, fv_t = T.fv body in - (* Emit warnings for quantified variables that are unused *) - List.iter (fun v -> - if not @@ List.exists (ty_var_equal v) fv_ty then _unused_type env v - ) ty_vars; - List.iter (fun v -> - if not @@ List.exists (t_var_equal v) fv_t then _unused_term env v - ) t_vars; - (* Filter quantified variables from free_variables *) - let fv_ty = List.filter (fun v -> - not (List.exists (ty_var_equal v) ty_vars)) fv_ty in - let fv_t = List.filter (fun v -> - not (List.exists (t_var_equal v) t_vars)) fv_t in - (* Create the quantified formula *) - _wrap3 env ast mk (fv_ty, fv_t) (ty_vars, t_vars) body - end - - let infer env ast 
s args s_ast = - if Id.(s.ns = Var) then - _error env (Ast ast) Infer_type_variable; - match env.expect, env.infer_base with - | Nothing, _ -> None - | Type, _ -> - let n = List.length args in - let ret = Ty.Const.mk (Id.full_name s) n in - let res = Ty_fun ret in - env.infer_hook env res; - decl_ty_const env (Ast ast) s ret (Inferred (env.file, s_ast)); - Some res - | Typed _, None -> None - | Typed ty, Some base -> - let n = List.length args in - let ret = T.Const.mk - (Id.full_name s) [] (Misc.Lists.replicate n base) ty - in - let res = Term_fun ret in - env.infer_hook env res; - decl_term_const env (Ast ast) s ret (Inferred (env.file, s_ast)); - Some res - - - (* Tag application *) - (* ************************************************************************ *) - - let apply_tag env ast tag v res = - match (res : res) with - | Ttype -> _error env (Ast ast) Cannot_tag_ttype - | Tags _ -> _error env (Ast ast) Cannot_tag_tag - | Ty ty -> Ty.tag ty tag v - | Term t -> T.tag t tag v - - (* Expression parsing *) - (* ************************************************************************ *) - - let expect_base env = - match env.infer_base with - | None -> env - | Some ty -> expect env (Typed ty) - - let expect_prop env = - expect env (Typed Ty.prop) - - let rec parse_expr (env : env) t : res = - let res : res = match t with - - (* Ttype & builtin types *) - | { Ast.term = Ast.Builtin Ast.Ttype; _ } -> - Ttype - | { Ast.term = Ast.Builtin Ast.Prop; _ } -> - Ty Ty.prop - - (* Wildcards should only occur in place of types *) - | { Ast.term = Ast.Builtin Ast.Wildcard; _ } -> - Ty (Ty.wildcard ()) - - (* Basic formulas *) - | { Ast.term = Ast.App ({ Ast.term = Ast.Builtin Ast.True; _ }, []); _ } - | { Ast.term = Ast.Builtin Ast.True; _ } -> - Term T._true - - | { Ast.term = Ast.App ({ Ast.term = Ast.Builtin Ast.False; _ }, []); _ } - | { Ast.term = Ast.Builtin Ast.False; _ } -> - Term T._false - - | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.And; _ }, l); _ } -> - 
Term (_wrap env t T._and (List.map (parse_prop env) l)) - - | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Or; _ }, l); _ } -> - Term (_wrap env t T._or (List.map (parse_prop env) l)) - - | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Xor; _}, l); _ } as t -> - begin match l with - | [p; q] -> - let f = parse_prop env p in - let g = parse_prop env q in - Term (_wrap2 env t T.xor f g) - | _ -> _bad_op_arity env "xor" 2 (List.length l) t - end - - | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Nand; _}, l); _ } as t -> - begin match l with - | [p; q] -> - let f = parse_prop env p in - let g = parse_prop env q in - Term (_wrap2 env t T.nand f g) - | _ -> _bad_op_arity env "nand" 2 (List.length l) t - end - - | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Nor; _}, l); _ } as t -> - begin match l with - | [p; q] -> - let f = parse_prop env p in - let g = parse_prop env q in - Term (_wrap2 env t T.nor f g) - | _ -> _bad_op_arity env "nor" 2 (List.length l) t - end - - | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Imply; _ }, l); _ } as t -> - begin match l with - | [p; q] -> - let f = parse_prop env p in - let g = parse_prop env q in - Term (_wrap2 env t T.imply f g) - | _ -> _bad_op_arity env "=>" 2 (List.length l) t - end - - | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Implied; _ }, l); _ } as t -> - begin match l with - | [q; p] -> - let f = parse_prop env p in - let g = parse_prop env q in - Term (_wrap2 env t T.imply f g) - | _ -> _bad_op_arity env "<=" 2 (List.length l) t - end - - | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Equiv; _}, l); _ } as t -> - begin match l with - | [p; q] -> - let f = parse_prop env p in - let g = parse_prop env q in - Term (_wrap2 env t T.equiv f g) - | _ -> _bad_op_arity env "<=>" 2 (List.length l) t - end - - | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Not; _}, l); _ } as t -> - begin match l with - | [p] -> - Term (_wrap env t T.neg (parse_prop env p)) - | _ -> _bad_op_arity env "not" 
1 (List.length l) t - end - - (* Binders *) - | { Ast.term = Ast.Binder (Ast.All, _, _); _ } -> - parse_quant T.all Ast.All env t [] [] t - - | { Ast.term = Ast.Binder (Ast.Ex, _, _); _ } -> - parse_quant T.ex Ast.Ex env t [] [] t - - (* Pattern matching *) - | { Ast.term = Ast.Match (scrutinee, branches); _ } -> - parse_match env t scrutinee branches - - (* (Dis)Equality *) - | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Eq; _ }, l); _ } as t -> - begin match l with - | [a; b] -> - let env = expect_base env in - begin match parse_expr env a, parse_expr env b with - | Term t1, Term t2 -> Term (make_eq env t t1 t2) - | _ -> _expected env "two terms" t None - end - | _ -> _bad_op_arity env "=" 2 (List.length l) t - end - - | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Distinct; _}, args); _ } -> - let l' = List.map (parse_term env) args in - Term (_wrap env t T.distinct l') - - (* General case: application *) - | { Ast.term = Ast.Symbol s; _ } as ast -> - parse_app env ast s ast [] - | { Ast.term = Ast.App ( - { Ast.term = Ast.Symbol s; _ } as s_ast, l); _ } as ast -> - parse_app env ast s s_ast l - - (* If-then-else *) - | { Ast.term = Ast.App ({ Ast.term = Ast.Builtin Ast.Ite; _}, l); _ } as ast -> - parse_ite env ast l - - (* Record creation *) - | { Ast.term = Ast.App ({ Ast.term = Ast.Builtin Ast.Record; _ }, l); _ } as ast -> - parse_record env ast l - - | { Ast.term = Ast.App ({ Ast.term = Ast.Builtin Ast.Record_with; _ }, l); _ } as ast -> - parse_record_with env ast l - - | { Ast.term = Ast.App ({ Ast.term = Ast.Builtin Ast.Record_access; _ }, l); _ } as ast -> - parse_record_access env ast l - - (* Builtin application not treated directly, but instead - routed to a semantic extension through builtin_symbols. 
*) - | { Ast.term = Ast.Builtin b; _ } as ast -> - parse_builtin env ast b [] - | { Ast.term = Ast.App ({ Ast.term = Ast.Builtin b; _ }, l); _ } as ast -> - parse_builtin env ast b l - - (* Local bindings *) - | { Ast.term = Ast.Binder (Ast.Let, vars, f); _ } -> - parse_let env [] f vars - - (* Type annotations *) - | { Ast.term = Ast.Colon (a, expected); _ } -> - parse_ensure env a expected - - (* Sometimes parser creates extra applications *) - | { Ast.term = Ast.App (t, []); _ } -> - parse_expr env t - - (* Explicitly catch higher-order application. *) - | { Ast.term = Ast.App ({ Ast.term = Ast.App _; _ }, _); _ } as ast -> - _error env (Ast ast) Higher_order_application - - (* Other cases *) - | ast -> _error env (Ast ast) Unhandled_ast - in - apply_attr env res t t.Ast.attr - - and apply_attr env res ast l = - let () = List.iter (function - | Any (tag, v) -> - apply_tag env ast tag v res; - ) (parse_attrs env ast [] l) in - res - - and parse_attr_and env ast = - match ast with - | { Ast.term = Ast.App ({ Ast.term = Ast.Builtin Ast.And; _ }, l); _ } -> - parse_attrs env ast [] l - | _ -> parse_attrs env ast [] [ast] - - and parse_attrs env ast acc = function - | [] -> acc - | a :: r -> - begin match parse_expr (expect env Nothing) a with - | Tags l -> - parse_attrs env ast (l @ acc) r - | res -> - _expected env "tag" a (Some res) - | exception (Typing_error Error (_, Ast t, _) as exn) -> - _warn env (Ast t) (Error_in_attribute exn); - parse_attrs env ast acc r - | exception exn -> - _warn env (Ast a) (Error_in_attribute exn); - parse_attrs env ast acc r - end - - and parse_var env = function - | { Ast.term = Ast.Colon ({ Ast.term = Ast.Symbol s; _ }, e); _ } -> - begin match parse_expr env e with - | Ttype -> `Ty (s, Ty.Var.mk (Id.full_name s)) - | Ty ty -> `Term (s, T.Var.mk (Id.full_name s) ty) - | res -> _expected env "type (or Ttype)" e (Some res) - end - | { Ast.term = Ast.Symbol s; _ } as t -> - begin match env.expect with - | Nothing -> 
_cannot_infer_quant_var env t - | Type -> `Ty (s, Ty.Var.mk (Id.full_name s)) - | Typed ty -> `Term (s, T.Var.mk (Id.full_name s) ty) - end - | t -> _expected env "(typed) variable" t None - - and parse_quant_vars env l = - let ttype_vars, typed_vars, env' = List.fold_left ( - fun (l1, l2, acc) v -> - match parse_var acc v with - | `Ty (id, v') -> - let v'', acc' = add_type_var acc id v' v in - (v'' :: l1, l2, acc') - | `Term (id, v') -> - let v'', acc' = add_term_var acc id v' v in - (l1, v'' :: l2, acc') - ) ([], [], env) l in - List.rev ttype_vars, List.rev typed_vars, env' - - and parse_quant mk b env ast ttype_acc ty_acc = function - | { Ast.term = Ast.Binder (b', vars, f); _ } when b = b' -> - let ttype_vars, ty_vars, env' = parse_quant_vars (expect_base env) vars in - parse_quant mk b env' ast (ttype_acc @ ttype_vars) (ty_acc @ ty_vars) f - | body_ast -> - let body = parse_prop env body_ast in - let f = mk_quant env ast mk (ttype_acc, ty_acc) body in - Term f - - and parse_match env ast scrutinee branches = - let t = parse_term env scrutinee in - let l = List.map (parse_branch (T.ty t) env) branches in - Term (_wrap2 env ast T.pattern_match t l) - - and parse_branch ty env (pattern, body) = - let p, env = parse_pattern ty env pattern in - let b = parse_term env body in - (p, b) - - and parse_pattern ty env t = - match t with - | { Ast.term = Ast.Symbol s; _ } as ast_s -> - parse_pattern_app ty env t ast_s s [] - | { Ast.term = Ast.App ( - ({ Ast.term = Ast.Symbol s; _ } as ast_s), args); _ } -> - parse_pattern_app ty env t ast_s s args - | _ -> _expected env "pattern" t None - - and parse_pattern_app ty env ast ast_s s args = - match find_bound env s with - | `Cstr c -> parse_pattern_app_cstr ty env ast c args - | _ -> - begin match args with - | [] -> parse_pattern_var ty env ast_s s - | _ -> _expected env "a variable (or an ADT constructor)" ast_s None - end - - and parse_pattern_var ty env ast s = - let v = T.Var.mk (Id.full_name s) ty in - let v, env = 
add_term_var env s v ast in - T.of_var v, env - - and parse_pattern_app_cstr ty env t c args = - (* Inlined version of parse_app_cstr *) - let n_ty, n_t = T.Cstr.arity c in - let ty_l, t_l = - match split_args env n_ty n_t args with - | `Ok (l, l') -> l, l' - | `Bad_arity (expected, actual) -> - _bad_cstr_arity env c expected actual t - in - (* We can't allow binding new type variables here *) - let ty_args = List.map (parse_ty env) ty_l in - (* Compute the expected types of arguments *) - let ty_arity = _wrap3 env t T.Cstr.pattern_arity c ty ty_args in - (* Pattern args are allowed to introduce new variables *) - let t_args, env = parse_pattern_app_cstr_args env t_l ty_arity in - let res = _wrap3 env t T.apply_cstr c ty_args t_args in - res, env - - and parse_pattern_app_cstr_args env args args_ty = - let l, env = - List.fold_left2 (fun (l, env) arg ty -> - let arg, env = parse_pattern ty env arg in - (arg :: l, env) - ) ([], env) args args_ty - in - List.rev l, env - - and parse_let env acc f = function - | [] -> (* TODO: use continuation to avoid stack overflow on packs of let-bindings ? 
*) - let l = List.rev acc in - begin match parse_expr env f with - | Term t -> Term (T.letin l t) - | res -> _expected env "term of formula" f (Some res) - end - | x :: r -> - begin match x with - | { Ast.term = Ast.Colon ({ Ast.term = Ast.Symbol s; _ } as w, e); _ } - | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Eq; _}, [ - { Ast.term = Ast.Symbol s; _ } as w; e]); _ } - | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Equiv; _}, [ - { Ast.term = Ast.Symbol s; _ } as w; e]); _ } -> - let t = parse_term env e in - let v = T.Var.mk (Id.full_name s) (T.ty t) in - let v', env' = bind_term_var env s e v t w in - parse_let env' ((v', t) :: acc) f r - | t -> _expected env "variable binding" t None - end - - and parse_record_field env ast = - match ast with - | { Ast.term = Ast.Symbol s; _ } - | { Ast.term = Ast.App ({ Ast.term = Ast.Symbol s; _ }, []); _} -> - begin match find_bound env s with - | `Field f -> f - | `Not_found -> _cannot_find env ast s - | _ -> _expected env "record field" ast None - end - | _ -> - _expected env "record field name" ast None - - and parse_record_field_binding env ast = - match ast with - | { Ast.term = Ast.App ( - {Ast.term = Ast.Builtin Ast.Eq; _ }, [field; value] ); _ } -> - let f = parse_record_field env field in - let t = parse_term env value in - f, t - | _ -> - _expected env "record field_binding" ast None - - and parse_record env ast = function - | [] -> - _expected env "at least one field binding" ast None - | l -> - let l' = List.map (parse_record_field_binding env) l in - Term (create_record env ast l') - - and parse_record_with env ast = function - | [] -> - _expected env "term" ast None - | t :: l -> - let t' = parse_term env t in - let l' = List.map (parse_record_field_binding env) l in - Term (create_record_with env ast t' l') - - and parse_record_access env ast = function - | [ t; f ] -> - let t = parse_term env t in - let field = parse_record_field env f in - Term (create_record_access env ast t field) - | l -> - 
_bad_op_arity env "field record access" 2 (List.length l) ast - - and parse_app env ast s s_ast args = - match find_bound env s with - | `Ty_var v -> - if args = [] then Ty (Ty.of_var v) - else _ty_var_app env v ast - | `Term_var v -> - if args = [] then Term (T.of_var v) - else _var_app env v ast - | `Letin (_, v, t) -> - if args = [] then Term t - else _var_app env v ast - | `Ty_cst f -> - parse_app_ty env ast f args - | `Term_cst f -> - parse_app_term env ast f args - | `Cstr c -> - parse_app_cstr env ast c args - | `Field _f -> - _expected env "not a field name" s_ast None - | `Builtin b -> - builtin_apply env b ast args - | `Not_found -> - begin match infer env ast s args s_ast with - | Some Ty_fun f -> parse_app_ty env ast f args - | Some Term_fun f -> parse_app_term env ast f args - | None -> _cannot_find env ast s - end - - and parse_app_ty env ast f args = - if List.length args <> Ty.Const.arity f then - _bad_ty_arity env f (List.length args) ast; - let l = List.map (parse_ty env) args in - Ty (Ty.apply f l) - - and parse_app_term env ast f args = - let n_ty, n_t = T.Const.arity f in - let ty_l, t_l = - match split_args env n_ty n_t args with - | `Ok (l, l') -> l, l' - | `Bad_arity (expected, actual) -> - _bad_term_arity env f expected actual ast - in - let ty_args = List.map (parse_ty env) ty_l in - let t_args = List.map (parse_term env) t_l in - Term (_wrap3 env ast T.apply f ty_args t_args) - - and parse_app_cstr env ast c args = - let n_ty, n_t = T.Cstr.arity c in - let ty_l, t_l = - match split_args env n_ty n_t args with - | `Ok (l, l') -> l, l' - | `Bad_arity (expected, actual) -> - _bad_cstr_arity env c expected actual ast - in - let ty_args = List.map (parse_ty env) ty_l in - let t_args = List.map (parse_term env) t_l in - Term (_wrap3 env ast T.apply_cstr c ty_args t_args) - - and parse_ite env ast = function - | [c; a; b] -> - let cond = parse_prop env c in - let then_t = parse_term env a in - let else_t = parse_term env b in - Term (_wrap3 env 
ast T.ite cond then_t else_t) - | args -> - _bad_op_arity env "#ite" 3 (List.length args) ast - - and parse_ensure env a expected = - let t = parse_term env a in - let ty = parse_ty env expected in - Term (T.ensure t ty) - - and parse_builtin env ast b args = - match env.builtins env (Builtin b) with - | `Not_found -> _unknown_builtin env ast b - | #builtin_res as b -> builtin_apply env b ast args - - and parse_ty env ast = - unwrap_ty env ast (parse_expr (expect env Type) ast) - - and parse_term env ast = - unwrap_term env ast (parse_expr (expect_base env) ast) - - and parse_prop env ast = - match parse_expr (expect_prop env) ast with - | Term t -> t - | res -> _expected env "term/prop" ast (Some res) - - let parse_ttype_var env t = - match parse_var (expect ~force:true env Type) t with - | `Ty (id, v) -> (id, v, t) - | `Term (_, v) -> - _expected env "type variable" t (Some (Term (T.of_var v))) - - let rec parse_sig_quant env = function - | { Ast.term = Ast.Binder (Ast.Pi, vars, t); _ } -> - let ttype_vars = List.map (parse_ttype_var env) vars in - let ttype_vars', env' = add_type_vars env ttype_vars in - let l = List.combine vars ttype_vars' in - parse_sig_arrow l [] env' t - | t -> - parse_sig_arrow [] [] env t - - and parse_sig_arrow ttype_vars (ty_args: (Ast.t * res) list) env = function - | { Ast.term = Ast.Binder (Ast.Arrow, args, ret); _ } -> - let t_args = parse_sig_args env args in - parse_sig_arrow ttype_vars (ty_args @ t_args) env ret - | t -> - begin match parse_expr env t with - | Ttype -> - begin match ttype_vars with - | (h, _) :: _ -> - _error env (Ast h) Type_var_in_type_constructor - | [] -> - let aux n arg = - match (arg : _ * res) with - | (_, Ttype) -> n + 1 - | (ast, res) -> raise (Found (ast, res)) - in - begin - match List.fold_left aux 0 ty_args with - | n -> `Ty_cstr n - | exception Found (err, _) -> - _error env (Ast err) Type_var_in_type_constructor - end - end - | Ty ret -> - let aux acc arg = - match (arg : _ * res) with - | (_, Ty 
t) -> t :: acc - | (ast, res) -> raise (Found (ast, res)) - in - begin - match List.fold_left aux [] ty_args with - | exception Found (err, res) -> _expected env "type" err (Some res) - | l -> `Fun_ty (List.map snd ttype_vars, List.rev l, ret) - end - | res -> _expected env "Ttype of type" t (Some res) - end - - and parse_sig_args env l = - List.flatten @@ List.map (parse_sig_arg env) l - - and parse_sig_arg env = function - | { Ast.term = Ast.App ({ Ast.term = Ast.Builtin Ast.Product; _}, l); _ } -> - List.flatten @@ List.map (parse_sig_arg env) l - | { Ast.term = Ast.Binder (Ast.Arrow, _, _); _ } as ast -> - _error env (Ast ast) Higher_order_type - | t -> - [t, parse_expr env t] - - let parse_sig = parse_sig_quant - - let rec parse_fun ty_args t_args env = function - | { Ast.term = Ast.Binder (Ast.Fun, l, ret); _ } -> - let ty_args', t_args', env' = parse_quant_vars env l in - parse_fun (ty_args @ ty_args') (t_args @ t_args') env' ret - | ast -> - begin match parse_expr env ast with - | (Ty body) as res -> - if t_args = [] then `Ty (ty_args, body) - else _expected env "non_dependant type (or a term)" ast (Some res) - | Term body -> `Term (ty_args, t_args, body) - | res -> _expected env "term or a type" ast (Some res) - end - - let parse_inductive_arg env = function - | { Ast.term = Ast.Colon ({ Ast.term = Ast.Symbol s; _ }, e); _ } -> - let ty = parse_ty env e in - ty, Some s - | t -> - let ty = parse_ty env t in - ty, None - - - (* Typechecking mutually recursive datatypes *) - (* ************************************************************************ *) - - let decl_id t = - match (t : Stmt.decl) with - | Abstract { id; _ } - | Record { id; _ } - | Inductive { id; _ } -> id - - let appears_in s t = - let mapper = - { Ast.unit_mapper with - symbol = (fun _ ~attr:_ ~loc:_ id -> - if Id.equal s id then raise Exit); - } - in - try Ast.map mapper t; false - with Exit -> true - - let well_founded_aux l t = - match (t : Stmt.decl) with - | Abstract _ -> true - | 
Inductive { cstrs; _ } -> - List.exists (fun (_, args) -> - List.for_all (fun t -> - not (List.exists (fun i -> - appears_in (decl_id i) t - ) l) - ) args - ) cstrs - | Record { fields; _ } -> - List.for_all (fun (_, t) -> - not (List.exists (fun i -> - appears_in (decl_id i) t - ) l) - ) fields - - let rec check_well_founded env d l = - match (l : Stmt.decl list) with - | [] -> () - | _ -> - let has_progressed = ref false in - let l' = List.filter (fun t -> - let b = well_founded_aux l t in - if b then has_progressed := true; - not b - ) l in - if !has_progressed then - check_well_founded env d l' - else - _error env (Decls d) (Not_well_founded_datatypes l') - - let record env d ty_cst { Stmt.vars; fields; _ } = - let ttype_vars = List.map (parse_ttype_var env) vars in - let ty_vars, env = add_type_vars env ttype_vars in - let l = List.map (fun (id, t) -> - let ty = parse_ty env t in - Id.full_name id, ty - ) fields in - let field_list = T.define_record ty_cst ty_vars l in - List.iter2 (fun (id, _) field -> - decl_term_field env (Decl d) id field (Declared (env.file, d)) - ) fields field_list - - let inductive env d ty_cst { Stmt.id; vars; cstrs; _ } = - (* Parse the type variables *) - let ttype_vars = List.map (parse_ttype_var env) vars in - let ty_vars, env = add_type_vars env ttype_vars in - (* Parse the constructors *) - let cstrs_with_ids = List.map (fun (id, args) -> - id, List.map (fun t -> - let ty, dstr = parse_inductive_arg env t in - t, ty, dstr - ) args - ) cstrs in - (* Constructors with strings for names *) - let cstrs_with_strings = List.map (fun (id, args) -> - Id.full_name id, List.map (fun (_, ty, dstr) -> - ty, Misc.Options.map Id.full_name dstr - ) args - ) cstrs_with_ids in - (* Call the T module to define the adt and get the typed constructors - and destructors. *) - let defined_cstrs = T.define_adt ty_cst ty_vars cstrs_with_strings in - (* Register the constructors and destructors in the global env. 
*) - List.iter2 (fun (cid, pargs) (c, targs) -> - decl_term_cstr env (Decl d) cid c (Declared (env.file, d)); - List.iter2 (fun (t, _, dstr) (_, o) -> - match dstr, o with - | None, None -> () - | None, Some c -> - _warn env (Ast t) (Superfluous_destructor (id, cid, c)) - | Some id, Some const -> - decl_term_const env (Decl d) id const (Declared (env.file, d)) - | Some id, None -> - _error env (Ast t) (Missing_destructor id) - ) pargs targs - ) cstrs_with_ids defined_cstrs - - let define_decl env (_, cst) t = - match cst, (t : Stmt.decl) with - | _, Abstract _ -> () - | `Term_decl _, Inductive _ -> assert false - | `Type_decl c, Inductive i -> inductive env t c i - | `Term_decl _, Record _ -> assert false - | `Type_decl c, Record r -> record env t c r - - let parse_decl env tags (t : Stmt.decl) = - match t with - | Abstract { id; ty; _ } -> - begin match parse_sig env ty with - | `Ty_cstr n -> - let c = Ty.Const.mk (Id.full_name id) n in - List.iter (fun (Any (tag, v)) -> Ty.Const.tag c tag v) tags; - id, `Type_decl c - | `Fun_ty (vars, args, ret) -> - let f = T.Const.mk (Id.full_name id) vars args ret in - List.iter (fun (Any (tag, v)) -> T.Const.tag f tag v) tags; - id, `Term_decl f - end - | Record { id; vars; _ } - | Inductive { id; vars; _ } -> - let n = List.length vars in - let c = Ty.Const.mk (Id.full_name id) n in - List.iter (fun (Any (tag, v)) -> Ty.Const.tag c tag v) tags; - id, `Type_decl c - - let record_decl env (id, tdecl) (t : Stmt.decl) = - match tdecl with - | `Type_decl c -> decl_ty_const env (Decl t) id c (Declared (env.file, t)) - | `Term_decl f -> decl_term_const env (Decl t) id f (Declared (env.file, t)) - - let decls env ?attr (d: Stmt.decl Stmt.group) = - let tags = match attr with - | None -> [] - | Some a -> parse_attr_and env a - in - if d.recursive then begin - (* Check well-foundedness *) - check_well_founded env d d.contents; - (* First pre-parse all definitions and generate the typed symbols for them *) - let parsed = List.map 
(parse_decl env tags) d.contents in - (* Then, since the decls are recursive, register in the global env the type - const for each decl before fully parsing and defining them. *) - let () = List.iter2 (record_decl env) parsed d.contents in - (* Then parse the complete type definition and define them. - TODO: parse (and thus define them with T) in the topological order - defined by the well-founded check ? *) - List.iter2 (define_decl env) parsed d.contents; - (* Return the defined types *) - List.map snd parsed - end else begin - List.map (fun t -> - (* First pre-parse all definitions and generate the typed symbols for them *) - let parsed = parse_decl env tags t in - (* Then parse the complete type definition and define them. *) - let () = define_decl env parsed t in - (* Finally record them in the state *) - let () = record_decl env parsed t in - (* return *) - snd parsed - ) d.contents - end - - (* Definitions *) - (* ************************************************************************ *) - - let parse_def_sig env tags (d: Stmt.def) = - parse_decl env tags (Abstract { id = d.id; ty = d.ty; loc = d.loc; }) - - let record_def d env (id, tdecl) (t : Stmt.def) = - match tdecl with - | `Type_decl _ -> _error env (Defs d) (Type_def_rec t) - | `Term_decl f -> decl_term_const env (Def t) id f (Defined (env.file, t)) - - let parse_def env (_, ssig) (d : Stmt.def) = - match ssig, parse_fun [] [] env d.body with - | `Type_decl c, `Ty (ty_args, body) -> - `Type_def (d.id, c, ty_args, body) - | `Term_decl f, `Term (ty_args, t_args, body) -> - `Term_def (d.id, f, ty_args, t_args, body) - | `Term_decl _, `Ty _ -> assert false - | `Type_decl _, `Term _ -> assert false - - let defs env ?attr (d : Stmt.defs) = - let tags = match attr with - | None -> [] - | Some a -> parse_attr_and env a - in - if d.recursive then begin - let sigs = List.map (parse_def_sig env tags) d.contents in - let () = List.iter2 (record_def d env) sigs d.contents in - List.map2 (parse_def env) sigs 
d.contents - end else begin - List.map (fun t -> - parse_def env (parse_def_sig env tags t) t - ) d.contents - end - - (* High-level parsing function *) - (* ************************************************************************ *) - - let parse env ast = - let res = parse_prop env ast in - match T.fv res with - | [], [] -> res - | tys, ts -> _error env (Ast ast) (Unbound_variables (tys, ts, res)) + ?st ?expect ?var_infer ?sym_infer + ?(order=First_order) ?poly ?quants + ?free_wildcards ~warnings ~file builtin_symbols = + let env = + empty_env ?st + ?expect ?var_infer ?sym_infer + ~order ?poly ?quants ?free_wildcards + ~warnings ~file builtin_symbols + in + match order with + | First_order -> env + | Higher_order -> + _error env (Located Dolmen.Std.Loc.no_loc) Higher_order_env_in_tff_typechecker end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/tff.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/tff.mli index 73756a7ca94a0ae614453cb71cb3d44b04c2e51e..428f9b57e6a9c61abadfe8125f1e1e2269f45261 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/tff.mli +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/tff.mli @@ -9,13 +9,14 @@ module type S = Tff_intf.S module Make (Tag: Dolmen.Intf.Tag.S) (Ty: Dolmen.Intf.Ty.Tff - with type 'a tag := 'a Tag.t) + with type 'a tag := 'a Tag.t + and type path := Dolmen.Std.Path.t) (T: Dolmen.Intf.Term.Tff with type ty := Ty.t and type ty_var := Ty.Var.t and type ty_const := Ty.Const.t - and type 'a tag := 'a Tag.t) + and type 'a tag := 'a Tag.t + and type path := Dolmen.Std.Path.t) : S with module Tag = Tag and module Ty = Ty and module T = T - diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/tff_intf.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/tff_intf.ml index 86fbd96e34c87e452170cd20bcc6e7ea5a91cb75..3c8d3203750fc10940826eafcb0f5b45394bb586 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/tff_intf.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/tff_intf.ml @@ -1,7 
+1,7 @@ (* This file is free software, part of dolmen. See file "LICENSE" for more information *) -(** External Typechecker interface +(** External Typechecker interface for TFF This module defines the external typechcker interface, that is, the interface of an instantiated typechecker. *) @@ -15,379 +15,23 @@ module type S = sig module Tag: Dolmen.Intf.Tag.S module Ty: Dolmen.Intf.Ty.Tff with type 'a tag := 'a Tag.t + and type path := Dolmen.Std.Path.t module T: Dolmen.Intf.Term.Tff with type ty := Ty.t and type ty_var := Ty.Var.t and type ty_const := Ty.Const.t and type 'a tag := 'a Tag.t + and type path := Dolmen.Std.Path.t - (** {2 Type definitions} *) - - type poly = - | Explicit - (** Type arguments must be explicitly given in funciton applications *) - | Implicit - (** Type arguments are not given in funciton applications, and instead - type annotations/coercions are used to disambiguate applications - of polymorphic symbols. *) - | Flexible - (** Mix between explicit and implicit: depending on the arity of a - symbol and the number of arguments provided, either the provided - type arguments are used, or wildcards are generated for all of them, - and later instantiated when needed. *) - (** The various polymorphism mode for the typechecker *) - - type expect = - | Nothing - | Type - | Typed of Ty.t - (** The type of expected result when typing an expression, used to infer - non-declared symbols. *) - - type tag = Any : 'a Tag.t * 'a -> tag - (** Existencial wrapper around tags *) - - type res = - | Ttype - | Ty of Ty.t - | Term of T.t - | Tags of tag list (**) - (** The results of parsing an untyped term. 
*) - - type builtin_res = [ - | `Ttype of (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> unit) - | `Ty of (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> Ty.t) - | `Term of (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> T.t) - | `Tags of (Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> tag list) - ] - (** The result of parsing a symbol by the theory *) - - type not_found = [ `Not_found ] - (** Not bound bindings *) - - type var = [ - | `Ty_var of Ty.Var.t - | `Term_var of T.Var.t - | `Letin of Dolmen.Std.Term.t * T.Var.t * T.t - ] - (** Variable bindings *) - - type cst = [ - | `Cstr of T.Cstr.t - | `Field of T.Field.t - | `Ty_cst of Ty.Const.t - | `Term_cst of T.Const.t - ] - (** Constant bindings *) - - type builtin = [ - | `Builtin of builtin_res - ] - (** Builtin binding *) - - type bound = [ var | cst | builtin ] - (* All internal bindings *) - - type inferred = - | Ty_fun of Ty.Const.t - | Term_fun of T.Const.t (**) - (** The type for inferred symbols. *) - - type reason = - | Builtin - | Bound of Dolmen.Std.Loc.file * Dolmen.Std.Term.t - | Inferred of Dolmen.Std.Loc.file * Dolmen.Std.Term.t - | Defined of Dolmen.Std.Loc.file * Dolmen.Std.Statement.def - | Declared of Dolmen.Std.Loc.file * Dolmen.Std.Statement.decl - (** The type of reasons for constant typing *) - - type binding = [ - | `Not_found - | `Builtin of [ - | `Ttype - | `Ty - | `Term - | `Tag - ] - | `Variable of [ - | `Ty of Ty.Var.t * reason option - | `Term of T.Var.t * reason option - ] - | `Constant of [ - | `Ty of Ty.Const.t * reason option - | `Cstr of T.Cstr.t * reason option - | `Term of T.Const.t * reason option - | `Field of T.Field.t * reason option - ] - ] - (** The bindings that can occur. 
*) - - - (** {2 Errors and warnings} *) - - type _ fragment = - | Ast : Dolmen.Std.Term.t -> Dolmen.Std.Term.t fragment - | Def : Dolmen.Std.Statement.def -> Dolmen.Std.Statement.def fragment - | Defs : Dolmen.Std.Statement.defs -> Dolmen.Std.Statement.defs fragment - | Decl : Dolmen.Std.Statement.decl -> Dolmen.Std.Statement.decl fragment - | Decls : Dolmen.Std.Statement.decls -> Dolmen.Std.Statement.decls fragment - | Located : Dolmen.Std.Loc.t -> Dolmen.Std.Loc.t fragment (**) - (** Fragments of input that represent the sources of warnings/errors *) - - type _ warn = .. - (** The type of warnings, parameterized by the type of fragment they can - trigger on *) - - type _ warn += - | Unused_type_variable : Ty.Var.t -> Dolmen.Std.Term.t warn - (** Unused quantified type variable *) - | Unused_term_variable : T.Var.t -> Dolmen.Std.Term.t warn - (** Unused quantified term variable *) - | Error_in_attribute : exn -> Dolmen.Std.Term.t warn - (** An error occurred wile parsing an attribute *) - | Superfluous_destructor : - Dolmen.Std.Id.t * Dolmen.Std.Id.t * T.Const.t -> Dolmen.Std.Term.t warn - (** The user implementation of typed terms returned a destructor where - was asked for. This warning can very safely be ignored. *) - (** Warnings that cna trigger on regular parsed terms. *) - - type _ warn += - | Shadowing : Dolmen.Std.Id.t * binding * binding -> _ warn - (** Shadowing of the given identifier, - together with the old and current binding. *) - (** Special case of warnings for shadowing, as it can happen both from a - term but also a declaration, hence why the type variable of [warn] is - left wild. *) - - type _ err = .. - (** The type of errors, parameterized by the type of fragment they can - trigger on *) - - type _ err += - | Not_well_founded_datatypes : - Dolmen.Std.Statement.decl list -> Dolmen.Std.Statement.decls err - (** Not well-dounded datatypes definitions. 
*) - (** Errors that occur on declaration(s) *) - - type _ err += - | Infer_type_variable : Dolmen.Std.Term.t err - (** The type of a bound variable had to be inferred which is forbidden. *) - | Expected : string * res option -> Dolmen.Std.Term.t err - (** The parsed term didn't match the expected shape *) - | Bad_index_arity : string * int * int -> Dolmen.Std.Term.t err - (** [Bad_index_arity (name, expected, actual)] denotes an error where - an indexed family of operators (based on [name]) expect to be indexed - by [expected] arguments but got [actual] instead. *) - | Bad_ty_arity : Ty.Const.t * int -> Dolmen.Std.Term.t err - (** [Bad_ty_arity (cst, actual)] denotes a type constant that was applied - to [actual] arguments, but which has a different arity (which should - be accessible by getting its type/sort/arity). *) - | Bad_op_arity : string * int list * int -> Dolmen.Std.Term.t err - (** [Bad_op_arity (name, expected, actual)] denotes a named operator - (which may be a builtin operator, a top-level defined constant which - is being subtituted, etc...) expecting a number of arguments among - the [expected] list, but instead got [actual] number of arguments. *) - | Bad_cstr_arity : T.Cstr.t * int list * int -> Dolmen.Std.Term.t err - (** [Bad_cstr_arity (cstr, expected, actual)] denotes an ADT constructor, - which was expecting one of [expected] arguments, but which was applied - to [actual] arguments. *) - | Bad_term_arity : T.Const.t * int list * int -> Dolmen.Std.Term.t err - (** [Bad_term_arity (func, expected, actual)] denotes a funciton symbol, - which was expecting one of [expected] arguments, but which was applied - to [actual] arguments. *) - | Repeated_record_field : T.Field.t -> Dolmen.Std.Term.t err - (** [Repeated_record_field f] denotes an error within an expression - that builds a record by giving values to all fields, but where the - field [f] appears more than once. 
*) - | Missing_record_field : T.Field.t -> Dolmen.Std.Term.t err - (** [Missing_record_field f] denotes an error within an expression - that builds a record by giving values to all fields, but where the - field [f] does not appear. *) - | Mismatch_record_type : T.Field.t * Ty.Const.t -> Dolmen.Std.Term.t err - (** [Mismatch_record_type (f, r)] denotes an error where while building - a record expression for a record of type [c], a field [f] belonging - to another record type was used. *) - | Var_application : T.Var.t -> Dolmen.Std.Term.t err - (** [Var_application v] denotes a variable which was applied to other - terms, which is forbidden in first-order formulas. *) - | Ty_var_application : Ty.Var.t -> Dolmen.Std.Term.t err - (** [Ty_var_application v] denotes a type variable which was applied to - other terms, which is forbidden in first-order formulas. *) - | Type_mismatch : T.t * Ty.t -> Dolmen.Std.Term.t err - (** *) - | Quantified_var_inference : Dolmen.Std.Term.t err - (** Quantified variable without a type *) - | Unhandled_builtin : Dolmen.Std.Term.builtin -> Dolmen.Std.Term.t err - (** *) - | Cannot_tag_tag : Dolmen.Std.Term.t err - (** *) - | Cannot_tag_ttype : Dolmen.Std.Term.t err - (** *) - | Cannot_find : Dolmen.Std.Id.t -> Dolmen.Std.Term.t err - (** *) - | Type_var_in_type_constructor : Dolmen.Std.Term.t err - (** *) - | Forbidden_quantifier : Dolmen.Std.Term.t err - (** *) - | Missing_destructor : Dolmen.Std.Id.t -> Dolmen.Std.Term.t err - (** *) - | Type_def_rec : Dolmen.Std.Statement.def -> Dolmen.Std.Statement.defs err - (** *) - | Higher_order_application : Dolmen.Std.Term.t err - (** *) - | Higher_order_type : Dolmen.Std.Term.t err - (** *) - | Unbound_variables : Ty.Var.t list * T.Var.t list * T.t -> Dolmen.Std.Term.t err - (** *) - | Uncaught_exn : exn * Printexc.raw_backtrace -> Dolmen.Std.Term.t err - (** *) - | Unhandled_ast : Dolmen.Std.Term.t err - (** *) - (** Errors that occur on regular parsed terms. 
*) - - - (** {2 Global State} *) - - type state - (** The type of mutable state for typechecking. *) - - val new_state : unit -> state - (** Create a new state. *) - - val copy_state : state -> state - (** Make a copy of the global state included in the env *) - - - (** {2 Typing Environment} *) - - type env - (** The type of environments for typechecking. *) - - type 'a typer = env -> Dolmen.Std.Term.t -> 'a - (** A general type for typers. Takes a local environment and the current untyped term, - and return a value. The typer may need additional information for parsing, - in which case the return value will be a function. - @raise Typing_error *) - - type symbol = - | Id of Dolmen.Std.Id.t - | Builtin of Dolmen.Std.Term.builtin - (** Wrapper around potential function symbols from the Dolmen AST. *) - - type builtin_symbols = env -> symbol -> [ builtin_res | not_found ] - (** The type of a typer for builtin symbols. Given the environment and a symbol, - the theory should return a typing function if the symbol belongs to the - theory. This typing function takes first the ast term of the whole - application that is beign typechecked, and the list of arguments to the - symbol. *) - - type warning = - | Warning : env * 'a fragment * 'a warn -> warning (**) - (** Existential wrapper around warnings *) - - type error = - | Error : env * 'a fragment * 'a err -> error (**) - (** Existential wrapper around errors *) - - exception Typing_error of error - (** Exception for typing errors *) - - val empty_env : - ?st:state -> ?expect:expect -> - ?infer_hook:(env -> inferred -> unit) -> - ?infer_base:Ty.t -> ?poly:poly -> ?quants:bool -> - warnings:(warning -> unit) -> - file:Dolmen.Std.Loc.file -> - builtin_symbols -> env - (** Create a new environment. *) - - val expect : ?force:bool -> env -> expect -> env - (** Returns the same environment but with the given expectation, - except if the environnement already except [Nothing]. 
*) - - - (** {2 Location helpers} *) - - val fragment_loc : env -> _ fragment -> Dolmen.Std.Loc.full - (** Convenient function to get the location of a fragment. *) - - val binding_reason : binding -> reason option - (** Extract the reason from a binding - @raise Invalid_argument if the binding is [`Not_found] *) - - - (** {2 Builtin helpers} *) - - val find_var : env -> Dolmen.Std.Id.t -> [ var | not_found ] - (** Try and find the given id in the set of locally bound variables. *) - - val find_global : env -> Dolmen.Std.Id.t -> [ cst | not_found ] - (** Try and find the given id in the set of globally bound constants. *) - - val find_builtin : env -> Dolmen.Std.Id.t -> [ builtin | not_found ] - (** Try and find the given id in the set of bound builtin symbols. *) - - val find_bound : env -> Dolmen.Std.Id.t -> [ bound | not_found ] - (** Try and find a bound identifier in the env, whetehr it be locally bound - (such as bound variables), constants bound at top-level, or builtin - symbols bound by the builtin theory. *) - - - (** {2 Errors & Warnings} *) - - val _warn : env -> 'a fragment -> 'a warn -> unit - (** Emit a warning *) - - val _error : env -> 'a fragment -> 'a err -> _ - (** Raise an error *) - - val suggest : limit:int -> env -> Dolmen.Std.Id.t -> Dolmen.Std.Id.t list - (** From a dolmen identifier, return a list of existing bound identifiers - in the env that are up to [~limit] in terms of distance of edition. *) - - - (** {2 Parsing functions} *) - - val parse_expr : res typer - (** Main parsing function. *) - - val parse_ty : Ty.t typer - val parse_term : T.t typer - val parse_prop : T.t typer - (** Wrappers around {parse_expr} to set the expect field of the env, - and unwrap an expected return value. *) - - val parse_app_ty : (Ty.Const.t -> Dolmen.Std.Term.t list -> res) typer - val parse_app_term : (T.Const.t -> Dolmen.Std.Term.t list -> res) typer - (** Function used for parsing applications. 
The first dolmen term given - is the application term being parsed (used for reporting errors). *) - - val unwrap_ty : env -> Dolmen.Std.Term.t -> res -> Ty.t - val unwrap_term : env -> Dolmen.Std.Term.t -> res -> T.t - (** Unwrap a result, raising the adequate typing error - if the result if not as expected. *) - - - (** {2 High-level functions} *) - - val decls : - env -> ?attr:Dolmen.Std.Term.t -> - Dolmen.Std.Statement.decls -> [ - | `Type_decl of Ty.Const.t - | `Term_decl of T.Const.t - ] list - (** Parse a list of potentially mutually recursive declarations. *) - - val defs : - env -> ?attr:Dolmen.Std.Term.t -> - Dolmen.Std.Statement.defs -> [ - | `Type_def of Dolmen.Std.Id.t * Ty.Const.t * Ty.Var.t list * Ty.t - | `Term_def of Dolmen.Std.Id.t * T.Const.t * Ty.Var.t list * T.Var.t list * T.t - ] list - (** Parse a definition *) - - val parse : T.t typer - (** Parse a formula *) + include Intf.Formulas + with type ty := Ty.t + and type ty_var := Ty.Var.t + and type ty_cst := Ty.Const.t + and type term := T.t + and type term_var := T.Var.t + and type term_cst := T.Const.t + and type term_cstr := T.Cstr.t + and type term_field := T.Field.t + and type 'a ast_tag := 'a Tag.t end - diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/thf.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/thf.ml new file mode 100644 index 0000000000000000000000000000000000000000..7a30582df0999fa38cc67888ea009e6af17eb313 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/thf.ml @@ -0,0 +1,2178 @@ + +(* This file is free software, part of dolmen. 
See file "LICENSE" for more information *) + +module type S = Thf_intf.S + +(* Typechecking functor *) +(* ************************************************************************ *) + +module Make + (Tag: Dolmen.Intf.Tag.S) + (Ty: Dolmen.Intf.Ty.Thf + with type 'a tag := 'a Tag.t + and type path := Dolmen.Std.Path.t) + (T: Dolmen.Intf.Term.Thf + with type ty := Ty.t + and type ty_var := Ty.Var.t + and type ty_const := Ty.Const.t + and type 'a tag := 'a Tag.t + and type path := Dolmen.Std.Path.t) += struct + + (* Module aliases *) + (* ************************************************************************ *) + + (* These are exported *) + module T = T + module Ty = Ty + module Tag = Tag + + (* Non-exported module alias to avoid confusing + untyped Terms and typed terms *) + module Id = Dolmen.Std.Id + module Ast = Dolmen.Std.Term + module Stmt = Dolmen.Std.Statement + module Loc = Dolmen.Std.Loc + + (* Custom alias for convenience *) + module Hmap = Dolmen.Std.Tag + + (* Types *) + (* ************************************************************************ *) + + (* FO vs HO *) + type order = + | First_order + | Higher_order + + (* Different behavior of polymorphism *) + type poly = + | Explicit + | Implicit + | Flexible + + (* The source of a wildcard. 
*) + type sym_inference_source = { + symbol : Id.t; + symbol_loc : Loc.t; + mutable inferred_ty : Ty.t; + } + + type var_inference_source = { + variable : Id.t; + variable_loc : Loc.t; + mutable inferred_ty : Ty.t; + } + + type wildcard_source = + | Arg_of of wildcard_source + | Ret_of of wildcard_source + | From_source of Ast.t + | Added_type_argument of Ast.t + | Symbol_inference of sym_inference_source + | Variable_inference of var_inference_source + + type wildcard_shape = + | Forbidden + | Any_in_scope + | Any_base of { + allowed : Ty.t list; + preferred : Ty.t; + } + | Arrow of { + arg_shape : wildcard_shape; + ret_shape : wildcard_shape; + } + + type infer_unbound_var_scheme = + | No_inference (* the only sane default *) + | Unification_type_variable + + type infer_term_scheme = + | No_inference + | Wildcard of wildcard_shape + + type var_infer = { + infer_unbound_vars : infer_unbound_var_scheme; + infer_type_vars_in_binding_pos : bool; + infer_term_vars_in_binding_pos : infer_term_scheme; + } + + type sym_infer = { + infer_type_csts : bool; + infer_term_csts : infer_term_scheme; + } + + type free_wildcards = + | Forbidden (* the reasonable default *) + | Implicitly_universally_quantified + + type expect = + | Type + | Term + | Anything + + (* The type returned after parsing an expression. 
*) + type tag = + | Set : 'a Tag.t * 'a -> tag + | Add : 'a list Tag.t * 'a -> tag + + (* Result of parsing an expression *) + type res = + | Ttype + | Ty of Ty.t + | Term of T.t + | Tags of tag list + + + (* Things that can be inferred *) + type inferred = + | Ty_fun of Ty.Const.t + | Term_fun of T.Const.t + + (* Wrapper around potential function symbols in Dolmen *) + type symbol = Intf.symbol = + | Id of Id.t + | Builtin of Ast.builtin + + (* Not found result *) + type not_found = [ `Not_found ] + + (* Variable that can be bound to a dolmen identifier *) + type ty_var = [ + | `Ty_var of Ty.Var.t + ] + + type term_var = [ + | `Term_var of T.Var.t + ] + + type 'env let_var = [ + | `Letin of 'env * Ast.t * T.Var.t * T.t + ] + + type 'env bound_var = [ + | ty_var | term_var | 'env let_var + ] + + (* Constants that can be bound to a dolmen identifier. *) + type cst = [ + | `Cstr of T.Cstr.t + | `Field of T.Field.t + | `Ty_cst of Ty.Const.t + | `Term_cst of T.Const.t + ] + + (* Result of parsing a symbol by the theory *) + type builtin_res = [ + | `Ttype of (Ast.t -> Ast.t list -> unit) + | `Ty of (Ast.t -> Ast.t list -> Ty.t) + | `Term of (Ast.t -> Ast.t list -> T.t) + | `Tags of (Ast.t -> Ast.t list -> tag list) + ] + + (* Names that are bound to a dolmen identifier by the builtins *) + type builtin = [ + | `Builtin of builtin_res + ] + + type reason = + | Builtin + | Bound of Loc.file * Ast.t + | Inferred of Loc.file * Ast.t + | Defined of Loc.file * Stmt.def + | Declared of Loc.file * Stmt.decl + (** The type of reasons for constant typing *) + + type binding = [ + | `Not_found + | `Builtin of [ + | `Ttype + | `Ty + | `Term + | `Tag + ] + | `Variable of [ + | `Ty of Ty.Var.t * reason option + | `Term of T.Var.t * reason option + ] + | `Constant of [ + | `Ty of Ty.Const.t * reason option + | `Cstr of T.Cstr.t * reason option + | `Term of T.Const.t * reason option + | `Field of T.Field.t * reason option + ] + ] + (** The bindings that can occur. 
*) + + + (* Maps & Hashtbls *) + (* ************************************************************************ *) + + module M = Id.Map + + module E = Map.Make(Ty.Var) + module F = Map.Make(T.Var) + module R = Map.Make(Ty.Const) + module S = Map.Make(T.Const) + module U = Map.Make(T.Cstr) + module V = Map.Make(T.Field) + + + (* Warnings & Errors *) + (* ************************************************************************ *) + + (* Fragments of input that represent the sources of warnings/errors *) + type _ fragment = + | Ast : Ast.t -> Ast.t fragment + | Def : Stmt.def -> Stmt.def fragment + | Defs : Stmt.defs -> Stmt.defs fragment + | Decl : Stmt.decl -> Stmt.decl fragment + | Decls : Stmt.decls -> Stmt.decls fragment + | Located : Loc.t -> Loc.t fragment + + let decl_loc d = + match (d : Stmt.decl) with + | Record { loc; _ } + | Abstract { loc; _ } + | Inductive { loc; _ } -> loc + + (* Warnings *) + (* ******** *) + + (* Warnings, parameterized by the type of fragment they can trigger on *) + type _ warn = .. + + type _ warn += + | Unused_type_variable : + [ `Quantified | `Letbound ] * Ty.Var.t -> Ast.t warn + (* Unused bound type variable *) + | Unused_term_variable : + [ `Quantified | `Letbound ] * T.Var.t -> Ast.t warn + (* Unused bound term variable *) + | Error_in_attribute : exn -> Ast.t warn + (* An error occurred wile parsing an attribute *) + | Superfluous_destructor : Id.t * Id.t * T.Const.t -> Ast.t warn + (* The user implementation of typed terms returned a destructor where + was asked for. This warning can very safely be ignored. *) + + (* Special case for shadowing, as it can happen both from a term but also + a declaration, hence why the type variable of [warn] is left wild. *) + type _ warn += + | Shadowing : Id.t * binding * binding -> _ warn + (* Shadowing of the given identifier, together with the old and current + binding. 
*) + + + (* Errors *) + (* ****** *) + + (* Errors, parameterized by the kind of fragment they can trigger on *) + type _ err = .. + + (* Errors that occur on declaration(s) *) + type _ err += + | Not_well_founded_datatypes : Stmt.decl list -> Stmt.decls err + (* Not well-dounded datatypes definitions. *) + + (* Errors that occur on term fragments, i.e. Ast.t fragments *) + type _ err += + | Expected : string * res option -> Ast.t err + | Bad_index_arity : string * int * int -> Ast.t err + | Bad_ty_arity : Ty.Const.t * int -> Ast.t err + | Bad_op_arity : symbol * int list * int -> Ast.t err + | Bad_cstr_arity : T.Cstr.t * int list * int -> Ast.t err + | Bad_term_arity : T.Const.t * int list * int -> Ast.t err + | Bad_poly_arity : Ty.Var.t list * Ty.t list -> Ast.t err + | Over_application : T.t list -> Ast.t err + | Repeated_record_field : T.Field.t -> Ast.t err + | Missing_record_field : T.Field.t -> Ast.t err + | Mismatch_record_type : T.Field.t * Ty.Const.t -> Ast.t err + | Mismatch_sum_type : T.Cstr.t * Ty.t -> Ast.t err + | Var_application : T.Var.t -> Ast.t err + | Ty_var_application : Ty.Var.t -> Ast.t err + | Type_mismatch : T.t * Ty.t -> Ast.t err + | Var_in_binding_pos_underspecified : Ast.t err + | Unhandled_builtin : Ast.builtin -> Ast.t err + | Cannot_tag_tag : Ast.t err + | Cannot_tag_ttype : Ast.t err + | Cannot_find : Id.t * string -> Ast.t err + | Forbidden_quantifier : Ast.t err + | Missing_destructor : Id.t -> Ast.t err + | Type_def_rec : Stmt.def -> Stmt.defs err + | Higher_order_application : Ast.t err + | Higher_order_type : Ast.t err + | Higher_order_env_in_tff_typechecker : Loc.t err + | Polymorphic_function_argument : Ast.t err + | Non_prenex_polymorphism : Ty.t -> Ast.t err + | Inference_forbidden : + Ty.Var.t * wildcard_source * Ty.t -> Ast.t err + | Inference_conflict : + Ty.Var.t * wildcard_source * Ty.t * Ty.t list -> Ast.t err + | Inference_scope_escape : + Ty.Var.t * wildcard_source * Ty.Var.t * reason option -> Ast.t err + | 
Unbound_type_wildcards : + (Ty.Var.t * wildcard_source list) list -> Ast.t err + | Uncaught_exn : exn * Printexc.raw_backtrace -> Ast.t err + | Unhandled_ast : Ast.t err + + + (* State & Environment *) + (* ************************************************************************ *) + + type wildcard_hook = { + src : wildcard_source; + shape : wildcard_shape; + bound : reason E.t; + } + + (* Global, mutable state. *) + type state = { + + mutable csts : cst M.t; + (* association between dolmen ids and types/terms constants. *) + mutable ttype_locs : reason R.t; + (* stores reasons for typing of type constructors *) + mutable const_locs : reason S.t; + (* stores reasons for typing of constants *) + mutable cstrs_locs : reason U.t; + (* stores reasons for typing constructors *) + mutable field_locs : reason V.t; + (* stores reasons for typing record fields *) + + mutable custom : Hmap.map; + (* heterogeneous map for theories to store custom information, + all the while being kept in sync with changes in the global state. *) + + } + + (* The local environments used for type-checking. *) + type env = { + + (* current file *) + file : Loc.file; + + (* global state *) + st : state; + + (* Regular bound variables *) + vars : env bound_var M.t; + type_locs : reason E.t; + term_locs : reason F.t; + + (* inferred variables *) + inferred_vars : ty_var M.t ref; + inferred_ty_locs : reason E.t ref; + + (* wildcards *) + wildcards : wildcard_hook list E.t ref; + + (* The current builtin symbols *) + builtins : builtin_symbols; + + (* warnings *) + warnings : warning -> unit; + + (* Typechecke configuration *) + order : order; + poly : poly; + quants : bool; + expect : expect; + var_infer : var_infer; + sym_infer : sym_infer; + free_wildcards : free_wildcards; + } + + (* Builtin symbols, i.e symbols understood by some theories, + but which do not have specific syntax, so end up as special + cases of application. 
*) + and builtin_symbols = env -> symbol -> [ builtin_res | not_found ] + + (* Existencial wrapper for wranings. *) + and warning = + | Warning : env * 'a fragment * 'a warn -> warning + + (* Exitencial wrapper around errors *) + and error = + | Error : env * 'a fragment * 'a err -> error + + (* Convenient alias *) + type 'a typer = env -> Ast.t -> 'a + + (* Convenient aliases *) + type var = env bound_var + type bound = [ var | cst | builtin ] + + + (* Exceptions *) + (* ************************************************************************ *) + + (* Internal exceptions *) + exception Found of Ast.t * res + exception Wildcard_bad_scope of Ty.Var.t * wildcard_source * Ty.Var.t + exception Wildcard_bad_base of Ty.Var.t * wildcard_source * Ty.t * Ty.t list + exception Wildcard_forbidden of Ty.Var.t * wildcard_source * Ty.t + + (* Exception for typing errors *) + exception Typing_error of error + + + (* Warnings/Error helpers *) + (* ************************************************************************ *) + + let _warn env fragment w = + env.warnings (Warning (env, fragment, w)) + + let _error env fragment e = + raise (Typing_error (Error (env, fragment, e))) + + let loc env loc : Loc.full = + { file = env.file; loc; } + + let fragment_loc : + type a. 
env -> a fragment -> Loc.full = fun env fg -> + let loc = + match fg with + | Ast { loc; _ } -> loc + | Def d -> d.loc + | Defs { contents = []; _ } -> Loc.no_loc + | Defs { contents = d :: _; _ } -> d.loc + | Decl d -> decl_loc d + | Decls { contents = []; _ } -> Loc.no_loc + | Decls { contents = d :: _; _ } -> decl_loc d + | Located l -> l + in + { file = env.file; + loc = loc; } + + (* Binding lookups *) + (* ************************************************************************ *) + + let find_reason env (v : bound) = + try + let r = + match v with + | `Builtin _ -> Builtin + | `Ty_var v -> E.find v env.type_locs + | `Term_var v -> F.find v env.term_locs + | `Letin (_, _, v, _) -> F.find v env.term_locs + | `Ty_cst c -> R.find c env.st.ttype_locs + | `Term_cst c -> S.find c env.st.const_locs + | `Cstr c -> U.find c env.st.cstrs_locs + | `Field f -> V.find f env.st.field_locs + in + Some r + with Not_found -> None + + let with_reason reason bound : binding = + match (bound : [ bound | not_found ]) with + | `Not_found -> `Not_found + | `Builtin `Ttype _ -> `Builtin `Ttype + | `Builtin `Ty _ -> `Builtin `Ty + | `Builtin `Term _ -> `Builtin `Term + | `Builtin `Tags _ -> `Builtin `Tag + | `Ty_var v -> `Variable (`Ty (v, reason)) + | `Term_var v -> `Variable (`Term (v, reason)) + | `Letin (_, _, v, _) -> `Variable (`Term (v, reason)) + | `Ty_cst c -> `Constant (`Ty (c, reason)) + | `Term_cst c -> `Constant (`Term (c, reason)) + | `Cstr c -> `Constant (`Cstr (c, reason)) + | `Field f -> `Constant (`Field (f, reason)) + + let binding_reason binding : reason option = + match (binding : binding) with + | `Not_found -> assert false + | `Builtin _ -> Some Builtin + | `Variable `Ty (_, reason) + | `Variable `Term (_, reason) + | `Constant `Ty (_, reason) + | `Constant `Term (_, reason) + | `Constant `Cstr (_, reason) + | `Constant `Field (_, reason) + -> reason + + let _shadow env fragment id + (old : bound) reason (bound : [< bound]) = + let old_binding = + with_reason 
(find_reason env old) (old :> [bound | not_found]) + in + let new_binding = with_reason (Some reason) (bound :> [bound | not_found]) in + _warn env fragment (Shadowing (id, old_binding, new_binding)) + + let find_var env name : [var | not_found] = + match M.find_opt name env.vars with + | Some (#var as res) -> res + | None -> + begin match M.find_opt name !(env.inferred_vars) with + | Some (#ty_var as res) -> res + | None -> `Not_found + end + + let find_global env id : [cst | not_found] = + match M.find_opt id env.st.csts with + | Some res -> (res :> [cst | not_found]) + | None -> `Not_found + + let find_builtin env id : [builtin | not_found] = + match env.builtins env (Id id) with + | `Not_found -> `Not_found + | #builtin_res as res -> `Builtin res + + let find_bound env id : [ bound | not_found ] = + match find_var env id with + | #var as res -> (res :> [ bound | not_found ]) + | `Not_found -> + begin match find_global env id with + | #cst as res -> (res :> [ bound | not_found ]) + | `Not_found -> + (find_builtin env id :> [ bound | not_found ]) + end + + + (* Convenience functions *) + (* ************************************************************************ *) + + let _expected env s t res = + _error env (Ast t) (Expected (s, res)) + + let _bad_op_arity env s n m t = + _error env (Ast t) (Bad_op_arity (s, [n], m)) + + let _bad_ty_arity env f n t = + _error env (Ast t) (Bad_ty_arity (f, n)) + + let _bad_term_arity env f expected actual t = + _error env (Ast t) (Bad_term_arity (f, expected, actual)) + + let _bad_cstr_arity env c expected actual t = + _error env (Ast t) (Bad_cstr_arity (c, expected, actual)) + + let _bad_poly_arity env ast ty_vars tys = + _error env (Ast ast) (Bad_poly_arity (ty_vars, tys)) + + let _over_application env ast over_args = + _error env (Ast ast) (Over_application over_args) + + let _ty_var_app env v t = + _error env (Ast t) (Ty_var_application v) + + let _var_app env v t = + _error env (Ast t) (Var_application v) + + let 
_type_mismatch env t ty ast = + _error env (Ast ast) (Type_mismatch (t, ty)) + + let _wrong_sum_type env ast cstr ty = + _error env (Ast ast) (Mismatch_sum_type (cstr, ty)) + + let _record_type_mismatch env f ty_c ast = + _error env (Ast ast) (Mismatch_record_type (f, ty_c)) + + let _field_repeated env f ast = + _error env (Ast ast) (Repeated_record_field f) + + let _field_missing env f ast = + _error env (Ast ast) (Missing_record_field f) + + let _cannot_infer_var_in_binding_pos env t = + _error env (Ast t) (Var_in_binding_pos_underspecified) + + let _unknown_builtin env ast b = + _error env (Ast ast) (Unhandled_builtin b) + + let _uncaught_exn env ast exn bt = + _error env (Ast ast) (Uncaught_exn (exn, bt)) + + let _cannot_find ?(hint="") env ast s = + _error env (Ast ast) (Cannot_find (s, hint)) + + let _non_prenex_polymorphism env ast ty = + _error env (Ast ast) (Non_prenex_polymorphism ty) + + let _scope_escape_in_wildcard env ast w w_src v = + let r = find_reason env (`Ty_var v) in + _error env (Ast ast) (Inference_scope_escape (w, w_src, v, r)) + + let _inference_conflict env ast w w_src inferred allowed = + _error env (Ast ast) (Inference_conflict (w, w_src, inferred, allowed)) + + let _inference_forbidden env ast w w_src inferred = + _error env (Ast ast) (Inference_forbidden (w, w_src, inferred)) + + let _wrap_exn env ast = function + | Ty.Prenex_polymorphism ty -> + _non_prenex_polymorphism env ast ty + | T.Wrong_type (t, ty) -> + _type_mismatch env t ty ast + | T.Wrong_sum_type (cstr, ty) -> + _wrong_sum_type env ast cstr ty + | T.Wrong_record_type (f, c) -> + _record_type_mismatch env f c ast + | T.Field_repeated f -> + _field_repeated env f ast + | T.Field_missing f -> + _field_missing env f ast + | T.Over_application over_args -> + _over_application env ast over_args + | T.Bad_poly_arity (vars, args) -> + _bad_poly_arity env ast vars args + | Wildcard_bad_scope (w, w_src, v) -> + _scope_escape_in_wildcard env ast w w_src v + | Wildcard_bad_base (w, 
w_src, inferred, allowed) -> + _inference_conflict env ast w w_src inferred allowed + | Wildcard_forbidden (w, w_src, inferred) -> + _inference_forbidden env ast w w_src inferred + | (Typing_error _) as exn -> + raise exn + | exn -> + let bt = Printexc.get_raw_backtrace () in + _uncaught_exn env ast exn bt + + let[@inline] _wrap env ast f arg = + try f arg with exn -> _wrap_exn env ast exn + + let[@inline] _wrap2 env ast f a b = + let[@inline] aux () = f a b in + (_wrap[@inlined]) env ast aux () + + let[@inline] _wrap3 env ast f a b c = + let[@inline] aux () = f a b c in + (_wrap[@inlined]) env ast aux () + + + (* Global Environment *) + (* ************************************************************************ *) + + let new_state () = { + csts = M.empty; + ttype_locs = R.empty; + const_locs = S.empty; + cstrs_locs = U.empty; + field_locs = V.empty; + custom = Hmap.empty; + } + + let copy_state st = { + csts = st.csts; + custom = st.custom; + ttype_locs = st.ttype_locs; + const_locs = st.const_locs; + cstrs_locs = st.cstrs_locs; + field_locs = st.field_locs; + } + + (* Var/Const creation *) + let var_name _env name = + match (name : Dolmen.Std.Name.t) with + | Simple name -> name + (* TODO: proper errors *) + | Indexed _ -> assert false + | Qualified _ -> assert false + + let cst_path _env name = + match (name : Dolmen.Std.Name.t) with + | Indexed _ -> assert false + | Simple name -> + Dolmen.Std.Path.global name + | Qualified { path; basename; } -> + Dolmen.Std.Path.absolute path basename + + let mk_ty_var env name = + Ty.Var.mk (var_name env name) + + let mk_term_var env name ty = + T.Var.mk (var_name env name) ty + + let mk_ty_cst env name arity = + Ty.Const.mk (cst_path env name) arity + + let mk_term_cst env name ty = + T.Const.mk (cst_path env name) ty + + + (* Const declarations *) + let add_global env fragment id reason (v : cst) = + begin match find_bound env id with + | `Not_found -> () + | #bound as old -> _shadow env fragment id old reason v + end; + 
env.st.csts <- M.add id v env.st.csts + + let decl_ty_const env fg id c reason = + add_global env fg id reason (`Ty_cst c); + env.st.ttype_locs <- R.add c reason env.st.ttype_locs + + let decl_term_const env fg id c reason = + add_global env fg id reason (`Term_cst c); + env.st.const_locs <- S.add c reason env.st.const_locs + + let decl_term_cstr env fg id c reason = + add_global env fg id reason (`Cstr c); + env.st.cstrs_locs <- U.add c reason env.st.cstrs_locs + + let decl_term_field env fg id f reason = + add_global env fg id reason (`Field f); + env.st.field_locs <- V.add f reason env.st.field_locs + + + (* Custom theory data in the global state *) + let get_global_custom env key = + Hmap.get env.st.custom key + + let set_global_custom env key value = + env.st.custom <- Hmap.set env.st.custom key value + + + (* Local Environment *) + (* ************************************************************************ *) + + let global = new_state () + + (* Make a new empty environment *) + let empty_env + ?(st=global) + ?(expect=Anything) + ?(var_infer={ + infer_unbound_vars = No_inference; + infer_type_vars_in_binding_pos = true; + infer_term_vars_in_binding_pos = Wildcard Any_in_scope; + }) + ?(sym_infer={ + infer_type_csts = true; + infer_term_csts = Wildcard Any_in_scope; + }) + ?(order=Higher_order) + ?(poly=Flexible) + ?(quants=true) + ?(free_wildcards=Forbidden) + ~warnings ~file + builtins = + let inferred_vars = ref M.empty in + let inferred_ty_locs = ref E.empty in + let wildcards = ref E.empty in + { + file; st; builtins; warnings; + + vars = M.empty; + type_locs = E.empty; + term_locs = F.empty; + + inferred_vars; inferred_ty_locs; wildcards; + + order; poly; quants; + var_infer; sym_infer; + expect; free_wildcards; + } + + let split_env_for_def env = + let inferred_vars = ref M.empty in + let inferred_ty_locs = ref E.empty in + let wildcards = ref E.empty in + { env with inferred_vars; inferred_ty_locs; wildcards; } + + (* add a global inferred var *) + let 
add_inferred_type_var env id v ast = + let reason = Inferred (env.file, ast) in + begin match find_bound env id with + | `Not_found -> () + | #bound as old -> + _shadow env (Ast ast) id old reason (`Ty_var v) + end; + env.inferred_vars := M.add id (`Ty_var v) !(env.inferred_vars); + env.inferred_ty_locs := E.add v reason !(env.inferred_ty_locs); + () + + (* Add local variables to environment *) + let add_type_var env id v ast = + let reason = Bound (env.file, ast) in + begin match find_bound env id with + | `Not_found -> () + | #bound as old -> + _shadow env (Ast ast) id old reason (`Ty_var v) + end; + { env with + vars = M.add id (`Ty_var v) env.vars; + type_locs = E.add v reason env.type_locs; + } + + let add_type_vars env l = + let env = + List.fold_left (fun acc (id, v, ast) -> + add_type_var acc id v ast + ) env l + in + let l' = List.map (fun (_, v, _) -> v) l in + l', env + + let add_term_var env id v ast = + let reason = Bound (env.file, ast) in + begin match find_bound env id with + | `Not_found -> () + | #bound as old -> + _shadow env (Ast ast) id old reason (`Term_var v) + end; + { env with + vars = M.add id (`Term_var v) env.vars; + term_locs = F.add v reason env.term_locs; + } + + let bind_term_var env id e v t ast = + let reason = Bound (env.file, ast) in + begin match find_bound env id with + | `Not_found -> () + | #bound as old -> + _shadow env (Ast ast) id old reason (`Term_var v) + end; + let t' = T.bind v t in + { env with + vars = M.add id (`Letin (env, e, v, t')) env.vars; + term_locs = F.add v reason env.term_locs; + } + + + (* Typo suggestion *) + (* ************************************************************************ *) + + let suggest ~limit env id = + let name id = Format.asprintf "%a" Id.print id in + let automaton = Spelll.of_string ~limit (name id) in + let aux id _ acc = + if Spelll.match_with automaton (name id) + then id :: acc + else acc + in + M.fold aux env.st.csts (M.fold aux env.vars []) + + + (* Typing explanation *) + (* 
************************************************************************ *) + + let find_ty_var_reason env v = + try E.find v env.type_locs + with Not_found -> E.find v !(env.inferred_ty_locs) + + let _unused_type env kind v = + match find_ty_var_reason env v with + (* Variable bound or inferred *) + | Bound (_, t) | Inferred (_, t) -> + _warn env (Ast t) (Unused_type_variable (kind, v)) + (* variables should not be declare-able nor builtin *) + | Builtin | Declared _ | Defined _ -> + assert false + + let find_term_var_reason env v = + F.find v env.term_locs + + let _unused_term env kind v = + match find_term_var_reason env v with + (* Variable bound or inferred *) + | Bound (_, t) | Inferred (_, t) -> + _warn env (Ast t) (Unused_term_variable (kind, v)) + (* variables should not be declare-able nor builtin *) + | Builtin | Declared _ | Defined _ -> + assert false + + + (* Type inference and wildcards *) + (* ************************************************************************ *) + + let get_allowed_shapes w_map_ref v = + match E.find v !w_map_ref with + | shapes -> shapes + | exception Not_found -> [] + + let is_shape_redundant shapes ({ shape; src = _; bound; } as mark) = + let subsumes_new ({ shape = shape'; src = _; bound = bound'; } as mark') = + mark == mark' || + (shape == shape' && bound == bound') || + match shape, shape' with + | Forbidden, Forbidden -> true + | Any_in_scope, Any_in_scope -> bound == bound' + | Any_base { allowed = l; preferred = p; }, + Any_base { allowed = l'; preferred = p'; } -> + Ty.equal p p' && + List.for_all (fun ty -> List.exists (Ty.equal ty) l') l + | _ -> false + in + List.exists subsumes_new shapes + + let rec wildcard_hook w_map_ref w ty = + let shapes = get_allowed_shapes w_map_ref w in + w_map_ref := E.remove w !w_map_ref; + check_shapes w_map_ref w ty shapes + + and check_shapes w_map_ref w ty shapes = + let fv = lazy (Ty.fv ty) in + List.iter (check_shape w_map_ref fv w ty) shapes + + and check_shape w_map_ref fv w 
ty = function + | { shape = Forbidden; src; bound = _; } -> + raise (Wildcard_forbidden (w, src, ty)) + | { shape = Any_in_scope; src; bound = bound_vars; } as mark -> + List.iter (fun v -> + if Ty.Var.is_wildcard v then begin + transfer_hook w_map_ref mark v + end else if not (E.mem v bound_vars) then + raise (Wildcard_bad_scope (w, src, v)) + ) (Lazy.force fv) + | { shape = Any_base { allowed; preferred = _; }; src; bound = _; } as mark -> + begin match Ty.view ty with + | `Wildcard v -> transfer_hook w_map_ref mark v + | _ -> + if List.exists (Ty.equal ty) allowed then () + else raise (Wildcard_bad_base (w, src, ty, allowed)) + end + | { shape = Arrow { arg_shape; ret_shape; }; src; bound = bound_vars; } as mark -> + begin match Ty.view ty with + | `Wildcard v -> transfer_hook w_map_ref mark v + | `Arrow (ty_args, ty_ret) -> + List.iter (transfer_shape w_map_ref arg_shape (Arg_of src) bound_vars) ty_args; + transfer_shape w_map_ref ret_shape (Ret_of src) bound_vars ty_ret + | _ -> transfer_shape w_map_ref ret_shape src bound_vars ty + end + + and transfer_shape w_map_ref shape src bound ty = + let mark = { shape; src; bound; } in + match Ty.view ty with + | `Wildcard w -> transfer_hook w_map_ref mark w + | _ -> + let w = Ty.Var.wildcard () in + check_shapes w_map_ref w ty [mark] + + and transfer_hook w_map_ref mark v = + let l = get_allowed_shapes w_map_ref v in + if l = [] then Ty.add_wildcard_hook ~hook:(wildcard_hook w_map_ref) v; + if not (is_shape_redundant l mark) then + w_map_ref := E.add v (mark :: l) !w_map_ref + + (* create a wildcard *) + let wildcard_var env src shape = + let w = Ty.Var.wildcard () in + let mark = { shape; src; bound = env.type_locs; } in + env.wildcards := E.add w [mark] !(env.wildcards); + Ty.add_wildcard_hook ~hook:(wildcard_hook env.wildcards) w; + w + + let wildcard env src shape = + let w = wildcard_var env src shape in + Ty.of_var w + + (* Try and set a wildcard according to one of its shapes *) + let rec 
try_set_wildcard_shape w = function + | [] -> false + | Any_base { preferred; allowed = _; } :: _ -> + Ty.set_wildcard w preferred; true + | Arrow { ret_shape; arg_shape = _; } :: r -> + try_set_wildcard_shape w (ret_shape :: r) + | _ :: r -> + try_set_wildcard_shape w r + + (* "pop" a wildcard out of the set of watched wildcards *) + let pop_wildcard env = + let w, l = E.choose !(env.wildcards) in + env.wildcards := E.remove w !(env.wildcards); + w, l + + (* ensure all wildcards are set *) + let rec set_wildcards_and_return_free_wildcards state acc = + match pop_wildcard state with + | exception Not_found -> acc + | w, l -> + let shapes = List.map (fun { shape; src = _; bound = _; } -> shape) l in + let acc = + if try_set_wildcard_shape w shapes then acc + else begin + let sources = + List.map (fun { src; shape = _; bound = _; } -> src) l + in + ((w, sources) :: acc) + end + in + set_wildcards_and_return_free_wildcards state acc + + + (* Wrappers for expression building *) + (* ************************************************************************ *) + + (* unwrap results *) + let unwrap_ty env ast = function + | Ty ty -> ty + | res -> _expected env "type" ast (Some res) + + let unwrap_term env ast = function + | Term t -> t + | res -> _expected env "term" ast (Some res) + + (* Un-polymorphize a term, by applying it to the adequate + number of type wildcards *) + let monomorphize env ast t = + match Ty.view (T.ty t) with + | `Pi (vars, _) -> + let n_ty = List.length vars in + if n_ty = 0 then t + else begin + let src = Added_type_argument ast in + let ty_l = + Misc.Lists.init n_ty + (fun _ -> wildcard env src Any_in_scope) + in + _wrap3 env ast T.apply t ty_l [] + end + | _ -> t + + let check_not_poly env ast t = + match Ty.view (T.ty t) with + | `Pi _ -> _error env (Ast ast) Polymorphic_function_argument + | _ -> t + + (* Split arguments for first order application *) + let split_fo_args env ast n_ty n_t args = + let n_args = List.length args in + match 
env.poly with + | Explicit -> + if n_args = n_ty + n_t then + `Ok (Misc.Lists.take_drop n_ty args) + else + `Bad_arity ([n_ty + n_t], n_args) + | Implicit -> + if n_args = n_t then begin + let src = Added_type_argument ast in + let tys = + Misc.Lists.init n_ty + (fun _ -> wildcard env src Any_in_scope) + in + `Fixed (tys, args) + end else + `Bad_arity ([n_t], n_args) + | Flexible -> + if n_args = n_ty + n_t then + `Ok (Misc.Lists.take_drop n_ty args) + else if n_args = n_t then begin + let src = Added_type_argument ast in + let tys = + Misc.Lists.init n_ty + (fun _ -> wildcard env src Any_in_scope) + in + `Fixed (tys, args) + end else + `Bad_arity ([n_t; n_ty + n_t], n_args) + + (* Split arguments for higher order application *) + let split_ho_args env ast n_ty args = + let explicit args = + let rec aux tys acc = function + | [] -> List.rev tys, List.rev acc + | (ast, Term t) :: r -> aux tys ((ast, t) :: acc) r + | (ast, ((Ty _) as res)) :: _ -> + _expected env "a term" ast (Some res) + | (ast, ((Ttype | Tags _) as res)) :: _ -> + _expected env "a type or a term" ast (Some res) + and aux_ty acc = function + | (_, Ty ty) :: r -> aux_ty (ty :: acc) r + | l -> aux acc [] l + in + aux_ty [] args + in + let implicit ast n_ty args = + let src = Added_type_argument ast in + let ty_l = + Misc.Lists.init n_ty + (fun _ -> wildcard env src Any_in_scope) + in + let t_l = List.map (function + | ast, Term t -> ast, t + | ast, res -> _expected env "a term" ast (Some res) + ) args + in + ty_l, t_l + in + let ty_l, t_l = + match env.poly with + | Explicit -> explicit args + | Implicit -> implicit ast n_ty args + | Flexible -> + begin match args with + | (_, Ty _) :: _ -> explicit args + | _ -> implicit ast n_ty args + end + in + let t_l = + match env.poly with + | Explicit -> + List.map (fun (ast, t) -> check_not_poly env ast t) t_l + | Implicit | Flexible -> + List.map (fun (ast, t) -> monomorphize env ast t) t_l + in + ty_l, t_l + + (* wrapper for builtin application *) + let 
builtin_apply env b ast args : res = + match (b : builtin_res) with + | `Ttype f -> _wrap2 env ast f ast args; Ttype + | `Ty f -> Ty (_wrap2 env ast f ast args) + | `Term f -> Term (_wrap2 env ast f ast args) + | `Tags f -> Tags (_wrap2 env ast f ast args) + + (* Wrapper around record creation *) + let create_record env ast l = + _wrap env ast T.record l + + let create_record_with env ast t l = + _wrap2 env ast T.record_with t l + + let create_record_access env ast t field = + _wrap2 env ast T.apply_field field t + + let used_var_tag = Tag.create () + + (* Emit warnings for quantified variables that are unused *) + let check_used_ty_var ~kind env v = + match Ty.Var.get_tag v used_var_tag with + | Some () -> Ty.Var.unset_tag v used_var_tag + | None -> _unused_type env kind v + + let check_used_term_var ~kind env v = + match T.Var.get_tag v used_var_tag with + | Some () -> T.Var.unset_tag v used_var_tag + | None -> _unused_term env kind v + + (* Wrappers for creating binders *) + let mk_let env ast mk l body = + List.iter (fun (v, _) -> + check_used_term_var ~kind:`Letbound env v + ) l; + _wrap2 env ast mk l body + + let mk_quant env ast mk (ty_vars, t_vars) body = + if not env.quants then + _error env (Ast ast) Forbidden_quantifier + else begin + List.iter (check_used_ty_var ~kind:`Quantified env) ty_vars; + List.iter (check_used_term_var ~kind:`Quantified env) t_vars; + _wrap2 env ast mk (ty_vars, t_vars) body + end + + + let free_wildcards_to_quant_vars = + let wildcard_univ_counter = ref 0 in + (fun wildcards -> + List.map (fun (w, _) -> + incr wildcard_univ_counter; + let v = + Ty.Var.mk (Format.asprintf "w%d" !wildcard_univ_counter) + in + Ty.set_wildcard w (Ty.of_var v); + v + ) wildcards + ) + + let finalize_wildcards env ast = + let free_wildcards = + _wrap2 env ast set_wildcards_and_return_free_wildcards env [] + in + begin match free_wildcards, env.free_wildcards with + | [], _ -> `No_free_wildcards + | tys, Implicitly_universally_quantified -> + (* Ensure 
that the wildcards are properly instantiated, so that + they cannot be instantiated after we have built the + quantification *) + `Univ (tys, lazy (free_wildcards_to_quant_vars tys)) + | free_wildcards, Forbidden -> + _error env (Ast ast) (Unbound_type_wildcards free_wildcards) + end + + let finalize_wildcards_prop env ast prop = + match finalize_wildcards env ast with + | `No_free_wildcards -> prop + | `Univ (_, lazy vars) -> _wrap2 env ast T.all (vars, []) prop + + let finalize_wildcards_ty env ast ty = + match finalize_wildcards env ast with + | `No_free_wildcards -> ty + | `Univ (_, lazy vars) -> Ty.pi vars ty + + let finalize_wildcards_def env ast = + match finalize_wildcards env ast with + | `No_free_wildcards -> [] + | `Univ (_, lazy vars) -> vars + + let check_no_free_wildcards env ast = + match finalize_wildcards env ast with + | `No_free_wildcards -> () + | `Univ (free_wildcards, _) -> + _error env (Ast ast) (Unbound_type_wildcards free_wildcards) + + + (* Tag application *) + (* ************************************************************************ *) + + let set_tag env ast tag v res = + match (res : res) with + | Ttype -> _error env (Ast ast) Cannot_tag_ttype + | Tags _ -> _error env (Ast ast) Cannot_tag_tag + | Ty ty -> Ty.set_tag ty tag v + | Term t -> T.set_tag t tag v + + let add_tag env ast tag v res = + match (res : res) with + | Ttype -> _error env (Ast ast) Cannot_tag_ttype + | Tags _ -> _error env (Ast ast) Cannot_tag_tag + | Ty ty -> Ty.add_tag ty tag v + | Term t -> T.add_tag t tag v + + (* Expression parsing *) + (* ************************************************************************ *) + + let expect_anything env = + match env.expect with + | Anything -> env + | _ -> { env with expect = Anything; } + + let expect_type env = + match env.expect with + | Type -> env + | _ -> { env with expect = Type; } + + let expect_term env = + match env.expect with + | Term -> env + | _ -> { env with expect = Term; } + + let[@inline] wrap_attr 
apply_attr env ast f = + match ast.Ast.attr with + | [] -> f ast + | l -> apply_attr env (f ast) ast l + + let rec parse_expr (env : env) t : res = + let[@inline] aux t = parse_expr_aux env t in + (wrap_attr[@inlined]) apply_attr env t aux + + and parse_expr_aux env = function + (* Ttype *) + | { Ast.term = Ast.Builtin Ast.Ttype; _ } -> + Ttype + + (* Wildcards should only occur in place of types *) + | { Ast.term = Ast.Builtin Ast.Wildcard; _ } as ast -> + Ty (wildcard env (From_source ast) Any_in_scope) + + (* Arrows *) + | { Ast.term = Ast.Binder (Ast.Arrow, args, ret); _ } as ast -> + parse_arrow env ast [args] ret + + (* Binders *) + | { Ast.term = Ast.Binder (Ast.Fun, _, _); _ } as ast -> + parse_quant parse_term T.lam Ast.Fun env ast [] [] ast + + | { Ast.term = Ast.Binder (Ast.All, _, _); _ } as ast -> + parse_quant parse_prop T.all Ast.All env ast [] [] ast + + | { Ast.term = Ast.Binder (Ast.Ex, _, _); _ } as ast -> + parse_quant parse_prop T.ex Ast.Ex env ast [] [] ast + + | ({ Ast.term = Ast.Binder (Ast.Let_seq, vars, f); _ } as ast) + | ({ Ast.term = Ast.Binder (Ast.Let_par, ([_] as vars), f); _ } as ast) -> + parse_let_seq env ast [] f vars + + | { Ast.term = Ast.Binder (Ast.Let_par, vars, f); _ } as ast -> + parse_let_par env ast [] f vars + + (* Pattern matching *) + | { Ast.term = Ast.Match (scrutinee, branches); _ } as ast -> + parse_match env ast scrutinee branches + + (* Record creation *) + | { Ast.term = Ast.App ({ Ast.term = Ast.Builtin Ast.Record; _ }, l); _ } as ast -> + parse_record env ast l + + | { Ast.term = Ast.App ({ Ast.term = Ast.Builtin Ast.Record_with; _ }, l); _ } as ast -> + parse_record_with env ast l + + | { Ast.term = Ast.App ({ Ast.term = Ast.Builtin Ast.Record_access; _ }, l); _ } as ast -> + parse_record_access env ast l + + (* Type annotations *) + | { Ast.term = Ast.Colon (a, expected); _ } -> + parse_ensure env a expected + + (* Sometimes parser creates extra applications *) + | { Ast.term = Ast.App (t, []); _ } -> + 
parse_expr env t + + (* Application *) + | { Ast.term = Ast.App (f, args); _ } as ast -> + parse_app env ast f args + + (* Symbols *) + | { Ast.term = Ast.Symbol s; _ } as ast -> + parse_symbol env ast s ast + + (* Builtin *) + | { Ast.term = Ast.Builtin b; _ } as ast -> + parse_builtin env ast b + + (* Other cases *) + | ast -> _error env (Ast ast) Unhandled_ast + + and apply_attr env res ast l = + let () = List.iter (function + | Set (tag, v) -> set_tag env ast tag v res + | Add (tag, v) -> add_tag env ast tag v res + ) (parse_attrs env [] l) in + res + + and parse_attr env ast = + match parse_expr (expect_anything env) ast with + | Tags l -> l + | res -> _expected env "tag" ast (Some res) + + and parse_attrs env acc = function + | [] -> acc + | a :: r -> + parse_attrs env (parse_attr env a @ acc) r + + and parse_var_in_binding_pos env = function + | { Ast.term = Ast.Symbol s; _ } as t -> + infer_var_in_binding_pos env t s + | { Ast.term = Ast.Colon ({ Ast.term = Ast.Symbol s; _ }, e); _ } -> + begin match parse_expr env e with + | Ttype -> `Ty (s, mk_ty_var env (Id.name s)) + | Ty ty -> `Term (s, mk_term_var env (Id.name s) ty) + | res -> _expected env "type (or Ttype)" e (Some res) + end + | t -> _expected env "(typed) variable" t None + + and parse_arrow env ast acc ret = + match env.order with + | First_order -> _error env (Ast ast) Higher_order_type + | Higher_order -> + let[@inline] aux t = parse_arrow_aux env ast acc t in + (wrap_attr[@inlined]) apply_attr env ret aux + + and parse_arrow_aux env ast acc = function + | { Ast.term = Ast.Binder (Ast.Arrow, args, ret'); _ } -> + parse_arrow env ast (args :: acc) ret' + | ret -> + let args = List.flatten (List.rev acc) in + let args = List.map (parse_ty env) args in + let ret = parse_ty env ret in + Ty (_wrap2 env ast Ty.arrow args ret) + + and parse_quant_vars env l = + let ttype_vars, typed_vars, env' = List.fold_left ( + fun (l1, l2, acc) v -> + match parse_var_in_binding_pos acc v with + | `Ty (id, v') -> + 
let acc' = add_type_var acc id v' v in + (v' :: l1, l2, acc') + | `Term (id, v') -> + let acc' = add_term_var acc id v' v in + (l1, v' :: l2, acc') + ) ([], [], env) l in + List.rev ttype_vars, List.rev typed_vars, env' + + and parse_quant parse_inner mk b env ast ttype_acc ty_acc body_ast = + let [@inline] aux t = + parse_quant_aux parse_inner mk b env ast ttype_acc ty_acc t + in + (wrap_attr[@inlined]) apply_attr env body_ast aux + + and parse_quant_aux parse_inner mk b env ast ttype_acc ty_acc = function + | { Ast.term = Ast.Binder (b', vars, f); _ } when b = b' -> + let ttype_vars, ty_vars, env' = parse_quant_vars env vars in + parse_quant parse_inner mk b env' ast (ttype_acc @ ttype_vars) (ty_acc @ ty_vars) f + | body_ast -> + let body = parse_inner env body_ast in + let f = mk_quant env ast mk (ttype_acc, ty_acc) body in + Term f + + and parse_match env ast scrutinee branches = + let t = parse_term env scrutinee in + let l = List.map (parse_branch (T.ty t) env) branches in + Term (_wrap2 env ast T.pattern_match t l) + + and parse_branch ty env (pattern, body) = + let p, env = parse_pattern ty env pattern in + let b = parse_term env body in + (p, b) + + and parse_pattern ty env t = + match t with + | { Ast.term = Ast.Symbol s; _ } as ast_s -> + parse_pattern_app ty env t ast_s s [] + | { Ast.term = Ast.App ( + ({ Ast.term = Ast.Symbol s; _ } as ast_s), args); _ } -> + parse_pattern_app ty env t ast_s s args + | _ -> _expected env "pattern" t None + + and parse_pattern_app ty env ast ast_s s args = + match find_bound env s with + | `Cstr c -> parse_pattern_app_cstr ty env ast c args + | _ -> + begin match args with + | [] -> parse_pattern_var ty env ast_s s + | _ -> _expected env "a variable (or an ADT constructor)" ast_s None + end + + and parse_pattern_var ty env ast s = + let v = mk_term_var env (Id.name s) ty in + let env = add_term_var env s v ast in + T.of_var v, env + + and parse_pattern_app_cstr ty env t c args = + (* Inlined version of parse_app_cstr 
*) + let n_ty, n_t = T.Cstr.arity c in + let ty_args, t_l = + match split_fo_args env t n_ty n_t args with + | `Ok (l, l') -> + (* We can't allow binding new type variables here *) + let ty_args = List.map (parse_ty env) l in + ty_args, l' + | `Fixed (l, l') -> l, l' + | `Bad_arity (expected, actual) -> + _bad_cstr_arity env c expected actual t + in + (* Compute the expected types of arguments *) + let ty_arity = _wrap3 env t T.Cstr.pattern_arity c ty ty_args in + (* Pattern args are allowed to introduce new variables *) + let t_args, env = parse_pattern_app_cstr_args env t_l ty_arity in + let res = _wrap3 env t T.apply_cstr c ty_args t_args in + res, env + + and parse_pattern_app_cstr_args env args args_ty = + let l, env = + List.fold_left2 (fun (l, env) arg ty -> + let arg, env = parse_pattern ty env arg in + (arg :: l, env) + ) ([], env) args args_ty + in + List.rev l, env + + and parse_let_seq_end env ast acc = function + | ({ Ast.term = Ast.Binder (Ast.Let_seq, vars, f'); _ } as f) + | ({ Ast.term = Ast.Binder (Ast.Let_par, ([_] as vars), f'); _ } as f)-> + parse_let_seq env f acc f' vars + | f -> + let l = List.rev acc in + begin match parse_expr env f with + | Term t -> Term (mk_let env ast T.letin l t) + | res -> _expected env "term of formula" f (Some res) + end + + and parse_let_seq env ast acc f = function + | [] -> + let[@inline] aux t = parse_let_seq_end env ast acc t in + (wrap_attr[@inlined]) apply_attr env f aux + | x :: r -> + begin match x with + | { Ast.term = Ast.Colon ({ Ast.term = Ast.Symbol s; _ } as w, e); _ } + | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Eq; _}, [ + { Ast.term = Ast.Symbol s; _ } as w; e]); _ } + | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Equiv; _}, [ + { Ast.term = Ast.Symbol s; _ } as w; e]); _ } -> + let t = parse_term env e in + let v = mk_term_var env (Id.name s) (T.ty t) in + let env' = bind_term_var env s e v t w in + parse_let_seq env' ast ((v, t) :: acc) f r + | t -> _expected env "variable 
binding" t None + end + + and parse_let_par env ast acc f = function + | [] -> + let env, rev_l = + List.fold_right (fun (s, e, v, t, w) (env, acc) -> + let env' = bind_term_var env s e v t w in + (env', (v, t) :: acc) + ) acc (env, []) + in + let l = List.rev rev_l in + begin match parse_expr env f with + | Term t -> Term (mk_let env ast T.letand l t) + | res -> _expected env "term of formula" f (Some res) + end + | x :: r -> + begin match x with + | { Ast.term = Ast.Colon ({ Ast.term = Ast.Symbol s; _ } as w, e); _ } + | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Eq; _}, [ + { Ast.term = Ast.Symbol s; _ } as w; e]); _ } + | { Ast.term = Ast.App ({Ast.term = Ast.Builtin Ast.Equiv; _}, [ + { Ast.term = Ast.Symbol s; _ } as w; e]); _ } -> + begin match parse_term env e with + | t -> + let v = mk_term_var env (Id.name s) (T.ty t) in + parse_let_par env ast ((s, e, v, t, w) :: acc) f r + (* Try and provide a helpful hints when a parallel let is used as + a sequential let-binding *) + | exception (Typing_error (Error ( + env, fragment, Cannot_find (id, "")))) + when List.exists (fun (s, _, _, _, _) -> Id.equal s id) acc -> + let msg = + "This binding occurs in a parallel let-binding; you cannot refer \ + to other variables defined by the same let-binding in the defining \ + expressions." 
+ in + _error env fragment (Cannot_find (id, msg)) + end + | t -> _expected env "variable binding" t None + end + + and parse_record_field env ast = + match ast with + | { Ast.term = Ast.Symbol s; _ } + | { Ast.term = Ast.App ({ Ast.term = Ast.Symbol s; _ }, []); _} -> + begin match find_bound env s with + | `Field f -> f + | `Not_found -> _cannot_find env ast s + | _ -> _expected env "record field" ast None + end + | _ -> + _expected env "record field name" ast None + + and parse_record_field_binding env ast = + match ast with + | { Ast.term = Ast.App ( + {Ast.term = Ast.Builtin Ast.Eq; _ }, [field; value] ); _ } -> + let f = parse_record_field env field in + let t = parse_term env value in + f, t + | _ -> + _expected env "record field_binding" ast None + + and parse_record env ast = function + | [] -> + _expected env "at least one field binding" ast None + | l -> + let l' = List.map (parse_record_field_binding env) l in + Term (create_record env ast l') + + and parse_record_with env ast = function + | [] -> + _expected env "term" ast None + | t :: l -> + let t' = parse_term env t in + let l' = List.map (parse_record_field_binding env) l in + Term (create_record_with env ast t' l') + + and parse_record_access env ast = function + | [ t; f ] -> + let t = parse_term env t in + let field = parse_record_field env f in + Term (create_record_access env ast t field) + | l -> + _bad_op_arity env (Builtin Ast.Record_access) 2 (List.length l) ast + + and parse_symbol env ast s s_ast = + parse_app_symbol env ast s s_ast [] + + and parse_app env ast f_ast args_asts = + let[@inline] aux t = parse_app_aux env ast args_asts t in + (wrap_attr[@inlined]) apply_attr env f_ast aux + + and parse_app_aux env ast args_asts = function + | { Ast.term = Ast.App (g, inner_args); _ } -> + parse_app env ast g (inner_args @ args_asts) + | { Ast.term = Ast.Symbol s; _ } as f_ast -> + parse_app_symbol env ast s f_ast args_asts + | { Ast.term = Ast.Builtin b; _ } -> + parse_app_builtin env ast b 
args_asts + | f_ast -> parse_app_ho env ast f_ast args_asts + + and parse_app_ho env ast f_ast args_asts = + match env.order with + | First_order -> + _error env (Ast ast) Higher_order_application + | Higher_order -> + let f = parse_expr env f_ast in + parse_app_ho_generic env ast f f_ast args_asts + + and parse_app_ho_generic env ast f f_ast args = + match f with + | Ttype | Ty _ | Tags _ -> _expected env "a term" f_ast (Some f) + | Term f -> parse_app_ho_term env ast f args + + and parse_app_ho_term env ast f args = + let n_ty = + match Ty.view (T.ty f) with + | `Pi (vars, _) -> List.length vars + | _ -> 0 + in + let args = List.map (fun ast -> ast, parse_expr env ast) args in + let ty_args, t_args = split_ho_args env ast n_ty args in + Term (_wrap3 env ast T.apply f ty_args t_args) + + and parse_app_symbol env ast s s_ast args = + parse_app_resolved env ast s s_ast args (find_bound env s) + + and parse_app_resolved env ast s s_ast args = function + | `Ty_var v -> parse_app_ty_var env ast v s_ast args + | `Term_var v -> parse_app_term_var env ast v s_ast args + | `Letin (_, _, v, t) -> parse_app_letin_var env ast v s_ast t args + | `Ty_cst f -> parse_app_ty_cst env ast f args + | `Term_cst f -> parse_app_term_cst env ast f args + | `Cstr c -> + parse_app_cstr env ast c args + | `Field _f -> + _expected env "not a field name" s_ast None + | `Builtin b -> + builtin_apply env b ast args + | `Not_found -> + infer_sym env ast s args s_ast + + and parse_app_ty_var env ast v _v_ast args = + Ty.Var.set_tag v used_var_tag (); + if args = [] then Ty (Ty.of_var v) + else _ty_var_app env v ast + + and parse_app_term_var env ast v v_ast args = + T.Var.set_tag v used_var_tag (); + match env.order with + | First_order -> + if args = [] then Term (T.of_var v) + else _var_app env v ast + | Higher_order -> + parse_app_ho_generic env ast (Term (T.of_var v)) v_ast args + + and parse_app_letin_var env ast v v_ast t args = + T.Var.set_tag v used_var_tag (); + match env.order with + | 
First_order -> + if args = [] then Term t + else _var_app env v ast + | Higher_order -> + parse_app_ho_generic env ast (Term t) v_ast args + + and parse_app_ty_cst env ast f args = + if List.length args <> Ty.Const.arity f then + _bad_ty_arity env f (List.length args) ast; + let l = List.map (parse_ty env) args in + Ty (Ty.apply f l) + + and parse_app_term_cst env ast f args = + match env.order with + | First_order -> + let n_ty, n_t = T.Const.arity f in + let ty_args, t_l = + match split_fo_args env ast n_ty n_t args with + | `Ok (l, l') -> + let ty_args = List.map (parse_ty env) l in + ty_args, l' + | `Fixed (l, l') -> l, l' + | `Bad_arity (expected, actual) -> + _bad_term_arity env f expected actual ast + in + let t_args = List.map (parse_term env) t_l in + Term (_wrap3 env ast T.apply_cst f ty_args t_args) + | Higher_order -> + let n_ty, _ = T.Const.arity f in + let args = List.map (fun ast -> ast, parse_expr env ast) args in + let ty_args, t_args = split_ho_args env ast n_ty args in + Term (_wrap3 env ast T.apply_cst f ty_args t_args) + + and parse_app_cstr env ast c args = + let n_ty, n_t = T.Cstr.arity c in + let ty_args, t_l = + match split_fo_args env ast n_ty n_t args with + | `Ok (l, l') -> + let ty_args = List.map (parse_ty env) l in + ty_args, l' + | `Fixed (l, l') -> l, l' + | `Bad_arity (expected, actual) -> + _bad_cstr_arity env c expected actual ast + in + let t_args = List.map (parse_term env) t_l in + Term (_wrap3 env ast T.apply_cstr c ty_args t_args) + + and parse_app_builtin env ast b args = + match env.builtins env (Builtin b) with + | `Not_found -> _unknown_builtin env ast b + | #builtin_res as b -> builtin_apply env b ast args + + and parse_builtin env ast b = + parse_app_builtin env ast b [] + + and parse_ensure env ast expected = + let t = parse_term env ast in + let ty = parse_ty env expected in + Term (_wrap2 env ast T.ensure t ty) + + and parse_ty env ast = + unwrap_ty env ast (parse_expr (expect_type env) ast) + + and parse_term env 
ast = + unwrap_term env ast (parse_expr (expect_term env) ast) + + and parse_prop env ast = + match parse_expr (expect_term env) ast with + | Term t -> _wrap2 env ast T.ensure t Ty.prop + | res -> _expected env "term/prop" ast (Some res) + + and infer_ty env ast src shape args = + match env.order with + | Higher_order -> wildcard env src shape + | First_order -> + begin match shape with + | Arrow { arg_shape; ret_shape; } -> + let arg_src = Arg_of src in + let ty_args = List.map (infer_ty_arg env ast arg_src arg_shape) args in + let ty_ret = infer_ty env ast (Ret_of src) ret_shape [] in + Ty.arrow ty_args ty_ret + | _ -> wildcard env src shape + end + + and infer_ty_arg env ast src shape arg = + let ty = T.ty arg in + match Ty.view ty with + | `Wildcard w -> + begin match shape with + | Any_base { allowed = [ty]; preferred = _; } -> + _wrap2 env ast Ty.set_wildcard w ty; + ty + | _ -> + let mark = { shape; src; bound = env.type_locs; } in + transfer_hook env.wildcards mark w; + ty + end + | _ -> infer_ty env ast src shape [] + + and infer_var_in_binding_pos env ast s = + match env.expect with + | Anything -> _cannot_infer_var_in_binding_pos env ast + | Type -> + if not env.var_infer.infer_type_vars_in_binding_pos then + _cannot_infer_var_in_binding_pos env ast + else + `Ty (s, mk_ty_var env (Id.name s)) + | Term -> + begin match env.var_infer.infer_term_vars_in_binding_pos with + | No_inference -> _cannot_infer_var_in_binding_pos env ast + | Wildcard shape -> + let var_infer = { + variable = s; + variable_loc = ast.loc; + inferred_ty = Ty.prop; + } in + let ty = infer_ty env ast (Variable_inference var_infer) shape [] in + var_infer.inferred_ty <- ty; + `Term (s, mk_term_var env (Id.name s) ty) + end + + and infer_sym env ast s args s_ast = + (* variables must be bound explicitly *) + if Id.(s.ns = Var) then begin + match env.var_infer.infer_unbound_vars with + | No_inference -> _cannot_find env ast s + | Unification_type_variable -> + let v = wildcard_var env 
(From_source s_ast) Any_in_scope in + add_inferred_type_var env s v s_ast; + parse_app_ty_var env ast v s_ast args + end else + match env.expect with + | Anything -> _cannot_find env ast s + | Type -> + if not env.sym_infer.infer_type_csts + then _cannot_find env ast s + else begin + let n = List.length args in + let f = mk_ty_cst env (Id.name s) n in + decl_ty_const env (Ast ast) s f (Inferred (env.file, s_ast)); + parse_app_ty_cst env ast f args + end + | Term -> + begin match env.sym_infer.infer_term_csts with + | No_inference -> _cannot_find env ast s + | Wildcard shape -> + let t_args = List.map (parse_term env) args in + let f = + match find_bound env s with + | `Term_cst f -> f + | `Not_found -> + let sym_infer = { + symbol = s; + symbol_loc = s_ast.loc; + inferred_ty = Ty.prop; + } in + let src = Symbol_inference sym_infer in + let f_ty = infer_ty env ast src shape t_args in + sym_infer.inferred_ty <- f_ty; + let f = mk_term_cst env (Id.name s) f_ty in + decl_term_const env (Ast ast) s f (Inferred (env.file, s_ast)); + f + | _ -> assert false + in + Term (_wrap3 env ast T.apply_cst f [] t_args) + end + + let parse_ttype_var_in_binding_pos env t = + match parse_var_in_binding_pos (expect_type env) t with + | `Ty (id, v) -> (id, v, t) + | `Term (_, v) -> + _expected env "type variable" t (Some (Term (T.of_var v))) + + let parse_typed_var_in_binding_pos env t = + match parse_var_in_binding_pos (expect_term env) t with + | `Term (id, v) -> (id, v, t) + | `Ty (_, v) -> + _expected env "typed variable" t (Some (Ty (Ty.of_var v))) + + let rec parse_sig_quant env = function + | { Ast.term = Ast.Binder (Ast.Pi, vars, t); _ } -> + let ttype_vars = List.map (parse_ttype_var_in_binding_pos env) vars in + let ttype_vars, env' = add_type_vars env ttype_vars in + let l = List.combine vars ttype_vars in + parse_sig_arrow l [] env' t + | t -> + parse_sig_arrow [] [] env t + + and parse_sig_arrow ttype_vars (ty_args: (Ast.t * res) list) env = function + | { Ast.term = 
Ast.Binder (Ast.Arrow, args, ret); _ } -> + let t_args = parse_sig_args env args in + parse_sig_arrow ttype_vars (ty_args @ t_args) env ret + | t -> + begin match parse_expr env t with + | Ttype -> + let n = List.length ttype_vars in + let aux n arg = + match (arg : _ * res) with + | (_, Ttype) -> n + 1 + | (ast, res) -> raise (Found (ast, res)) + in + begin + match List.fold_left aux n ty_args with + | n -> `Ty_cstr n + | exception Found (ast, res) -> + _expected env "tType or a type variable" ast (Some res) + end + | Ty ret -> + let aux acc arg = + match (arg : _ * res) with + | (_, Ty t) -> t :: acc + | (ast, res) -> raise (Found (ast, res)) + in + begin + match List.fold_left aux [] ty_args with + | exception Found (err, res) -> _expected env "type" err (Some res) + | l -> `Fun_ty (List.map snd ttype_vars, List.rev l, ret) + end + | res -> _expected env "Ttype of type" t (Some res) + end + + and parse_sig_args env l = + List.flatten @@ List.map (parse_sig_arg env) l + + and parse_sig_arg env = function + | { Ast.term = Ast.App ({ Ast.term = Ast.Builtin Ast.Product; _}, l); _ } -> + List.flatten @@ List.map (parse_sig_arg env) l + | t -> + [t, parse_expr env t] + + let parse_sig = parse_sig_quant + + let parse_inductive_arg env = function + | { Ast.term = Ast.Colon ({ Ast.term = Ast.Symbol s; _ }, e); _ } -> + let ty = parse_ty env e in + ty, Some s + | t -> + let ty = parse_ty env t in + ty, None + + + (* Typechecking mutually recursive datatypes *) + (* ************************************************************************ *) + + let decl_id t = + match (t : Stmt.decl) with + | Abstract { id; _ } + | Record { id; _ } + | Inductive { id; _ } -> id + + let appears_in s t = + let mapper = + { Ast.unit_mapper with + symbol = (fun _ ~attr:_ ~loc:_ id -> + if Id.equal s id then raise Exit); + } + in + try Ast.map mapper t; false + with Exit -> true + + let well_founded_aux l t = + match (t : Stmt.decl) with + | Abstract _ -> true + | Inductive { cstrs; _ } -> + 
List.exists (fun (_, args) -> + List.for_all (fun t -> + not (List.exists (fun i -> + appears_in (decl_id i) t + ) l) + ) args + ) cstrs + | Record { fields; _ } -> + List.for_all (fun (_, t) -> + not (List.exists (fun i -> + appears_in (decl_id i) t + ) l) + ) fields + + let rec check_well_founded env d l = + match (l : Stmt.decl list) with + | [] -> () + | _ -> + let has_progressed = ref false in + let l' = List.filter (fun t -> + let b = well_founded_aux l t in + if b then has_progressed := true; + not b + ) l in + if !has_progressed then + check_well_founded env d l' + else + _error env (Decls d) (Not_well_founded_datatypes l') + + let record env d ty_cst { Stmt.vars; fields; _ } = + let ttype_vars = List.map (parse_ttype_var_in_binding_pos env) vars in + let ty_vars, env = add_type_vars env ttype_vars in + let l = List.map (fun (id, t) -> + let ty = parse_ty env t in + check_no_free_wildcards env t; + cst_path env (Id.name id), ty + ) fields in + let field_list = T.define_record ty_cst ty_vars l in + List.iter2 (fun (id, _) field -> + decl_term_field env (Decl d) id field (Declared (env.file, d)) + ) fields field_list + + let inductive env d ty_cst { Stmt.id; vars; cstrs; _ } = + (* Parse the type variables *) + let ttype_vars = List.map (parse_ttype_var_in_binding_pos env) vars in + let ty_vars, env = add_type_vars env ttype_vars in + (* Parse the constructors *) + let cstrs_with_ids = List.map (fun (id, args) -> + id, List.map (fun t -> + let ty, dstr = parse_inductive_arg env t in + check_no_free_wildcards env t; + t, ty, dstr + ) args + ) cstrs in + (* Constructors with strings for names *) + let cstrs_with_strings = List.map (fun (id, args) -> + cst_path env (Id.name id), List.map (fun (_, ty, dstr) -> + ty, Misc.Options.map (fun id -> cst_path env (Id.name id)) dstr + ) args + ) cstrs_with_ids in + (* Call the T module to define the adt and get the typed constructors + and destructors. 
*) + let defined_cstrs = T.define_adt ty_cst ty_vars cstrs_with_strings in + (* Register the constructors and destructors in the global env. *) + List.iter2 (fun (cid, pargs) (c, targs) -> + decl_term_cstr env (Decl d) cid c (Declared (env.file, d)); + List.iter2 (fun (t, _, dstr) (_, o) -> + match dstr, o with + | None, None -> () + | None, Some c -> + _warn env (Ast t) (Superfluous_destructor (id, cid, c)) + | Some id, Some const -> + decl_term_const env (Decl d) id const (Declared (env.file, d)) + | Some id, None -> + _error env (Ast t) (Missing_destructor id) + ) pargs targs + ) cstrs_with_ids defined_cstrs + + let define_decl env (_, cst) t = + match cst, (t : Stmt.decl) with + | _, Abstract _ -> () + | `Term_decl _, Inductive _ -> assert false + | `Type_decl c, Inductive i -> inductive env t c i + | `Term_decl _, Record _ -> assert false + | `Type_decl c, Record r -> record env t c r + + let parse_decl env tags (t : Stmt.decl) = + match t with + | Abstract { id; ty = ast; _ } -> + begin match parse_sig env ast with + | `Ty_cstr n -> + check_no_free_wildcards env ast; + let c = mk_ty_cst env (Id.name id) n in + List.iter (function + | Set (tag, v) -> Ty.Const.set_tag c tag v + | Add (tag, v) -> Ty.Const.add_tag c tag v + ) tags; + id, `Type_decl c + | `Fun_ty (vars, args, ret) -> + let ty = Ty.pi vars (Ty.arrow args ret) in + let ty = finalize_wildcards_ty env ast ty in + let f = mk_term_cst env (Id.name id) ty in + List.iter (function + | Set (tag, v) -> T.Const.set_tag f tag v + | Add (tag, v) -> T.Const.add_tag f tag v + ) tags; + id, `Term_decl f + end + | Record { id; vars; _ } + | Inductive { id; vars; _ } -> + let n = List.length vars in + let c = mk_ty_cst env (Id.name id) n in + List.iter (function + | Set (tag, v) -> Ty.Const.set_tag c tag v + | Add (tag, v) -> Ty.Const.add_tag c tag v + ) tags; + id, `Type_decl c + + let record_decl env (id, tdecl) (t : Stmt.decl) = + match tdecl with + | `Type_decl c -> decl_ty_const env (Decl t) id c (Declared 
(env.file, t)) + | `Term_decl f -> decl_term_const env (Decl t) id f (Declared (env.file, t)) + + let decls env ?(attrs=[]) (d: Stmt.decl Stmt.group) = + let tags = + List.flatten @@ List.map (fun ast -> + let l = parse_attr env ast in + check_no_free_wildcards env ast; + l + ) attrs + in + if d.recursive then begin + (* Check well-foundedness *) + check_well_founded env d d.contents; + (* First pre-parse all definitions and generate the typed symbols for them *) + let parsed = List.map (parse_decl env tags) d.contents in + (* Then, since the decls are recursive, register in the global env the type + const for each decl before fully parsing and defining them. *) + let () = List.iter2 (record_decl env) parsed d.contents in + (* Then parse the complete type definition and define them. + TODO: parse (and thus define them with T) in the topological order + defined by the well-founded check ? *) + List.iter2 (define_decl env) parsed d.contents; + (* Return the defined types *) + List.map snd parsed + end else begin + List.map (fun t -> + (* First pre-parse all definitions and generate the typed symbols for them *) + let parsed = parse_decl env tags t in + (* Then parse the complete type definition and define them. 
*) + let () = define_decl env parsed t in + (* Finally record them in the state *) + let () = record_decl env parsed t in + (* return *) + snd parsed + ) d.contents + end + + (* Definitions *) + (* ************************************************************************ *) + + let parse_def_vars env vars = + let rec aux env acc = function + | [] -> env, List.rev acc + | v :: r -> + let id, v, ast = parse_ttype_var_in_binding_pos env v in + let env = add_type_var env id v ast in + aux env (v :: acc) r + in + aux env [] vars + + let parse_def_params env params = + let rec aux env acc = function + | [] -> env, List.rev acc + | p :: r -> + let id, v, ast = parse_typed_var_in_binding_pos env p in + let env = add_term_var env id v ast in + aux env (v :: acc) r + in + aux env [] params + + let parse_def_sig env (d: Stmt.def) = + let env, vars = parse_def_vars env d.vars in + let env, params = parse_def_params env d.params in + match parse_expr env d.ret_ty with + | Ttype -> + begin match params with + | [] -> + env, vars, [], `Ty_def + | _ :: _ -> + _expected env "non_dependant type (or a term)" d.ret_ty None + end + | Ty ret_ty -> + env, vars, params, `Term_def ret_ty + | (Term _ as res) + | (Tags _ as res) -> + _expected env "ttype or a type" d.ret_ty (Some res) + + let close_wildcards_in_sig (env, vars, params, ssig) (d : Stmt.def) = + let l = finalize_wildcards_def env d.ret_ty in + env, l @ vars, params, ssig + + let create_id_for_def tags (env, vars, params, ssig) (d: Stmt.def) = + match ssig with + | `Ty_def -> + assert (params = []); + let c = mk_ty_cst env (Id.name d.id) (List.length vars) in + List.iter (function + | Set (tag, v) -> Ty.Const.set_tag c tag v + | Add (tag, v) -> Ty.Const.add_tag c tag v + ) tags; + `Ty (d.id, c) + | `Term_def ret_ty -> + let params_tys = List.map (fun p -> T.Var.ty p) params in + let ty = Ty.pi vars (Ty.arrow params_tys ret_ty) in + let f = mk_term_cst env (Id.name d.id) ty in + List.iter (function + | Set (tag, v) -> 
T.Const.set_tag f tag v + | Add (tag, v) -> T.Const.add_tag f tag v + ) tags; + `Term (d.id, f) + + let record_def group id (env, _, _, _) (d : Stmt.def) = + match id with + | `Ty _ -> _error env (Defs group) (Type_def_rec d) + | `Term (_, f) -> + decl_term_const env (Def d) d.id f (Defined (env.file, d)) + + let parse_def (env, _vars, _params, ssig) (d : Stmt.def) = + match ssig, parse_expr env d.body with + | `Ty_def, Ty body -> d.body, `Ty body + | `Term_def ret_ty, Term body -> + d.body, `Term (_wrap2 env d.body T.ensure body ret_ty) + | _, ((Ttype | Tags _) as ret) -> + _expected env "term or a type" d.body (Some ret) + | _ -> assert false + + let finalize_def id (env, vars, params, _ssig) (ast, ret) = + check_no_free_wildcards env ast; + match id, ret with + | `Ty (id, c), `Ty body -> + assert (params = []); + `Type_def (id, c, vars, body) + | `Term (id, f), `Term body -> + `Term_def (id, f, vars, params, body) + | `Ty _, `Term _ + | `Term _, `Ty _ -> assert false + + let defs env ?(attrs=[]) (d : Stmt.defs) = + let tags = parse_attrs env [] attrs in + if d.recursive then begin + let envs = List.map (fun _ -> split_env_for_def env) d.contents in + let sigs = List.map2 parse_def_sig envs d.contents in + let sigs = List.map2 close_wildcards_in_sig sigs d.contents in + let ids = List.map2 (create_id_for_def tags) sigs d.contents in + let () = Misc.Lists.iter3 (record_def d) ids sigs d.contents in + let defs = List.map2 parse_def sigs d.contents in + Misc.Lists.map3 finalize_def ids sigs defs + end else begin + List.map (fun t -> + let env = split_env_for_def env in + let ssig = parse_def_sig env t in + let def = parse_def ssig t in + let ssig = close_wildcards_in_sig ssig t in + let id = create_id_for_def tags ssig t in + finalize_def id ssig def + ) d.contents + end + + (* High-level parsing function *) + (* ************************************************************************ *) + + let parse env ast = + let res = parse_prop env ast in + 
finalize_wildcards_prop env ast res + +end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/thf.mli b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/thf.mli new file mode 100644 index 0000000000000000000000000000000000000000..d834716028d3ba39ff5063faed0256b75f9f1e58 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/thf.mli @@ -0,0 +1,22 @@ + +(** Typechecking of standard terms + This module provides functions to typecheck terms from the + untyped syntax tree defined in the standard implementation. *) + +module type S = Thf_intf.S +(** Typechecker external interface *) + +module Make + (Tag: Dolmen.Intf.Tag.S) + (Ty: Dolmen.Intf.Ty.Thf + with type 'a tag := 'a Tag.t + and type path := Dolmen.Std.Path.t) + (T: Dolmen.Intf.Term.Thf + with type ty := Ty.t + and type ty_var := Ty.Var.t + and type ty_const := Ty.Const.t + and type 'a tag := 'a Tag.t + and type path := Dolmen.Std.Path.t) + : S with module Tag = Tag + and module Ty = Ty + and module T = T diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/thf_intf.ml b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/thf_intf.ml new file mode 100644 index 0000000000000000000000000000000000000000..2e0884e6f6b3d461ad54577a9fa98e53fe7e2063 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/src/typecheck/thf_intf.ml @@ -0,0 +1,37 @@ + +(* This file is free software, part of dolmen. See file "LICENSE" for more information *) + +(** External Typechecker interface for THF + + This module defines the external typechcker interface, that is, + the interface of an instantiated typechecker. 
*) + +(** {1 Typechecker interface} *) + +(** Typechecker interface *) +module type S = sig + + (** {2 Module aliases} *) + module Tag: Dolmen.Intf.Tag.S + module Ty: Dolmen.Intf.Ty.Thf + with type 'a tag := 'a Tag.t + and type path := Dolmen.Std.Path.t + module T: Dolmen.Intf.Term.Thf + with type ty := Ty.t + and type ty_var := Ty.Var.t + and type ty_const := Ty.Const.t + and type 'a tag := 'a Tag.t + and type path := Dolmen.Std.Path.t + + include Intf.Formulas + with type ty := Ty.t + and type ty_var := Ty.Var.t + and type ty_cst := Ty.Const.t + and type term := T.t + and type term_var := T.Var.t + and type term_cst := T.Const.t + and type term_cstr := T.Cstr.t + and type term_field := T.Field.t + and type 'a ast_tag := 'a Tag.t + +end diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/tools/count.ml b/Src/COLIBRI/simplex_ocaml/dolmen/tools/count.ml new file mode 100644 index 0000000000000000000000000000000000000000..11706e92203593df57a76e00e9116e76b1546f74 --- /dev/null +++ b/Src/COLIBRI/simplex_ocaml/dolmen/tools/count.ml @@ -0,0 +1,74 @@ + +(* Log base 2 *) +let rec log2 n = + if n <= 1 then 0 else 1 + log2(n asr 1) + +(* Count number of lines and maximum of columns *) +let count_lines_and_columns file = + let ch = open_in file in + let n_lines = ref 0 in + let max_col = ref 0 in + try while true do + let s = input_line ch in + n_lines := !n_lines + 1; + max_col := max !max_col (String.length s); + () + done; + assert false + with End_of_file -> + close_in ch; + !n_lines, !max_col + +(* Scan a folder *) +let rec iter_folder_rec path f = + let handle = Unix.opendir path in + let rec aux f h = + match Unix.readdir h with + | exception End_of_file -> + Unix.closedir h + | "." | ".." 
-> + aux f h + | s -> + let s = Filename.concat path s in + let stat = Unix.stat s in + begin match stat.st_kind with + | Unix.S_REG -> f s + | Unix.S_DIR -> iter_folder_rec s f + | _ -> aux f h + end; + aux f h + in + aux f handle + +(* Count and acc for one file *) +let count_one_file (line, col, sum) file = + (* Format.eprintf "counting %s@." file; *) + let lines, cols = count_lines_and_columns file in + let log_l = log2 lines in + let log_c = log2 cols in + max line log_l, max col log_c, max sum (log_l + log_c) + +(* Count and acc for one dir *) +let count_one_dir acc dir = + let r = ref acc in + iter_folder_rec dir + (fun file -> r := count_one_file !r file); + !r + + +(* Main function *) +let main () = + let l = ref [] in + Arg.parse [] (fun s -> l := s :: !l) "count [file | folder]*"; + let (lines, chars, sum) = List.fold_left (fun acc s -> + match (Unix.stat s).st_kind with + | Unix.S_REG -> count_one_file acc s + | Unix.S_DIR -> count_one_dir acc s + | _ -> acc + ) (0, 0, 0) !l + in + Format.printf "%d bits required for lines at max@\n%d bits required for cols at max\n%d bits required for both at max@." 
+ lines chars sum; + () + +let () = main () diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/tools/dune b/Src/COLIBRI/simplex_ocaml/dolmen/tools/dune index 2733a289be42b56dca4d613270081bfa1f752194..92b384c0cc361c126da20a7831013766e07873e3 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/tools/dune +++ b/Src/COLIBRI/simplex_ocaml/dolmen/tools/dune @@ -1,6 +1,13 @@ (executable (name gentests) + (modules gentests) + (libraries unix) +) + +(executable + (name count) + (modules count) (libraries unix) ) diff --git a/Src/COLIBRI/simplex_ocaml/dolmen/tools/gentests.ml b/Src/COLIBRI/simplex_ocaml/dolmen/tools/gentests.ml index 4a4236a2dda3aea8dcebe8443d5476a8c8d4b742..ba4e9cdbe7344cea42d4a4ce345efa2427458dc4 100644 --- a/Src/COLIBRI/simplex_ocaml/dolmen/tools/gentests.ml +++ b/Src/COLIBRI/simplex_ocaml/dolmen/tools/gentests.ml @@ -5,18 +5,27 @@ (* Helper functions *) (* ************************************************************************* *) -let output_of_problem file = - Filename.chop_extension file ^ ".output" +let incr_output_of_problem file = + Filename.chop_extension file ^ ".incremental" + +let full_output_of_problem file = + Filename.chop_extension file ^ ".full" let expected_of_problem file = Filename.chop_extension file ^ ".expected" +let supports_incremental file = + match Filename.extension file with + | ".ae" -> false + | _ -> true + let is_a_pb file = match Filename.extension file with | ".ae" | ".cnf" | ".icnf" | ".smt2" + | ".psmt2" | ".p" | ".zf" -> true @@ -28,11 +37,12 @@ let is_a_pb file = (* ************************************************************************* *) (* touch the file *) -let touch file = +let touch file contents = if Sys.file_exists file then true else let ch = open_out file in + output_string ch contents; let () = close_out ch in false @@ -46,6 +56,37 @@ let cat fmt file = with End_of_file -> Format.fprintf fmt "@." +(* is the file empty ? 
*) +let is_empty file = + let ch = open_in file in + try + let _ = input_char ch in + close_in ch; + false + with End_of_file -> + close_in ch; + true + +(* Read all the contents of a file *) +let read_all ch = + let b = Buffer.create 113 in + try + while true do + Buffer.add_channel b ch 30 + done; + assert false + with End_of_file -> + Buffer.contents b + +(* grep a string in a file *) +let contains pattern file = + let cmd = Format.asprintf {|grep -q "%s" %s|} pattern file in + let ch = Unix.open_process_in cmd in + let _ = read_all ch in + let res = Unix.close_process_in ch in + match res with + | Unix.WEXITED 0 -> true + | _ -> false (* Scan a folder *) let scan_folder path = @@ -54,7 +95,8 @@ let scan_folder path = match Unix.readdir h with | exception End_of_file -> Unix.closedir h; - files, folders + List.sort String.compare files, + List.sort String.compare folders | "." | ".." -> aux files folders h | s -> @@ -69,54 +111,97 @@ let scan_folder path = aux [] [] handle +(* Exit codes *) +(* ************************************************************************* *) + +type exit_code = + | Any (* Any exit code *) + | Error (* Any non-zero exit code *) + | Success (* Zero exit code *) + +let pp_exit_codes fmt = function + | Success -> Format.fprintf fmt "0" + | Error -> Format.fprintf fmt "(not 0)" + | Any -> Format.fprintf fmt "(or 0 (not 0))" + + (* Base stanza *) (* ************************************************************************* *) -let rec pp_exit_codes fmt = function - | [] -> assert false - | [x] -> Format.fprintf fmt "%d" x - | x :: r -> Format.fprintf fmt "(or %d %a)" x pp_exit_codes r +let pp_deps fmt (pb_file, additional) = + let l = (Format.asprintf "@[<h>(:input %s)@]" pb_file) :: additional in + Format.pp_print_list Format.pp_print_string fmt l + ~pp_sep:Format.pp_print_space + +let test_stanza_aux ?(deps=[]) mode fmt (res_file, pb_file, exit_codes, expected_file) = + Format.fprintf fmt " +@[<v 2>(rule@ \ + (target %s)@ \ + (deps 
@[<hov>%a@])@ \ + (package dolmen_bin)@ \ + (action @[<hov 1>(chdir %%{workspace_root}@ \ + @[<hov 1>(with-outputs-to %%{target}@ \ + @[<hov 1>(with-accepted-exit-codes %a@ \ + @[<hov 1>(run dolmen --mode=%s --color=never %%{input} %%{read-lines:flags.dune})@]\ + )@]\ + )@]\ + )@]\ + ))@]@\n\ +@[<v 2>(rule@ \ + (alias runtest)@ \ + (package dolmen_bin)@ \ + (action (diff %s %s))@])@\n" + res_file + pp_deps (pb_file, deps) + pp_exit_codes exit_codes + mode expected_file res_file -let test_stanza fmt (exit_codes, pb_file) = - let output_file = output_of_problem pb_file in +let test_stanza_incr ?deps fmt ((_, pb_file, _, _) as data) = + if not (supports_incremental pb_file) then () + else + Format.fprintf fmt "; Incremental test@\n%a@\n" + (test_stanza_aux ?deps "incremental") data + +let test_stanza_full ?deps fmt data = + Format.fprintf fmt "; Full mode test@\n%a@\n" + (test_stanza_aux ?deps "full") data + +let test_stanza ?deps fmt (exit_codes, pb_file) = + let incr_file = incr_output_of_problem pb_file in + let full_file = full_output_of_problem pb_file in let expected_file = expected_of_problem pb_file in - Format.fprintf fmt {| -; Test for %s -(rule - (target %s) - (deps %s) - (package dolmen_bin) - (action (chdir %%{workspace_root} - (with-outputs-to %%{target} - (with-accepted-exit-codes %a - (run dolmen %%{deps} %%{read-lines:flags.dune})))))) -(rule - (alias runtest) - (action (diff %s %s))) -|} - pb_file output_file pb_file - pp_exit_codes exit_codes - expected_file output_file + Format.fprintf fmt "; Test for %s@\n%a%a@\n" pb_file + (test_stanza_incr ?deps) (incr_file, pb_file, exit_codes, expected_file) + (test_stanza_full ?deps) (full_file, pb_file, exit_codes, expected_file) + (* Generating a test case *) (* ************************************************************************* *) -let is_empty_or_create file = - if touch file then - let ch = open_in file in - try - let _ = input_char ch in - false - with End_of_file -> - true +let check_expect_file 
path = + let default_expect_contents = "run 'make promote' to update this file" in + if touch path default_expect_contents then + if is_empty path then Success + else if contains "Error" path then Error + else Any else - true + Success + +let test_deps path pb = + match Filename.extension pb with + | ".p" -> + if Sys.file_exists (Filename.concat path "Axioms") then + ["(glob_files Axioms/*.ax)"] + else + [] + | _ -> [] let gen_test fmt path pb = let expected_file = Filename.concat path (expected_of_problem pb) in - let exit_codes = if is_empty_or_create expected_file then [0] else [0; 1] in - test_stanza fmt (exit_codes, pb) + let exit_codes = check_expect_file expected_file in + let deps = test_deps path pb in + test_stanza ~deps fmt (exit_codes, pb) (* Generating tests for a folder and its files *) @@ -134,15 +219,14 @@ let is_not_empty_or_delete file = else false - let gen_tests path files = match List.filter is_a_pb files with | [] -> () | pbs -> - let _ = touch (Filename.concat path "flags.dune") in + let _ = touch (Filename.concat path "flags.dune") "" in let ch = open_out (Filename.concat path "dune") in let fmt = Format.formatter_of_out_channel ch in - let () = Format.fprintf fmt "; File auto-generated by gen-dune.ml@\n" in + let () = Format.fprintf fmt "; File auto-generated by gentests.ml@\n@\n" in let () = let templ = Filename.concat path "dune.templ" in if is_not_empty_or_delete templ then begin diff --git a/Src/COLIBRI/simplex_ocaml/parser.ml b/Src/COLIBRI/simplex_ocaml/parser.ml index d4066cac901c4d8e86fcbf6c4c6000c84da7025f..94c868bfae82ff1a31e15a5b90bb5a9cc6f09bbb 100644 --- a/Src/COLIBRI/simplex_ocaml/parser.ml +++ b/Src/COLIBRI/simplex_ocaml/parser.ml @@ -1,52 +1,67 @@ module State = struct - include (Dolmen_loop.State : ((module type of Dolmen_loop.State) with type t := Dolmen_loop.State.t)) + include ( + Dolmen_loop.State : + module type of Dolmen_loop.State with type t := Dolmen_loop.State.t) + (* include Dolmen_loop.State *) type error_state = 
{ warns : string list; - error : [ `No_error | `Error of string ] + error : [ `No_error | `Error of string ]; } type t = error_state state + exception Error of t + + let () = + Printexc.register_printer (function + | Error { solve_state = { error = `No_error; _ }; _ } -> + Some "No error reported" + | Error { solve_state = { error = `Error r; _ }; _ } -> Some r + | _ -> None) + let pp_loc fmt o = match o with | None -> () - | Some loc -> - Format.fprintf fmt "%a:@ " Dolmen.Std.Loc.fmt loc + | Some loc -> Format.fprintf fmt "%a:@ " Dolmen.Std.Loc.fmt loc - let error ?loc state format = + let error ?loc state error v = let loc = Dolmen.Std.Misc.opt_map loc Dolmen.Std.Loc.full_loc in - Format.kasprintf (fun s -> - let s = Str.global_replace (Str.regexp_string "\"") "\"\"" s in - { state with solve_state = { state.solve_state with error = `Error s } } ) - ("Error @[<hov>%a" ^^ format ^^ "@]@.") - pp_loc loc + let s = + Fmt.str "Error @[<hov>%a%a@]@." pp_loc loc Dolmen_loop.Report.Error.print + (error, v) + in + let s = Str.global_replace (Str.regexp_string "\"") "\"\"" s in + { state with solve_state = { state.solve_state with error = `Error s } } - let set_logic st _ = st (** keep ALL *) + (** keep ALL *) + let set_logic st _ = st - let warn ?loc st format = + let warn ?loc st warning v = let loc = Dolmen.Std.Misc.opt_map loc Dolmen.Std.Loc.full_loc in - let aux s = - let s = Str.global_replace (Str.regexp_string "\"") "\"\"" s in - { st with solve_state = { st.solve_state with warns = s::st.solve_state.warns } } + let s = + Fmt.str "@[<v>%a%s @[<hov>%a@]@]@." 
pp_loc loc "Warning" + Dolmen_loop.Report.Warning.print (warning, v) in - Format.kasprintf aux - ("@[<v>%a%s @[<hov>" ^^ format ^^ "@]@]@.") - pp_loc loc - "Warning" - + let s = Str.global_replace (Str.regexp_string "\"") "\"\"" s in + { + st with + solve_state = { st.solve_state with warns = s :: st.solve_state.warns }; + } end -module Parser = Dolmen_loop.Parser.Pipe(Dolmen.Std.Expr)(State) -module Header = Dolmen_loop.Headers.Pipe(State) +module Parser = Dolmen_loop.Parser.Pipe (Dolmen.Std.Expr) (State) +module Header = Dolmen_loop.Headers.Pipe (State) + module Typer = struct - module T = Dolmen_loop.Typer.Make(State) + module T = Dolmen_loop.Typer.Make (State) include T - include Dolmen_loop.Typer.Pipe(Dolmen.Std.Expr)(State)(T) -end -module Colibri_Builtins = struct + include + Dolmen_loop.Typer.Pipe (Dolmen.Std.Expr) (Dolmen.Std.Expr.Print) (State) (T) +end +module Colibri_Builtins = struct module Type = Typer.T type offset_result = @@ -55,399 +70,422 @@ module Colibri_Builtins = struct | Plus of offset_result list | Minus of offset_result * offset_result - type checks = - | Eq of offset_result * offset_result * string + type checks = Eq of offset_result * offset_result * string type 'offset ty = - | Real | Int | Bool + | Real + | Int + | Bool | Array of 'offset ty * 'offset ty | Bitv of 'offset | Fp of 'offset * 'offset | Var of Dolmen.Std.Expr.Ty.Var.t - type Dolmen.Std.Expr.builtin += - | Colibri_builtin of (string*int array) - + type 'a Dolmen.Std.Builtin.t += Colibri_builtin of (string * int array) type builtin = { - app: Dolmen_loop.Typer.T.env -> Dolmen.Std.Term.t -> Dolmen.Std.Term.t list -> int array -> Dolmen.Std.Expr.term - ; param_explicit_arity: int - ; param_implicit_arity: int + app : + Dolmen_loop.Typer.T.env -> + Dolmen.Std.Term.t -> + Dolmen.Std.Term.t list -> + int array -> + Dolmen.Std.Expr.term; + param_explicit_arity : int; + param_implicit_arity : int; } - type 'a Dolmen_loop.Typer.T.err += - Generic_type_error of string + type 'a 
Dolmen_loop.Typer.T.err += Generic_type_error of string let registered = Hashtbl.create 10 - let split_id = Dolmen_std.Misc.split_on_char '\000' - let parse_int_pos env ast s = try let i = int_of_string s in if 0 < i then i else raise Exit with Failure _ | Exit -> - let err = Dolmen_loop.Typer.T.Expected ("an integer greater than 0", None) in + let err = + Dolmen_loop.Typer.T.Expected ("an integer greater than 0", None) + in Dolmen_loop.Typer.T._error env (Dolmen_loop.Typer.T.Ast ast) err - let parse_id env id = - match split_id id with - | name :: r -> begin - match Hashtbl.find_opt registered name with - | None -> `Not_found - | Some builtin -> - `Term begin fun ast args -> - let r = Array.map (parse_int_pos env ast) (Array.of_list r) in + let parse_id env (id : Dolmen_std.Name.t) = + let name, r = + match id with + | Simple s -> (s, []) + | Indexed { basename; indexes } -> (basename, indexes) + | _ -> assert false + in + match Hashtbl.find_opt registered name with + | None -> `Not_found + | Some builtin -> + `Term + (fun ast args -> + let r = Array.map (parse_int_pos env ast) (Array.of_list r) in let n = Array.length r in - if n = builtin.param_explicit_arity then - builtin.app env ast args r - else begin - let err = Dolmen_loop.Typer.T.Bad_op_arity (name, [n], builtin.param_explicit_arity) in - Dolmen_loop.Typer.T._error env (Dolmen_loop.Typer.T.Ast ast) err - end - end - end - | [] -> `Not_found - - let parse : Dolmen_loop.Typer.T.builtin_symbols = fun env s -> + if n = builtin.param_explicit_arity then builtin.app env ast args r + else + let err = + Dolmen_loop.Typer.T.Bad_index_arity + (name, n, builtin.param_explicit_arity) + in + Dolmen_loop.Typer.T._error env (Dolmen_loop.Typer.T.Ast ast) err) + + let parse : Dolmen_loop.Typer.T.builtin_symbols = + fun env s -> match s with (* sort *) - | Dolmen_loop.Typer.T.Id { Dolmen.Std.Id.ns = Dolmen.Std.Id.Term; name; } -> - parse_id env name + | Dolmen_loop.Typer.T.Id + { Dolmen.Std.Id.ns = 
Dolmen.Std.Namespace.Term; name } -> + parse_id env name | _ -> `Not_found let () = Typer.additional_builtins := parse - (* automatic cache *) let with_cache ~cache f x = match Hashtbl.find cache x with | res -> res | exception Not_found -> - let res = f x in - Hashtbl.add cache x res; - res + let res = f x in + Hashtbl.add cache x res; + res - let compute_offset_for_args params off = - params.(off) + let compute_offset_for_args params off = params.(off) let rec compute_offset_for_result params = function | Offset off -> params.(off) | Const i -> i | Plus offl -> - List.fold_left - (fun acc off -> acc + compute_offset_for_result params off) - 0 offl - | Minus (off1,off2) -> - (compute_offset_for_result params off1) - - (compute_offset_for_result params off2) + List.fold_left + (fun acc off -> acc + compute_offset_for_result params off) + 0 offl + | Minus (off1, off2) -> + compute_offset_for_result params off1 + - compute_offset_for_result params off2 let check_offsets env ast params = function - | Eq (off1,off2,s) -> - let off1 = compute_offset_for_result params off1 in - let off2 = compute_offset_for_result params off2 in - if not (off1 = off2) then - let err = - Generic_type_error - (Format.asprintf "Checks %s failed %i <> %i" s off1 off2) - in - Dolmen_loop.Typer.T._error env (Ast ast) err + | Eq (off1, off2, s) -> + let off1 = compute_offset_for_result params off1 in + let off2 = compute_offset_for_result params off2 in + if not (off1 = off2) then + let err = + Generic_type_error + (Format.asprintf "Checks %s failed %i <> %i" s off1 off2) + in + Dolmen_loop.Typer.T._error env (Ast ast) err let rec instantiate_type compute_offset params = function | Int -> Dolmen.Std.Expr.Ty.int | Real -> Dolmen.Std.Expr.Ty.real | Bool -> Dolmen.Std.Expr.Ty.prop - | Array(a,b) -> Dolmen.Std.Expr.Ty.array - (instantiate_type compute_offset params a) - (instantiate_type compute_offset params b) + | Array (a, b) -> + Dolmen.Std.Expr.Ty.array + (instantiate_type compute_offset 
params a) + (instantiate_type compute_offset params b) | Bitv off -> Dolmen.Std.Expr.Ty.bitv (compute_offset params off) - | Fp (off1,off2) -> Dolmen.Std.Expr.Ty.float - (compute_offset params off1) - (compute_offset params off2) + | Fp (off1, off2) -> + Dolmen.Std.Expr.Ty.float + (compute_offset params off1) + (compute_offset params off2) | Var v -> Dolmen.Std.Expr.Ty.of_var v - let unify_offset env ast params off i = match params.(off) with | None -> params.(off) <- Some i | Some j when i = j -> () | Some j -> - let err = - Generic_type_error - (Format.asprintf "Can't unify type parameters (bitv or fp) %i instead of %i" i j) - in - Dolmen_loop.Typer.T._error env (Ast ast) err + let err = + Generic_type_error + (Format.asprintf + "Can't unify type parameters (bitv or fp) %i instead of %i" i j) + in + Dolmen_loop.Typer.T._error env (Ast ast) err - let rec unify_types env ast params (ty:Dolmen.Std.Expr.Ty.t) arg = + let rec unify_types env ast params (ty : Dolmen.Std.Expr.Ty.t) arg = let error expected = let err = - Dolmen_loop.Typer.T.Expected(expected,Some (Dolmen_loop.Typer.T.Ty ty)) + Dolmen_loop.Typer.T.Expected (expected, Some (Dolmen_loop.Typer.T.Ty ty)) in Dolmen_loop.Typer.T._error env (Ast ast) err in match arg with - | Bitv off -> begin + | Bitv off -> ( + match ty with + | { + ty_descr = TyApp ({ builtin = Dolmen.Std.Builtin.Bitv i; _ }, _); + _; + } -> + unify_offset env ast params off i + | _ -> error "Bitv") + | Fp (off1, off2) -> ( match ty with - | { descr = App ({ builtin = Dolmen.Std.Expr.Bitv i; _ }, _); _ } -> - unify_offset env ast params off i - | _ -> - error "Bitv" - end - | Fp (off1,off2) -> begin + | { + ty_descr = TyApp ({ builtin = Dolmen.Std.Builtin.Float (i, j); _ }, _); + _; + } -> + unify_offset env ast params off1 i; + unify_offset env ast params off2 j + | _ -> error "FloatingPoint") + | Array (a, b) -> ( match ty with - | { descr = App ({ builtin = Dolmen.Std.Expr.Float(i,j); _ }, _); _ } -> - unify_offset env ast params off1 i; - 
unify_offset env ast params off2 j - | _ -> error "FloatingPoint" - end - | Array(a,b) -> begin - match ty with - | { descr = App ({ builtin = Dolmen.Std.Expr.Array; _ }, [i;j]); _ } -> - unify_types env ast params i a; - unify_types env ast params j b - | _ -> error "array" - end - | Int -> if not (Dolmen.Std.Expr.Ty.(equal ty int)) then error "int" - | Real -> if not (Dolmen.Std.Expr.Ty.(equal ty real)) then error "real" - | Bool -> if not (Dolmen.Std.Expr.Ty.(equal ty prop)) then error "bool" - | Var _ -> () (* will be checked by Dolmen inferred engine *) - - - let register name poly param_explicit_arity param_implicit_arity args res checks = + | { + ty_descr = TyApp ({ builtin = Dolmen.Std.Builtin.Array; _ }, [ i; j ]); + _; + } -> + unify_types env ast params i a; + unify_types env ast params j b + | _ -> error "array") + | Int -> if not Dolmen.Std.Expr.Ty.(equal ty int) then error "int" + | Real -> if not Dolmen.Std.Expr.Ty.(equal ty real) then error "real" + | Bool -> if not Dolmen.Std.Expr.Ty.(equal ty prop) then error "bool" + | Var _ -> () + (* will be checked by Dolmen inferred engine *) + + let register name poly param_explicit_arity param_implicit_arity args res + checks = let symbol = with_cache ~cache:(Hashtbl.create 13) (fun params -> - assert ( Array.length params = param_explicit_arity + param_implicit_arity ); - Dolmen.Std.Expr.Id.const ~name ~builtin:(Colibri_builtin (name,params)) - name poly - (List.map (instantiate_type compute_offset_for_args params) args) - (instantiate_type compute_offset_for_result params res)) + assert ( + Array.length params = param_explicit_arity + param_implicit_arity); + let ty = + Dolmen.Std.Expr.Ty.arrow + (List.map (instantiate_type compute_offset_for_args params) args) + (instantiate_type compute_offset_for_result params res) + in + Dolmen.Std.Expr.Id.mk ~name + ~builtin:(Colibri_builtin (name, params)) + (Dolmen.Std.Path.global name) + (Dolmen.Std.Expr.Ty.pi poly ty)) in let len = List.length args in let app env 
ast targs instantiation_explicit = - let l = List.length args in - if not (l = len) - then + let l = List.length targs in + if not (l = len) then Dolmen_loop.Typer.T._error env (Dolmen_loop.Typer.T.Ast ast) - (Dolmen_loop.Typer.T.Bad_op_arity (name, [len], l)) + (Dolmen_loop.Typer.T.Bad_index_arity (name, len, l)) else - let instantiation = Array.make (param_explicit_arity + param_implicit_arity) None in - Array.iteri (fun i k -> instantiation.(i) <- Some k) instantiation_explicit; + let instantiation = + Array.make (param_explicit_arity + param_implicit_arity) None + in + Array.iteri + (fun i k -> instantiation.(i) <- Some k) + instantiation_explicit; let targs = List.map (Dolmen_loop.Typer.T.parse_term env) targs in - List.iter2 (fun t arg -> unify_types env ast instantiation (Dolmen.Std.Expr.Term.ty t) arg) targs args; - let instantiation = Array.map (function - | Some x -> x - | None -> - let err = Generic_type_error "Internal error: some parameter are unbounded" in - Dolmen_loop.Typer.T._error env (Ast ast) err - ) instantiation in + List.iter2 + (fun t arg -> + unify_types env ast instantiation (Dolmen.Std.Expr.Term.ty t) arg) + targs args; + let instantiation = + Array.map + (function + | Some x -> x + | None -> + let err = + Generic_type_error + "Internal error: some parameter are unbounded" + in + Dolmen_loop.Typer.T._error env (Ast ast) err) + instantiation + in List.iter (check_offsets env ast instantiation) checks; - Dolmen.Std.Expr.Term.apply - (symbol instantiation) - (List.map (fun _ -> Dolmen.Std.Expr.Ty.wildcard ()) poly) + Dolmen.Std.Expr.Term.apply_cst (symbol instantiation) + (List.map + (fun _ -> Dolmen.Std.Expr.Ty.(of_var @@ Var.wildcard ())) + poly) targs in - Hashtbl.add registered name { app; param_explicit_arity; param_implicit_arity } + Hashtbl.add registered name + { app; param_explicit_arity; param_implicit_arity } end module Errors = struct - let sigint st = State.error st "User Interrupt" - let out_of_time st = State.error st "Time 
limit reached" - let out_of_space st = State.error st "Memory limit reached" + (* let sigint st = State.error st "User Interrupt" + * let out_of_time st = State.error st "Time limit reached" + * let out_of_space st = State.error st "Memory limit reached" *) (* Converter for input file/stdin *) -let input_to_string = function - | `Stdin -> "<stdin>" - | `Raw _ -> "<raw>" - | `File f -> f - -let mk_error ?loc _ format = - let loc = Dolmen.Std.Misc.opt_map loc Dolmen.Std.Loc.full_loc in - Format.kasprintf (fun s -> - let s = Str.global_replace (Str.regexp_string "\"") "\"\"" s in - s ) - ("Error @[<hov>%a" ^^ format ^^ "@]@.") - State.pp_loc loc - -let exn st = function - (* Parsing errors *) - | Dolmen.Std.Loc.Uncaught (loc, exn) -> - let file = Dolmen_loop.State.input_file_loc st in - mk_error ~loc:{ file; loc; } st "%s" (Printexc.to_string exn) - | Dolmen.Std.Loc.Lexing_error (loc, lex) -> - let file = Dolmen_loop.State.input_file_loc st in - mk_error ~loc:{ file; loc; } st "Lexing error: invalid character '%s'" lex - | Dolmen.Std.Loc.Syntax_error (loc, msg) -> - let file = Dolmen_loop.State.input_file_loc st in - mk_error ~loc: { file; loc; } st "%t@." msg - - - (* Typing errors *) - | Dolmen_loop.Typer.T.Typing_error ( - Dolmen_loop.Typer.T.Error (env, fragment, _err) as error) -> - let loc = Dolmen_loop.Typer.T.fragment_loc env fragment in - if st.context then - Format.eprintf "@[<hv 2>While typing:@ @[<hov>%a@]@]@." - Typer.print_fragment (env, fragment); - mk_error ~loc st "%a" - Typer.report_error error - - (* State errors *) - | Dolmen_loop.State.File_not_found (loc, dir, f) -> - if dir = "." 
then - mk_error ~loc st "File not found: '%s'" f - else - mk_error ~loc st "File not found: '%s' in directory '%s'" f dir - | Dolmen_loop.State.Input_lang_changed (l, l') -> - mk_error st "Input language changed from %s to %s (probably because of an include statement)" - (Dolmen_loop.Logic.string_of_language l) - (Dolmen_loop.Logic.string_of_language l') - - (* Internal Dolmen Expr errors *) - | Dolmen.Std.Expr.Bad_ty_arity (c, l) -> - let pp_sep fmt () = Format.fprintf fmt ";@ " in - mk_error st "@[<hv>Internal error: Bad arity for type constant '%a',@ which was provided arguments:@ [@[<hv>%a@]]@]" - Dolmen.Std.Expr.Print.ty_const c (Format.pp_print_list ~pp_sep Dolmen.Std.Expr.Ty.print) l - | Dolmen.Std.Expr.Bad_term_arity (c, tys, ts) -> - let pp_sep fmt () = Format.fprintf fmt ";@ " in - mk_error st "@[<hv>Internal error: Bad arity for type constant '%a',@ which was provided arguments:@ [@[<hv>%a;@ %a@]]@]" - Dolmen.Std.Expr.Print.term_const c - (Format.pp_print_list ~pp_sep Dolmen.Std.Expr.Ty.print) tys - (Format.pp_print_list ~pp_sep Dolmen.Std.Expr.Term.print) ts - | Dolmen.Std.Expr.Type_already_defined c -> - mk_error st "@[<hv>Internal error: Type constant '%a' was already defined earlier,@ cannot re-define it.@]" - Dolmen.Std.Expr.Print.id c - - | Dolmen.Std.Expr.Term.Wrong_type (t, ty) -> - mk_error st "@[<hv>Internal error: A term of type@ %a@ was expected but instead got a term of type@ %a@]" - Dolmen.Std.Expr.Ty.print ty Dolmen.Std.Expr.Ty.print (Dolmen.Std.Expr.Term.ty t) - - (* File format auto-detect *) - | Dolmen_loop.Logic.Extension_not_found ext -> - mk_error st "@[<hv>The following extension was not recognized: '%s'.@ %s" ext - "Please use a recognised extension or specify an input language on the command line" - - (* Generic catch-all *) - | e -> mk_error st "@[<hv>Unhandled exception:@ %s@]" (Printexc.to_string e) + let input_to_string = function + | `Stdin -> "<stdin>" + | `Raw _ -> "<raw>" + | `File f -> f + + let mk_error ?loc _ error v = 
+ let loc = Dolmen.Std.Misc.opt_map loc Dolmen.Std.Loc.full_loc in + Format.kasprintf + (fun s -> + let s = Str.global_replace (Str.regexp_string "\"") "\"\"" s in + s) + "Error @[<hov>%a%a@]@." State.pp_loc loc Dolmen_loop.Report.Error.print + (error, v) + + let exn st = function + (* Sigint, potentially wrapped by the typechecker *) + | Dolmen_loop.Pipeline.Sigint -> + Format.pp_print_flush Format.std_formatter (); + Format.pp_print_flush Format.err_formatter (); + mk_error st Dolmen_loop.Report.Error.user_interrupt () + (* Timeout, potentially wrapped by the typechecker *) + | Dolmen_loop.Pipeline.Out_of_time -> + Format.pp_print_flush Format.std_formatter (); + mk_error st Dolmen_loop.Report.Error.timeout () + | Dolmen_loop.Pipeline.Out_of_space -> + Format.pp_print_flush Format.std_formatter (); + Format.pp_print_flush Format.err_formatter (); + mk_error st Dolmen_loop.Report.Error.spaceout () + (* Internal Dolmen Expr errors *) + | Dolmen.Std.Expr.Ty.Bad_arity (c, l) -> + let pp_sep fmt () = Format.fprintf fmt ";@ " in + mk_error st Dolmen_loop.Report.Error.internal_error (fun fmt -> + Format.fprintf fmt + "@[<hv>Internal error: Bad arity for type constant '%a',@ which \ + was provided arguments:@ [@[<hv>%a@]]@]" + Dolmen.Std.Expr.Print.ty_cst c + (Format.pp_print_list ~pp_sep Dolmen.Std.Expr.Ty.print) + l) + | Dolmen.Std.Expr.Type_already_defined c -> + mk_error st Dolmen_loop.Report.Error.internal_error (fun fmt -> + Format.fprintf fmt + "@[<hv>Internal error: Type constant '%a' was already defined \ + earlier,@ cannot re-define it.@]" + Dolmen.Std.Expr.Print.id c) + | Dolmen.Std.Expr.Term.Wrong_type (t, ty) -> + mk_error st Dolmen_loop.Report.Error.internal_error (fun fmt -> + Format.fprintf fmt + "@[<hv>Internal error: A term of type@ %a@ was expected but \ + instead got a term of type@ %a@]" + Dolmen.Std.Expr.Ty.print ty Dolmen.Std.Expr.Ty.print + (Dolmen.Std.Expr.Term.ty t)) + (* Generic catch-all *) + | exn -> + let bt = Printexc.get_raw_backtrace () 
in + mk_error st Dolmen_loop.Report.Error.uncaught_exn (exn, bt) end type env_pre = { - mutable state: State.t; - read: (State.t -> Dolmen.Std.Statement.t option); - mutable expand_stack: Dolmen.Std.Statement.t Gen.gen list; - mutable next_statement: Typer.typechecked Typer.stmt option option; + mutable state : State.t; + read : State.t -> State.t * Dolmen.Std.Statement.t option; + mutable expand_stack : + (State.t -> State.t * Dolmen.Std.Statement.t option) list; + mutable next_statement : Typer.typechecked Typer.stmt option option; } -type env = - | InitError of string - | Env of env_pre +type env = InitError of string | Env of env_pre let create_env input_source = - Dolmen.Std.Loc.reset_files (); - let input_source = match input_source with - | "-" -> `Stdin - | s -> `File s + let input_source = match input_source with "-" -> `Stdin | s -> `File s in + let st : State.t = + { + time_limit = 300.; + size_limit = 1_000_000_000.; + max_warn = max_int; + cur_warn = 0; + reports = + Dolmen_loop.Report.Conf.mk + ~default:Dolmen_loop.Report.Warning.Status.Disabled; + input_dir = ""; + input_lang = Some (Smtlib2 `V2_6); + input_mode = Some `Full; + input_source; + type_state = Dolmen_loop.Typer.new_state (); + type_check = true; + header_check = true; + header_licenses = []; + header_lang_version = None; + header_state = Dolmen_loop.Headers.empty; + input_file_loc = Dolmen.Std.Loc.mk_file ""; + solve_state = { warns = []; error = `No_error }; + export_lang = []; + debug = false; + loc_style = `Short; + } in - let st : State.t = { - time_limit = 300.; size_limit = 1_000_000_000.; - - max_warn = max_int; cur_warn = 0; - - input_dir=""; input_lang = Some (Smtlib2 `V2_6); - input_mode = Some `Full; input_source; - - type_state = Dolmen_loop.Typer.new_state (); - type_check = true; type_strict = false; - - header_check = true; header_licenses = []; header_lang_version = None; - header_state = Dolmen_loop.Headers.empty; - - input_file_loc = Dolmen.Std.Loc.mk_file ""; - - 
solve_state = { warns = []; error = `No_error }; - - export_lang = []; - debug = false; - context = false; - } in - try - let st, g = - Parser.parse [] st - in - Env { state = st; read = g; expand_stack = []; next_statement = None } - with exn -> - InitError (Errors.exn st exn) + try + let st, g = Parser.parse [] st in + Env { state = st; read = g; expand_stack = []; next_statement = None } + with exn -> InitError (Errors.exn st exn) let update_env env = let rec nextstmt () = match env.expand_stack with - | [] -> begin match env.read env.state with - | None -> None - | Some stmt -> expand stmt - end - | g::l -> - begin match Gen.get g with - | None -> env.expand_stack <- l; nextstmt () - | Some stmt -> expand stmt - end + | [] -> ( + match env.read env.state with + | state, None -> + env.state <- state; + None + | state, Some stmt -> + env.state <- state; + expand stmt) + | g :: l -> ( + match g env.state with + | state, None -> + env.state <- state; + env.expand_stack <- l; + nextstmt () + | state, Some stmt -> + env.state <- state; + expand stmt) and expand stmt = let st, gen = Parser.expand env.state stmt in env.state <- st; match gen with | `Ok -> Some stmt - | `Gen (_,gen) -> - env.expand_stack <- gen::env.expand_stack; - nextstmt () + | `Gen (_, gen) -> + env.expand_stack <- gen :: env.expand_stack; + nextstmt () in let rec nextexpr () = match nextstmt () with | None -> env.next_statement <- Some None - | Some stmt -> - begin match Typer.typecheck env.state stmt with + | Some stmt -> ( + match Typer.typecheck env.state stmt with | st, `Continue stmt -> - env.state <- st; - env.next_statement <- Some (Some stmt) + env.state <- st; + env.next_statement <- Some (Some stmt) | st, `Done () -> - env.state <- st; - nextexpr () - end + env.state <- st; + nextexpr ()) in nextexpr () let print_typed d = - match d.contents with - | `Type_def _ -> "Type_def" - | `Goal g -> Format.asprintf "Goal %a" Dolmen.Std.Expr.Print.term g - | `Hyp g -> Format.asprintf "Hyp %a" 
Dolmen.Std.Expr.Print.term g - | `Clause l -> - String.concat ";" (List.map (Format.asprintf "Clause %a" Dolmen.Std.Expr.Print.term) l) - | `Term_def (_,_,_,t) -> Format.asprintf "Term_def %a" Dolmen.Std.Expr.Print.term t - | `Solve [] -> Format.asprintf "Solve []" - | `Solve _ -> Format.asprintf "Solve ..." - | `Executed -> Format.asprintf "Executed (What is this?)" - | `Decls l -> + match d.contents with + | `Type_def _ -> "Type_def" + | `Goal g -> Format.asprintf "Goal %a" Dolmen.Std.Expr.Print.term g + | `Hyp g -> Format.asprintf "Hyp %a" Dolmen.Std.Expr.Print.term g + | `Clause l -> String.concat ";" - (List.map (function - | `Type_decl ty -> Format.asprintf "type_decl %a" Dolmen.Std.Expr.Print.ty_const ty - | `Term_decl t -> Format.asprintf "term_decl %a" Dolmen.Std.Expr.Print.term_const t) l - ) + (List.map (Format.asprintf "Clause %a" Dolmen.Std.Expr.Print.term) l) + | `Term_def (_, _, _, t) -> + Format.asprintf "Term_def %a" Dolmen.Std.Expr.Print.term t + | `Solve [] -> Format.asprintf "Solve []" + | `Solve _ -> Format.asprintf "Solve ..." 
+ | `Executed -> Format.asprintf "Executed (What is this?)" + | `Decls l -> + String.concat ";" + (List.map + (function + | `Type_decl ty -> + Format.asprintf "type_decl %a" Dolmen.Std.Expr.Print.ty_cst ty + | `Term_decl t -> + Format.asprintf "term_decl %a" Dolmen.Std.Expr.Print.term_cst t) + l) module To_Eclipse = struct - - let mk id args = Ocaml_eclipse.Constr(String.lowercase_ascii id, args) - let atom id = Ocaml_eclipse.Constr(String.lowercase_ascii id, [||]) + let mk id args = Ocaml_eclipse.Constr (String.lowercase_ascii id, args) + let atom id = Ocaml_eclipse.Constr (String.lowercase_ascii id, [||]) let string s = Ocaml_eclipse.String s let int32 i = Ocaml_eclipse.Int32 i let int i = Ocaml_eclipse.Int32 (Int32.of_int i) let bool b = if b then Ocaml_eclipse.Int32 1l else Ocaml_eclipse.Int32 0l - let index (i:Dolmen.Std.Expr.index) = int (i:>int) + let index (i : Dolmen.Std.Expr.index) = int (i :> int) let list conv l = Ocaml_eclipse.List (Array.of_list (List.map conv l)) let array conv l = Ocaml_eclipse.List (Array.map conv l) - let rec builtin (x:Dolmen.Std.Expr.builtin) = - let open Dolmen.Std.Expr in + let rec builtin (x : Dolmen.Std.Expr.builtin) = + let open Dolmen.Std.Builtin in match x with | Base -> atom "base" - | Wildcard -> atom "wildcard" + | Wildcard _ -> atom "wildcard" | Prop -> atom "prop" | Unit -> atom "unit" | Univ -> atom "univ" @@ -465,10 +503,10 @@ module To_Eclipse = struct | Imply -> atom "imply" | Equiv -> atom "equiv" | Ite -> atom "ite" - | Tester {cstr} -> mk "tester" [| id cstr |] - | Constructor {adt;case} -> mk "constructor" [| id adt; int case |] - | Destructor {adt;cstr;case;field} -> - mk "destructor" [| id adt; id cstr; int case; int field |] + | Tester { cstr; adt = _; case = _ } -> mk "tester" [| id cstr |] + | Constructor { adt; case } -> mk "constructor" [| id adt; int case |] + | Destructor { adt; cstr; case; field } -> + mk "destructor" [| id adt; id cstr; int case; int field |] | Int -> atom "int" | Integer s -> mk 
"integer" [| string s |] | Rat -> atom "rat" @@ -504,7 +542,7 @@ module To_Eclipse = struct | Bitv i -> mk "bitv" [| int i |] | Bitvec s -> mk "bitvec" [| string s |] | Bitv_concat -> atom "bitv_concat" - | Bitv_extract (i1,i2) -> mk "bitv_extract" [| int i1 ; int i2 |] + | Bitv_extract (i1, i2) -> mk "bitv_extract" [| int i1; int i2 |] | Bitv_repeat -> atom "bitv_repeat" | Bitv_zero_extend -> atom "bitv_zero_extend" | Bitv_sign_extend -> atom "bitv_sign_extend" @@ -544,45 +582,46 @@ module To_Eclipse = struct | RoundTowardPositive -> atom "roundtowardpositive" | RoundTowardNegative -> atom "roundtowardnegative" | RoundTowardZero -> atom "roundtowardzero" - | Float (e,s) -> mk "float" [| int e; int s |] - | Fp (e,s) -> mk "fp" [| int e; int s |] - | Plus_infinity (e,s) -> mk "plus_infinity" [| int e; int s |] - | Minus_infinity (e,s) -> mk "minus_infinity" [| int e; int s |] - | Plus_zero (e,s) -> mk "plus_zero" [| int e; int s |] - | Minus_zero (e,s) -> mk "minus_zero" [| int e; int s |] - | NaN (e,s) -> mk "nan" [| int e; int s |] - | Fp_abs (e,s) -> mk "fp_abs" [| int e; int s |] - | Fp_neg (e,s) -> mk "fp_neg" [| int e; int s |] - | Fp_add (e,s) -> mk "fp_add" [| int e; int s |] - | Fp_sub (e,s) -> mk "fp_sub" [| int e; int s |] - | Fp_mul (e,s) -> mk "fp_mul" [| int e; int s |] - | Fp_div (e,s) -> mk "fp_div" [| int e; int s |] - | Fp_fma (e,s) -> mk "fp_fma" [| int e; int s |] - | Fp_sqrt (e,s) -> mk "fp_sqrt" [| int e; int s |] - | Fp_rem (e,s) -> mk "fp_rem" [| int e; int s |] - | Fp_roundToIntegral (e,s) -> mk "fp_roundtointegral" [| int e; int s |] - | Fp_min (e,s) -> mk "fp_min" [| int e; int s |] - | Fp_max (e,s) -> mk "fp_max" [| int e; int s |] - | Fp_leq (e,s) -> mk "fp_leq" [| int e; int s |] - | Fp_lt (e,s) -> mk "fp_lt" [| int e; int s |] - | Fp_geq (e,s) -> mk "fp_geq" [| int e; int s |] - | Fp_gt (e,s) -> mk "fp_gt" [| int e; int s |] - | Fp_eq (e,s) -> mk "fp_eq" [| int e; int s |] - | Fp_isNormal (e,s) -> mk "fp_isnormal" [| int e; int s |] - 
| Fp_isSubnormal (e,s) -> mk "fp_issubnormal" [| int e; int s |] - | Fp_isZero (e,s) -> mk "fp_iszero" [| int e; int s |] - | Fp_isInfinite (e,s) -> mk "fp_isinfinite" [| int e; int s |] - | Fp_isNaN (e,s) -> mk "fp_isnan" [| int e; int s |] - | Fp_isNegative (e,s) -> mk "fp_isnegative" [| int e; int s |] - | Fp_isPositive (e,s) -> mk "fp_ispositive" [| int e; int s |] - | Ieee_format_to_fp (e,s) -> mk "ieee_format_to_fp" [| int e; int s |] - | Fp_to_fp (e1,s1,e2,s2) -> mk "fp_to_fp" [| int e1; int s1; int e2; int s2 |] - | Real_to_fp (e,s) -> mk "real_to_fp" [| int e; int s |] - | Sbv_to_fp (bv,e,s) -> mk "sbv_to_fp" [| int bv; int e; int s |] - | Ubv_to_fp (bv,e,s) -> mk "ubv_to_fp" [| int bv; int e; int s |] - | To_ubv (e,s,bv) -> mk "to_ubv" [| int e; int s; int bv |] - | To_sbv (e,s,bv) -> mk "to_sbv" [| int e; int s; int bv |] - | To_real (e,s) -> mk "to_real" [| int e; int s |] + | Float (e, s) -> mk "float" [| int e; int s |] + | Fp (e, s) -> mk "fp" [| int e; int s |] + | Plus_infinity (e, s) -> mk "plus_infinity" [| int e; int s |] + | Minus_infinity (e, s) -> mk "minus_infinity" [| int e; int s |] + | Plus_zero (e, s) -> mk "plus_zero" [| int e; int s |] + | Minus_zero (e, s) -> mk "minus_zero" [| int e; int s |] + | NaN (e, s) -> mk "nan" [| int e; int s |] + | Fp_abs (e, s) -> mk "fp_abs" [| int e; int s |] + | Fp_neg (e, s) -> mk "fp_neg" [| int e; int s |] + | Fp_add (e, s) -> mk "fp_add" [| int e; int s |] + | Fp_sub (e, s) -> mk "fp_sub" [| int e; int s |] + | Fp_mul (e, s) -> mk "fp_mul" [| int e; int s |] + | Fp_div (e, s) -> mk "fp_div" [| int e; int s |] + | Fp_fma (e, s) -> mk "fp_fma" [| int e; int s |] + | Fp_sqrt (e, s) -> mk "fp_sqrt" [| int e; int s |] + | Fp_rem (e, s) -> mk "fp_rem" [| int e; int s |] + | Fp_roundToIntegral (e, s) -> mk "fp_roundtointegral" [| int e; int s |] + | Fp_min (e, s) -> mk "fp_min" [| int e; int s |] + | Fp_max (e, s) -> mk "fp_max" [| int e; int s |] + | Fp_leq (e, s) -> mk "fp_leq" [| int e; int s |] + | 
Fp_lt (e, s) -> mk "fp_lt" [| int e; int s |] + | Fp_geq (e, s) -> mk "fp_geq" [| int e; int s |] + | Fp_gt (e, s) -> mk "fp_gt" [| int e; int s |] + | Fp_eq (e, s) -> mk "fp_eq" [| int e; int s |] + | Fp_isNormal (e, s) -> mk "fp_isnormal" [| int e; int s |] + | Fp_isSubnormal (e, s) -> mk "fp_issubnormal" [| int e; int s |] + | Fp_isZero (e, s) -> mk "fp_iszero" [| int e; int s |] + | Fp_isInfinite (e, s) -> mk "fp_isinfinite" [| int e; int s |] + | Fp_isNaN (e, s) -> mk "fp_isnan" [| int e; int s |] + | Fp_isNegative (e, s) -> mk "fp_isnegative" [| int e; int s |] + | Fp_isPositive (e, s) -> mk "fp_ispositive" [| int e; int s |] + | Ieee_format_to_fp (e, s) -> mk "ieee_format_to_fp" [| int e; int s |] + | Fp_to_fp (e1, s1, e2, s2) -> + mk "fp_to_fp" [| int e1; int s1; int e2; int s2 |] + | Real_to_fp (e, s) -> mk "real_to_fp" [| int e; int s |] + | Sbv_to_fp (bv, e, s) -> mk "sbv_to_fp" [| int bv; int e; int s |] + | Ubv_to_fp (bv, e, s) -> mk "ubv_to_fp" [| int bv; int e; int s |] + | To_ubv (e, s, bv) -> mk "to_ubv" [| int e; int s; int bv |] + | To_sbv (e, s, bv) -> mk "to_sbv" [| int e; int s; int bv |] + | To_real (e, s) -> mk "to_real" [| int e; int s |] | String -> atom "String" | Str s -> mk "str" [| string s |] | Str_length -> atom "Str_length" @@ -620,54 +659,78 @@ module To_Eclipse = struct | Re_diff -> atom "Re_diff" | Re_option -> atom "Re_option" | Re_power i -> mk "Re_power" [| int i |] - | Re_loop (i,j) -> mk "Re_loop" [| int i; int j |] - | Colibri_Builtins.Colibri_builtin (s,inst) -> - mk "colibri_builtin" [| string s ; List (Array.map int inst) |] + | Re_loop (i, j) -> mk "Re_loop" [| int i; int j |] + | Colibri_Builtins.Colibri_builtin (s, inst) -> + mk "colibri_builtin" [| string s; List (Array.map int inst) |] | _ -> atom "unknown_should_not_append" - and id : type a. 
a Dolmen.Std.Expr.id -> Ocaml_eclipse.eclipse = fun id -> - mk "id" [| string id.name; index id.index; builtin id.builtin |] - - let rec ty (t:Dolmen.Std.Expr.ty) = - match t.descr with - | Var i -> mk "Var" [| id i |] - | App (i, l) -> mk "App" [| id i; list ty l |] - - let number_of_args (ft:(unit, Dolmen.Std.Expr.ttype) Dolmen.Std.Expr.function_type) : Ocaml_eclipse.eclipse = - int (List.length ft.Dolmen.Std.Expr.fun_args) - - let function_type (ft:(Dolmen.Std.Expr.ttype, Dolmen.Std.Expr.ty) Dolmen.Std.Expr.function_type) : Ocaml_eclipse.eclipse = - mk "fun" [| list id ft.Dolmen.Std.Expr.fun_vars ; - list ty ft.Dolmen.Std.Expr.fun_args ; - ty ft.Dolmen.Std.Expr.fun_ret |] - - let formal (v:Dolmen.Std.Expr.ty Dolmen.Std.Expr.id) = - mk "formal" [| id v; ty v.ty |] - - let rec term (t:Dolmen.Std.Expr.term) = - let descr = match t.descr with - | Var i -> mk "Var" [| id i|] - | App (i,tyl, args) -> - mk "App" [| id i; list ty tyl; list term args |] - | Binder (b,t) -> - mk "Binder" [| binder b; term t |] - | Match (t,patterns) -> - mk "Match" [| term t; - list (fun (pat,body) -> mk "Pat" [| term pat; term body |]) patterns |] + and id : type a. 
a Dolmen.Std.Expr.id -> Ocaml_eclipse.eclipse = + fun id -> + let name = + match id.path with Local { name } | Absolute { name; _ } -> name in - mk "term" [| ty t.ty; descr |] + mk "id" [| string name; index id.index; builtin id.builtin |] + + let rec ty (t : Dolmen.Std.Expr.ty) = + let t = Dolmen.Std.Expr.Ty.expand_head t in + match t.ty_descr with + | TyVar i -> mk "Var" [| id i |] + | TyApp (i, l) -> mk "App" [| id i; list ty l |] + | Arrow _ -> invalid_arg "Arrow found, please report" + | Pi _ -> invalid_arg "Pi found, please report" + + let number_of_args (ft : Dolmen_std.Expr.type_fun) : Ocaml_eclipse.eclipse = + int ft.arity + + let function_type ft : Ocaml_eclipse.eclipse = + let vars, args, ret = Dolmen_std.Expr.Ty.poly_sig ft in + mk "fun" [| list id vars; list ty args; ty ret |] + + let formal (v : Dolmen.Std.Expr.ty Dolmen.Std.Expr.id) = + mk "formal" [| id v; ty v.id_ty |] + + let rec term (t : Dolmen.Std.Expr.term) = + let descr = + match t.term_descr with + | Var i -> mk "Var" [| id i |] + | Cst cst -> + mk "App" + [| id cst; Ocaml_eclipse.List [||]; Ocaml_eclipse.List [||] |] + | App ({ term_descr = Cst cst; _ }, tyl, args) -> + mk "App" [| id cst; list ty tyl; list term args |] + | Binder (Let_seq l, t) -> + List.fold_right + (fun b acc -> mk "Binder" [| let_par [ b ]; acc |]) + l (term t) + | Binder (b, t) -> mk "Binder" [| binder b; term t |] + | Match (t, patterns) -> + mk "Match" + [| + term t; + list + (fun (pat, body) -> mk "Pat" [| term pat; term body |]) + patterns; + |] + | _ -> invalid_arg "Higher order term" + in + mk "term" [| ty t.term_ty; descr |] + + and let_par l = + mk "letin" [| list (fun (v, t) -> List [| id v; term t |]) l |] and binder = function - | Dolmen.Std.Expr.Exists (ty_vars,term_vars) -> - mk "exists" [| list id ty_vars; list formal term_vars |] - | Dolmen.Std.Expr.Forall (ty_vars,term_vars) -> - mk "forall" [| list id ty_vars; list formal term_vars |] - | Letin l -> - mk "letin" [| list (fun (v, t) -> List [| id v; 
term t|]) l |] - - let rec ast (t: Dolmen.Std.Term.t) = + | Dolmen.Std.Expr.Exists (ty_vars, term_vars) -> + mk "exists" [| list id ty_vars; list formal term_vars |] + | Dolmen.Std.Expr.Forall (ty_vars, term_vars) -> + mk "forall" [| list id ty_vars; list formal term_vars |] + | Let_par l -> let_par l + | Let_seq _ -> assert false + | Lambda _ -> invalid_arg "forall present" + + let rec ast (t : Dolmen.Std.Term.t) = match t.term with - | Symbol id -> mk "symbol" [| string id.Dolmen.Std.Id.name |] + | Symbol { name = Simple s; _ } -> mk "symbol" [| string s |] + | Symbol _ -> invalid_arg "unknown symbol" | Builtin Wildcard -> atom "wildcard" | Builtin Ttype -> atom "ttype" | Builtin Unit -> atom "unit" @@ -707,7 +770,7 @@ module To_Eclipse = struct | Builtin Implied -> atom "implied" | Builtin Equiv -> atom "equiv" | Builtin (Bitv i) -> mk "bitv" [| int i |] - | Builtin (Bitv_extract (i,j)) -> mk "bitv_extract" [| int i; int j |] + | Builtin (Bitv_extract (i, j)) -> mk "bitv_extract" [| int i; int j |] | Builtin Bitv_concat -> atom "bitv_concat" | Builtin Array_get -> atom "array_get" | Builtin Array_set -> atom "array_set" @@ -717,170 +780,214 @@ module To_Eclipse = struct | Builtin Record_with -> atom "record_with" | Builtin Record_access -> atom "record_access" | Builtin Maps_to -> atom "maps_to" - | Builtin (In_interval (b1,b2)) -> mk "in_interval" [| bool b1; bool b2 |] + | Builtin (In_interval (b1, b2)) -> mk "in_interval" [| bool b1; bool b2 |] | Builtin Check -> atom "check" | Builtin Cut -> atom "cut" - | Colon (t1,t2) -> mk "colon" [| ast t1; ast t2 |] - | App (f,l) -> mk "app" [| ast f; list ast l |] - | Binder _ - | Match _ -> atom "not_translated_to_eclipse" - + | Colon (t1, t2) -> mk "colon" [| ast t1; ast t2 |] + | App (f, l) -> mk "app" [| ast f; list ast l |] + | Binder _ | Match _ | Builtin _ -> atom "not_translated_to_eclipse" let stmt = function | None -> mk "end" [||] - | Some (d:Typer.typechecked Typer.stmt) -> - match d.Typer.contents with - | 
`Defs l -> - mk "Def" [| list (function - | `Type_def (_,_,t) -> mk "Type_def" [| ty t |] - | `Term_def (_,fn, ty_vars,vars,t) -> mk "Term_def" [| id fn; function_type fn.ty; list id ty_vars; list formal vars; list id ty_vars; term t |] - ) l |] - | `Goal g -> mk "Goal" [| term g |] - | `Hyp g -> mk "Hyp" [| term g |] - | `Clause l -> mk "Clause" [| list term l |] - | `Solve l -> mk "Solve" [| list term l |] - | `Decls l -> - mk "Decl" [| list (function - | `Type_decl t -> begin - match Dolmen.Std.Expr.Ty.definition t with - | None | Some Abstract -> - mk "type_decl" [| id t; number_of_args t.ty |] - | Some (Adt {ty=_;record=_;cases}) -> - let conv_dstr = function - | None -> atom "nodestructor" - | Some dstr -> mk "destructor" [| id dstr |] - in - let conv_cases {Dolmen.Std.Expr.Ty.cstr; tester; dstrs} = - mk "case" [| id cstr; id tester; array conv_dstr dstrs |] - in - mk "adt_def" [| id t; number_of_args t.ty; array conv_cases cases |] - end - | `Term_decl t -> mk "term_decl" [| id t; function_type t.ty |]) l|] - | `Reset -> mk "reset" [| |] - | `Get_unsat_core -> mk "get_unsat_core" [| |] - | `Plain t -> mk "plain" [| ast t |] - | `Get_assignment -> mk "get_assignment" [| |] - | `Get_unsat_assumptions -> mk "get_unsat_assumptions" [| |] - | `Push i -> mk "push" [| int i |] - | `Get_info s -> mk "get_info" [| string s |] - | `Get_proof -> mk "get_proof" [| |] - | `Get_assertions -> mk "get_assertions" [| |] - | `Pop i -> mk "pop" [| int i |] - | `Set_logic s -> mk "set_logic" [| string s |] - | `Set_info t -> mk "set_info" [| ast t |] - | `Get_value l -> mk "get_value" [| list term l |] - | `Get_option s -> mk "get_option" [| string s |] - | `Get_model -> mk "get_model" [| |] - | `Exit -> mk "exit" [| |] - | `Reset_assertions -> mk "reset_assertions" [| |] - | `Set_option t -> mk "set_option" [| ast t |] - | `Echo s -> mk "echo" [| string s |] - - + | Some (d : Typer.typechecked Typer.stmt) -> ( + match d.Typer.contents with + | `Defs l -> + mk "Def" + [| + list 
+ (function + | `Type_def (_, _, _, t) -> mk "Type_def" [| ty t |] + | `Term_def (_, fn, ty_vars, vars, t) -> + mk "Term_def" + [| + id fn; + function_type fn.id_ty; + list id ty_vars; + list formal vars; + list id ty_vars; + term t; + |]) + l; + |] + | `Goal g -> mk "Goal" [| term g |] + | `Hyp g -> mk "Hyp" [| term g |] + | `Clause l -> mk "Clause" [| list term l |] + | `Solve l -> mk "Solve" [| list term l |] + | `Decls l -> + mk "Decl" + [| + list + (function + | `Type_decl t -> ( + match Dolmen.Std.Expr.Ty.definition t with + | None | Some Abstract -> + mk "type_decl" [| id t; number_of_args t.id_ty |] + | Some (Adt { ty = _; record = _; cases }) -> + let conv_dstr = function + | None -> atom "nodestructor" + | Some dstr -> mk "destructor" [| id dstr |] + in + let conv_cases + { Dolmen.Std.Expr.Ty.cstr; tester; dstrs } = + mk "case" + [| id cstr; id tester; array conv_dstr dstrs |] + in + mk "adt_def" + [| + id t; + number_of_args t.id_ty; + array conv_cases cases; + |]) + | `Term_decl t -> + mk "term_decl" [| id t; function_type t.id_ty |]) + l; + |] + | `Reset -> mk "reset" [||] + | `Get_unsat_core -> mk "get_unsat_core" [||] + | `Plain t -> mk "plain" [| ast t |] + | `Get_assignment -> mk "get_assignment" [||] + | `Get_unsat_assumptions -> mk "get_unsat_assumptions" [||] + | `Push i -> mk "push" [| int i |] + | `Get_info s -> mk "get_info" [| string s |] + | `Get_proof -> mk "get_proof" [||] + | `Get_assertions -> mk "get_assertions" [||] + | `Pop i -> mk "pop" [| int i |] + | `Set_logic s -> mk "set_logic" [| string s |] + | `Set_info t -> mk "set_info" [| ast t |] + | `Get_value l -> mk "get_value" [| list term l |] + | `Get_option s -> mk "get_option" [| string s |] + | `Get_model -> mk "get_model" [||] + | `Exit -> mk "exit" [||] + | `Reset_assertions -> mk "reset_assertions" [||] + | `Set_option t -> mk "set_option" [| ast t |] + | `Echo s -> mk "echo" [| string s |]) end let next = function | InitError eclipse -> - To_Eclipse.mk "InitError" [| 
To_Eclipse.string eclipse |] + To_Eclipse.mk "InitError" [| To_Eclipse.string eclipse |] | Env env -> - let rec aux () = - match env.state.solve_state.warns, env.state.solve_state.error, env.next_statement with - | [], `No_error, None -> begin - try - update_env env; - aux () - with exn -> - let s = Errors.exn env.state exn in - env.state <- { env.state with solve_state = { env.state.solve_state with error = `Error s } }; - aux () - end - | [], `Error s, _ -> - To_Eclipse.mk "Error" [| To_Eclipse.string s |] - | [], `No_error, Some stmt -> - env.next_statement <- None; - To_Eclipse.stmt stmt - | w::l, _, _ -> - env.state <- { env.state with solve_state = { env.state.solve_state with warns = l } }; - To_Eclipse.mk "Warn" [| To_Eclipse.string w |] - in - aux () + let rec aux () = + match + ( env.state.solve_state.warns, + env.state.solve_state.error, + env.next_statement ) + with + | [], `No_error, None -> ( + try + update_env env; + aux () + with exn -> + let s = Errors.exn env.state exn in + env.state <- + { + env.state with + solve_state = { env.state.solve_state with error = `Error s }; + }; + aux ()) + | [], `Error s, _ -> To_Eclipse.mk "Error" [| To_Eclipse.string s |] + | [], `No_error, Some stmt -> + env.next_statement <- None; + To_Eclipse.stmt stmt + | w :: l, _, _ -> + env.state <- + { + env.state with + solve_state = { env.state.solve_state with warns = l }; + }; + To_Eclipse.mk "Warn" [| To_Eclipse.string w |] + in + aux () exception Add_builtins_wrong_parameter of string let wrong_param s c = - raise (Add_builtins_wrong_parameter - (Format.asprintf "Wrong parameter for %s: %a" s Ocaml_eclipse.pp_eclipse c)) + raise + (Add_builtins_wrong_parameter + (Format.asprintf "Wrong parameter for %s: %a" s Ocaml_eclipse.pp_eclipse + c)) - -let add_builtin (c:Ocaml_eclipse.eclipse) = +let add_builtin (c : Ocaml_eclipse.eclipse) = let open Ocaml_eclipse in let find h c = match c with - | Constr (s, [| |]) -> begin - match Hashtbl.find_opt h s with - | None -> 
wrong_param (Format.asprintf "param not found %s" s) c - | Some e -> e - end + | Constr (s, [||]) -> ( + match Hashtbl.find_opt h s with + | None -> wrong_param (Format.asprintf "param not found %s" s) c + | Some e -> e) | _ -> wrong_param "finding param" c in let rec convert_offset param_args = function | Int32 i -> Colibri_Builtins.Const (Int32.to_int i) - | Constr (_,[| |]) as c -> Colibri_Builtins.Offset (find param_args c) + | Constr (_, [||]) as c -> Colibri_Builtins.Offset (find param_args c) | Constr ("plus", a) -> - let l = Array.to_list (Array.map (convert_offset param_args) a) in - Colibri_Builtins.Plus l - | Constr ("minus", [| a; b |] ) -> - Colibri_Builtins.Minus (convert_offset param_args a,convert_offset param_args b) + let l = Array.to_list (Array.map (convert_offset param_args) a) in + Colibri_Builtins.Plus l + | Constr ("minus", [| a; b |]) -> + Colibri_Builtins.Minus + (convert_offset param_args a, convert_offset param_args b) | c -> wrong_param "convert type" c in let convert_check param_args = function - | Constr("eq",[| a; b; String s |]) -> - Colibri_Builtins.Eq (convert_offset param_args a, convert_offset param_args b, s) + | Constr ("eq", [| a; b; String s |]) -> + Colibri_Builtins.Eq + (convert_offset param_args a, convert_offset param_args b, s) | c -> wrong_param "convert check" c in let rec convert_type convert_offset type_args param_args = function - | Constr ("int",[| |]) -> Colibri_Builtins.Int - | Constr ("real",[| |]) -> Colibri_Builtins.Real - | Constr ("bool",[| |]) -> Colibri_Builtins.Bool - | Constr ("array",[| a; b|]) -> - Colibri_Builtins.Array - (convert_type convert_offset type_args param_args a, - convert_type convert_offset type_args param_args b) - | Constr ("bitv",[| a|]) -> - Colibri_Builtins.Bitv (convert_offset param_args a) - | Constr ("fp",[| a; b|]) -> - Colibri_Builtins.Fp (convert_offset param_args a,convert_offset param_args b) - | Constr (_,[| |]) as c -> Colibri_Builtins.Var (find type_args c) + | Constr 
("int", [||]) -> Colibri_Builtins.Int + | Constr ("real", [||]) -> Colibri_Builtins.Real + | Constr ("bool", [||]) -> Colibri_Builtins.Bool + | Constr ("array", [| a; b |]) -> + Colibri_Builtins.Array + ( convert_type convert_offset type_args param_args a, + convert_type convert_offset type_args param_args b ) + | Constr ("bitv", [| a |]) -> + Colibri_Builtins.Bitv (convert_offset param_args a) + | Constr ("fp", [| a; b |]) -> + Colibri_Builtins.Fp + (convert_offset param_args a, convert_offset param_args b) + | Constr (_, [||]) as c -> Colibri_Builtins.Var (find type_args c) | c -> wrong_param "convert type" c in match c with - | Constr ("builtin",[|String name; List poly; List param_explicit; List param_implicit; List checks; List args; res |]) -> - let type_args = Hashtbl.create 5 in - let convert_type_arg = function - | Constr (s,[| |]) -> - let v = (Dolmen.Std.Expr.Ty.Var.mk s) in - Hashtbl.add type_args s v; v - | c -> wrong_param "convert type arg" c - in - let param_args = Hashtbl.create 5 in - let add_param_arg = function - | Constr (s,[| |]) -> Hashtbl.add param_args s (Hashtbl.length param_args) - | c -> wrong_param "convert type arg" c - in - let poly = Array.to_list (Array.map convert_type_arg poly) in - Array.iter add_param_arg param_explicit; - Array.iter add_param_arg param_implicit; - Colibri_Builtins.register name - poly - (Array.length param_explicit) - (Array.length param_implicit) - (List.map (convert_type find type_args param_args) (Array.to_list args)) - (convert_type convert_offset type_args param_args res) - (List.map (convert_check param_args) (Array.to_list checks)) + | Constr + ( "builtin", + [| + String name; + List poly; + List param_explicit; + List param_implicit; + List checks; + List args; + res; + |] ) -> + let type_args = Hashtbl.create 5 in + let convert_type_arg = function + | Constr (s, [||]) -> + let v = Dolmen.Std.Expr.Ty.Var.mk s in + Hashtbl.add type_args s v; + v + | c -> wrong_param "convert type arg" c + in + let 
param_args = Hashtbl.create 5 in + let add_param_arg = function + | Constr (s, [||]) -> + Hashtbl.add param_args s (Hashtbl.length param_args) + | c -> wrong_param "convert type arg" c + in + let poly = Array.to_list (Array.map convert_type_arg poly) in + Array.iter add_param_arg param_explicit; + Array.iter add_param_arg param_implicit; + Colibri_Builtins.register name poly + (Array.length param_explicit) + (Array.length param_implicit) + (List.map (convert_type find type_args param_args) (Array.to_list args)) + (convert_type convert_offset type_args param_args res) + (List.map (convert_check param_args) (Array.to_list checks)) | c -> wrong_param "builtin" c -let add_builtins (c:Ocaml_eclipse.eclipse) = +let add_builtins (c : Ocaml_eclipse.eclipse) = match c with - | List l -> - Array.iter add_builtin l + | List l -> Array.iter add_builtin l | c -> wrong_param "list of builtins" c diff --git a/Src/COLIBRI/smt_import.pl b/Src/COLIBRI/smt_import.pl index 1cbeef5348cd04808ebb5e4c732bf2cd8783d233..139904d369d7e4c7b9a9c15526d080bf0c1c37f6 100644 --- a/Src/COLIBRI/smt_import.pl +++ b/Src/COLIBRI/smt_import.pl @@ -11,6 +11,7 @@ % pour activer le scrambler en mode test :- setval(scrambler,0)@eclipse. +:- set_stream_property(error,flush,end_of_line). :- include([new_parser_builtins]). @@ -357,7 +358,7 @@ chainable_op(Op) :- 'fp.eq','fp.leq','fp.lt','fp.geq','fp.gt',bvand,bvor,bvadd,bvmul)). get_col_id('Ite',_,ite) :- !. -get_col_id('Equals',_,=) :- !. +get_col_id('eqs',_,=) :- !. get_col_id('Distinct',_,distinct) :- !. get_col_id('Neg',_,not) :- !. get_col_id('And',_,and) :- !. @@ -679,7 +680,8 @@ check_sat_vars0 :- ; spy_here, call(Goal)), Tag, - (spy_here, + (exit_block(Tag), + spy_here, call(Goal)))), setval(diag_code,(sat,1))@eclipse, % pas de residue,sinon wrong_sat @@ -1361,7 +1363,27 @@ init_binding :- setval(quantifier,0), setval(declared_funs,E-E). 
-define_smt_func(F,TypedArgs,Type,Expr) :- +define_smt_func(F,TypedArgs,Type,Expr0) :- + % on protege les occurences multiples des TypedArgs + % avec un let qui les renomme dans Expr + (foreach((FV,FType),TypedArgs), + fromto(NLetVars,ONL,INL,[]), + fromto(Expr0,IE,OE,Expr1) do + findall(PFV, + cgiveVarInstancePath(FV,IE,[],PFV), + PathsFV), + (PathsFV = [_,_|_] -> + ONL = [(LetV,FType,as(FV,FType))|INL], + (foreach(PathFV,PathsFV), + fromto(IE,IIE,OIE,OE), + param(LetV) do + creplace_at_path_in_term(PathFV,IIE,LetV,OIE)) + ; ONL = INL, + OE = IE)), + (NLetVars = [_|_] -> + Expr = let(NLetVars,Expr1) + ; Expr = Expr0), + length(TypedArgs,Ar), getval(defined_funcs,Hfuncs), % on extraie les LetVars pour pouvoir les filtrer @@ -1623,7 +1645,6 @@ get_let_var_type(Var,NVar,Type) :- getval(let_vars,HLV), hash_get(HLV,Var,(Type,NVar)). - unsupported_error(Com) :- concat_string(["(error \"Unsupported:",Com,"\")"],Err), writeln(error,Err), @@ -1918,8 +1939,8 @@ smt_interp0('define-fun'(F,TypedVars,Sort,Expr),Decl,bool) :- !, IExpr = isFinite(IArg) and neg(isSubnormal(IArg)) ; try_factorize_in_let(Expr,NExpr), smt_interp(NExpr,IExpr,Type))) - ; try_factorize_in_let(Expr,NExpr), - smt_interp(NExpr,IExpr,Type)), + ; %try_factorize_in_let(Expr,NExpr), + smt_interp(Expr,IExpr,Type)), setval(seen_expr,OSE), reset_let_vars, Decl = true, @@ -2409,14 +2430,16 @@ smt_interp0(colibri_abs_int(A),AIA,Type) :- !, smt_interp0(colibri_abs_real(A),IA,real) :- !, smt_interp(abs(A),IA0,real), add_as(real,IA0,IA). -smt_interp0(colibri_pow_real_int(A,B),IA ^ IB,real) :- !, +%smt_interp0(colibri_pow_real_int(A,B),IA ^ IB,real) :- !, +smt_interp0(colibri_pow_real_int(A,B),colibri_pow_real(IA,IB),real) :- !, smt_interp(A,IA0,real), add_as(real,IA0,IA), (getval(real_for_int,1)@eclipse -> smt_interp(B,IB0,real_int), IB = int_from_real(IB0) ; smt_interp(B,IB,int)). 
-smt_interp0(colibri_pow_int_int(A,B),IA ^ IB,Type) :- !, +%smt_interp0(colibri_pow_int_int(A,B),IA ^ IB,Type) :- !, +smt_interp0(colibri_pow_int_int(A,B),colibri_pow_int(IA,IB),Type) :- !, (getval(real_for_int,1)@eclipse -> Type = real_int, smt_interp(A,IA0,Type), @@ -3039,7 +3062,11 @@ get_decl_type_from_sort('Int',NVar,Decl,Type) :- !, Decl = real_vars(real_int,NVar) ; Decl = int_vars(int,NVar)). get_decl_type_from_sort('_'('BitVec',N),NVar,Decl,uint(N)) :- !, - Decl = int_vars(uint(N),NVar). + ((getval(unit_tests,1)@eclipse, + N > 256) -> + concat_string([" format (_ BitVec ",N,")"],Mess), + unsupported_error(Mess) + ; Decl = int_vars(uint(N),NVar)). get_decl_type_from_sort('Real',NVar,Decl,real) :- !, Decl = real_vars(real,NVar). get_decl_type_from_sort('_'('FloatingPoint',EB,SB),NVar,Decl,Type) :- !, @@ -3405,7 +3432,8 @@ smt_interp_file(File,NewGoals) :- check_inline_goal(G,OTail), block(smt_interp0(G,Goal,bool), Tag, - (call(spy_here)@eclipse, + (exit_block(Tag), + call(spy_here)@eclipse, smt_interp0(G,Goal,bool))), functor(Goal,FG,_), (FG == array_def -> diff --git a/Src/COLIBRI/solve.pl b/Src/COLIBRI/solve.pl index e13311ab23698b724b92c038aef61b9bf3004e2e..843934ab11f5368139ec3d216bbe15201528843a 100644 --- a/Src/COLIBRI/solve.pl +++ b/Src/COLIBRI/solve.pl @@ -350,8 +350,8 @@ array_vars(TypeI,TypeE0,[T|LT]) ?- !, (var(X) -> (get_variable_type(X,array(TypeI,TypeE)) -> true - ; add_typed_var(X,array(TypeI,TypeE)), - insert_dep_inst(inst_cstr(0,X))) + ; add_typed_var(X,array(TypeI,TypeE))) + %insert_dep_inst(inst_cstr(0,X))) ; true)). array_vars(TypeI,TypeE0,X) :- var(X), @@ -363,8 +363,8 @@ array_vars(TypeI,TypeE0,X) :- ; TypeE = TypeE0), (get_variable_type(X,array(TypeI,TypeE)) -> true - ; add_typed_var(X,array(TypeI,TypeE)), - insert_dep_inst(inst_cstr(0,X))). + ; add_typed_var(X,array(TypeI,TypeE))). + %insert_dep_inst(inst_cstr(0,X))). array_vars(_,_,_). 
int_vars(sort(S),T) ?- !, @@ -806,7 +806,9 @@ power_real0(Type,A,P,B) :- set_lazy_domain(int,P), mfd:dvar_remove_smaller(P,0), set_lazy_domain(Type,B), - same_float_int_number_status(Type,A,B), + (is_float_int_number(A) -> + launch_float_int_number(B) + ; true), (var(P) -> my_suspend(power_real0(Type,A,P,B),0,P->suspend:inst) ; power_real(Type,A,P,B)). @@ -895,8 +897,10 @@ unfold_int_expr(store(EA,EI,EE), _D, Cstr, Type, R) ?- real_vars(real_int,I) ; let_int_vars(TI,I)), get_array_start(A,Start), - insert_dep_inst(dep(I,0,[E,Start])), - insert_dep_inst(dep(E,0,[I,Start])) +% insert_dep_inst(dep(I,0,[E,Start])), +% insert_dep_inst(dep(E,0,[I,Start])) + insert_dep_inst(dep(I,0,[E])), + insert_dep_inst(dep(E,0,[I])) ; true), blocked_unify(R,storec(A,I,v(u,E,CE))), @@ -914,7 +918,7 @@ unfold_int_expr(const_array(TI,TE,Const), D, Cstr, Type, R) ?- (real_type(TE,_) -> unfold_real_expr(Const,ND,Cstr,TE,NConst) ; unfold_int_expr(Const,ND,Cstr,TE,NConst)), - R = const_array(TI,TE,NConst). + blocked_unify(R,const_array(TI,TE,NConst)). unfold_int_expr(select(EA,EI), D, Cstr, Type, R) ?- ND is D + 1, unfold_int_expr(EA,ND,CA,TA,A), @@ -932,11 +936,15 @@ unfold_int_expr(select(EA,EI), D, Cstr, Type, R) ?- blocked_unify(R0,R), make_conj(CI,CA,CIA), let_int_vars(Type,R), - insert_dep_inst(dep(R,D,[I,Start])), - insert_dep_inst(dep(I,D,[R,Start])), +% insert_dep_inst(dep(R,D,[I,Start])), +% insert_dep_inst(dep(I,D,[R,Start])), + (functor(Type,array,_) -> + insert_dep_inst(inst_cstr(D,I)) + ; insert_dep_inst(dep(R,D,[I])), + insert_dep_inst(dep(I,D,[R]))), make_conj(CIA,CES,Cstr). - + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % Unfolding BV %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -2011,6 +2019,15 @@ unfold_int_expr(EA mod EB,D,Cstr,Type,R) ?- make_conj(CAB,Cond,CABCond), make_conj(CABCond,chk_undef_ediv_mod(Bool,Type,A,B,Q,R),Cstr). 
+unfold_int_expr(colibri_pow_int(EA,EN),D,Cstr,Type,R) ?- + ND is D + 1, + unfold_int_expr(EN,ND,CN,int,N), + !, + unfold_int_expr(ite(as(N,int) #>= as(0,int), + as(EA,Type)^as(N,int), + uninterp(colibri_pow_int(as(EA,Type),as(N,int)))), + D,Cstr1,Type,R), + make_conj(CN,Cstr1,Cstr). unfold_int_expr(EA ^ EN,D,Cstr,Type,R) ?- ND is D + 1, unfold_int_expr(EA,ND,CA,Type,A), @@ -3784,7 +3801,9 @@ check_alldiffint_card(Bool,Kill,L) :- ; NewL = L), (nonvar(Continue) -> check_alldiffint_card(Bool,Kill,NewL) - ; suspend(check_alldiffint_card(Bool,Kill,NewL),0,(Bool,Kill,NewL)->suspend:constrained))))), + ; (NewL = [_] -> + true + ; suspend(check_alldiffint_card(Bool,Kill,NewL),0,(Bool,Kill,NewL)->suspend:constrained)))))), set_priority(Prio), wake_if_other_scheduled(Prio)). @@ -5670,7 +5689,10 @@ isNaN(F,R) :- ((member((_Susp,isNaN(FF,RR)),LS), F == FF) -> - protected_unify(R,RR) + protected_unify(R,RR), + (R == 0 -> + ensure_not_NaN(F) + ; true) ; my_suspend(isNaN(F,R),0,[trigger(isNaN),(F,R)->suspend:constrained]))), set_priority(Prio), wake_if_other_scheduled(Prio). @@ -6256,8 +6278,12 @@ unfold_real_expr(select(EA,EI), D, Cstr, Type, R) ?- real_vars(Type,R0), eval_select(TA,A,I,R0,Start,CES), blocked_unify(R0,R), - insert_dep_inst(dep(R,D,[I,Start])), - insert_dep_inst(dep(I,D,[R,Start])), +% insert_dep_inst(dep(R,D,[I,Start])), +% insert_dep_inst(dep(I,D,[R,Start])), + (functor(Type,array,_) -> + insert_dep_inst(inst_cstr(D,I)) + ; insert_dep_inst(dep(R,D,[I])), + insert_dep_inst(dep(I,D,[R]))), make_conj(CI,CA,CIA), make_conj(CIA,CES,Cstr). @@ -7230,6 +7256,37 @@ unfold_real_expr(fp_fma(Rnd0,EA,EB,EC),D,Cstr,Type,R) ?- make_conj(CABC,Cond,CABCond), make_conj(CABCond,fp_fma(BCond,Rnd,Type,A,B,C,R),Cstr)). 
+unfold_real_expr(colibri_pow_int(EA,EN),D,Cstr,Type,R) ?- + % real_int + ND is D + 1, + unfold_int_expr(EN,ND,CN,int,N), + !, + unfold_real_expr(ite(as(N,int) #>= as(0,int), + as(EA,Type)^as(N,int), + uninterp(colibri_pow_int(as(EA,Type),as(N,int)))), + D,Cstr1,Type,R), + make_conj(CN,Cstr1,Cstr). +unfold_real_expr(colibri_pow_real(EA,EN),D,Cstr,Type,R) ?- + ND is D + 1, + unfold_real_expr(EA,ND,_,TypeA,_), + unfold_int_expr(EN,ND,CN,int,N), + !, + (var(Type) -> + Type = TypeA + ; true), + (TypeA == real_int -> + unfold_real_expr(ite(as(N,int) #>= as(0,int), + as(EA,Type)^as(N,int), + uninterp(colibri_pow_real(as(EA,Type),as(N,int)))), + D,Cstr1,Type,R) + ; unfold_real_expr(ite(as(N,int) #> as(0,int), + as(EA,Type)^as(N,int), + ite(as(N,int) #< as(0,int), + as(1.0,real) / (as(EA,Type)^(- as(N,int))), + uninterp(colibri_pow_real(as(EA,Type),as(N,int))))), + D,Cstr1,Type,R)), + make_conj(CN,Cstr1,Cstr). + unfold_real_expr(EA ^ EN,D,Cstr,RType,R) ?- ND is D + 1, unfold_real_expr(EA,ND,CA,RType,A), diff --git a/Src/COLIBRI/test_parser.pl b/Src/COLIBRI/test_parser.pl index 92e8de312eac6f0946fcd75720f1e373ce6dd08d..52713df645acb03e0c6213aa0a1cfbfe03371c34 100644 --- a/Src/COLIBRI/test_parser.pl +++ b/Src/COLIBRI/test_parser.pl @@ -7,4 +7,4 @@ test_read(ENV) :- %% :- p_simplex_ocaml_create_parser("test_smt3.smt2",ENV), %% test_read(ENV). -:- block((p_simplex_ocaml_create_parser("test_poly.smt2", _81), repeat, p_simplex_ocaml_parser_next(_81, _92), writeq(_92), nl, nl, _92 == end, !), _74, true). +:- block((p_simplex_ocaml_create_parser("bug_replay_dolmen.smt2", _81), repeat, p_simplex_ocaml_parser_next(_81, _92), writeq(_92), nl, nl, _92 == end, !), _74, true),block((p_simplex_ocaml_create_parser("bug_replay_dolmen.smt2", _181), repeat, p_simplex_ocaml_parser_next(_181, _192), writeq(_192), nl, nl, _192 == end, !), _174, true). 
diff --git a/Src/COLIBRI/util.pl b/Src/COLIBRI/util.pl index 01f6d3558fb35437a6abfeb5b867f808db1737b1..f7bcce4f1c3631310b18953384b728d1fc4146c6 100755 --- a/Src/COLIBRI/util.pl +++ b/Src/COLIBRI/util.pl @@ -234,12 +234,42 @@ insert_L(L,Seen,NL) :- % pour contourner le bug eclipse6 :- export protected_unify/2. protected_unify(X,X) ?- !. +protected_unify(X,Y) :- + ground((X,Y)), + !, + fail. +/* +protected_unify(X,Y) :- + (fail,once ((var(X) -> + get_type(X,array(_,_)) + ; functor(X,storec,_)); + (var(Y) -> + get_type(Y,array(_,_)) + ; functor(Y,storec,_))) + -> + call(spy_here)@eclipse, + once get_flag(occur_check,OC), + set_flag(occur_check,on), + block((protected_unify0(X,Y) -> + true + ; set_flag(occur_check,OC), + fail), + Exit, + (set_flag(occur_check,OC), + exit_block(Exit))), + set_flag(occur_check,OC) + ; protected_unify0(X,Y)). +*/ protected_unify(X,Y) :- getval(from_pu,PU)@eclipse, setval(from_pu,1)@eclipse, - block(X = Y, + block((X = Y -> + true + ; setval(from_pu,PU)@eclipse, + fail), Exit, - exit_block(Exit)), + (setval(from_pu,PU)@eclipse, + exit_block(Exit))), setval(from_pu,PU)@eclipse. % un peu de confort diff --git a/tests/sat/bug_55.smt2 b/tests/sat/bug_55.smt2 new file mode 100644 index 0000000000000000000000000000000000000000..3f0ff307a57a1b7093ddcfb17c888f015003f0a8 --- /dev/null +++ b/tests/sat/bug_55.smt2 @@ -0,0 +1,6 @@ +(set-logic ALL) +(set-info :smt-lib-version 2.6) +(declare-const epsilon__ Real) +(assert (= epsilon__ (colibri_pow_real_int (/ 20.0 10.0) (- 26)))) + +(check-sat)