From b6a6049a3803d3ff14a0c9664ba1d1aa650b44bd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Fran=C3=A7ois=20Bobot?= <francois.bobot@cea.fr>
Date: Wed, 13 Mar 2024 13:37:46 +0100
Subject: [PATCH] [ir] Add new NIER AST.

---
 src/transformations/native_nn_prover.ml | 191 ++++--
 src/transformations/utils.ml            |  37 +-
 src/transformations/utils.mli           |  11 +-
 src/verification.ml                     |   7 +-
 tests/acasxu.t                          | 740 +++++++++++++++++++++++-
 5 files changed, 917 insertions(+), 69 deletions(-)

diff --git a/src/transformations/native_nn_prover.ml b/src/transformations/native_nn_prover.ml
index f7754ae..ba72c84 100644
--- a/src/transformations/native_nn_prover.ml
+++ b/src/transformations/native_nn_prover.ml
@@ -20,36 +20,154 @@
 (*                                                                        *)
 (**************************************************************************)
 
-open Why3
 open Base
 
+type new_output = {
+  old_index : Why3.Number.int_constant;
+  old_nn : Language.nn;
+  new_index : int;
+  old_nn_args : Why3.Term.term list;
+}
+
+(** invariant:
+
+    - input_vars from 0 to its length - 1
+    - outputs from 0 to its length - 1 *)
+(* let create_new_nn input_vars outputs = let id = ref (-1) in let mk ?name ~sh
+   ~op ~op_p ?(pred=[]) ?(succ=[]) ?tensor = incr id; Ir.Nier_cfg.Node.create
+   ~id:(!id) ~name:name ~pred ~succ ~tensor:tensor in let x =
+   Ir.Nier_cfg.NierCFGFloat.add_edge in outputs *)
+
+(** Abstract terms that contain neural network applications *)
+let abstract_nn_term =
+  let rec do_simplify (new_index, output_vars) term =
+    match term.Why3.Term.t_node with
+    | Tapp
+        ( get (* [ ] *),
+          [
+            {
+              t_node =
+                Why3.Term.Tapp
+                  ( ls1 (* @@ *),
+                    [
+                      { t_node = Tapp (ls2 (* nn *), _); _ };
+                      {
+                        t_node =
+                          Tapp (ls3 (* input vector *), tl (* input vars *));
+                        _;
+                      };
+                    ] );
+              _;
+            };
+            ({ t_node = Tconst (ConstInt old_index); _ } as _t2);
+          ] )
+      when String.equal get.ls_name.id_string (Why3.Ident.op_get "")
+           && String.equal ls1.ls_name.id_string (Why3.Ident.op_infix "@@") -> (
+      match (Language.lookup_nn ls2, Language.lookup_vector ls3) with
+      | Some ({ nn_nb_inputs; nn_ty_elt; _ } as old_nn), Some vector_length ->
+        assert (nn_nb_inputs = vector_length && vector_length = List.length tl);
+        let ls = Why3.(Term.create_fsymbol (Ident.id_fresh "y") [] nn_ty_elt) in
+        let term = Why3.Term.fs_app ls [] nn_ty_elt in
+        ( ( new_index + 1,
+            ({ old_index; new_index; old_nn; old_nn_args = tl }, ls)
+            :: output_vars ),
+          term )
+      | _, _ ->
+        failwith
+          (Fmt.str "nn application without fixed NeuralNetwork or Arguments: %a"
+             Why3.Pretty.print_term term))
+    | _ -> Why3.Term.t_map_fold do_simplify (new_index, output_vars) term
+  in
+  Why3.Trans.fold_map
+    (fun task_hd (((index, input_vars) as acc), task) ->
+      let tdecl = task_hd.task_decl in
+      match tdecl.td_node with
+      | Decl
+          {
+            d_node =
+              Dparam ({ ls_args = []; ls_constr = 0; ls_proj = false; _ } as ls);
+            _;
+          }
+        when Language.mem_nn ls ->
+        (* All neural networks are removed *) (acc, task)
+      | Decl
+          {
+            d_node =
+              Dparam ({ ls_args = []; ls_constr = 0; ls_proj = false; _ } as ls);
+            _;
+          } ->
+        (* Add meta for input variable declarations. Note that each meta needs
+           to appear before the corresponding declaration in order to be
+           leveraged by prover printers. *)
+        let task =
+          Why3.Task.add_meta task Meta.meta_input [ MAls ls; MAint index ]
+        in
+        let task = Why3.Task.add_tdecl task tdecl in
+        let index = index + 1 in
+        let input_vars = Why3.Term.Mls.add ls index input_vars in
+        ((index, input_vars), task)
+      | Decl { d_node = Dprop (Pgoal, pr, goal); _ } ->
+        let (_, output_vars), goal = do_simplify (0, []) goal in
+        let pr = Why3.Decl.create_prsymbol (Why3.Ident.id_clone pr.pr_name) in
+        let decl = Why3.Decl.create_prop_decl Pgoal pr goal in
+        (* Again, for each output variable, add the meta first, then its actual
+           declaration. *)
+        List.iter output_vars ~f:(fun (out, var) ->
+          ignore out.old_nn;
+          Fmt.epr "%a: %a -> %i: %a@." Why3.Pretty.print_ls var
+            Why3.(Number.print_int_constant Number.full_support)
+            out.old_index out.new_index
+            (Fmt.list ~sep:Fmt.comma Why3.Pretty.print_term)
+            out.old_nn_args);
+        let task =
+          List.fold output_vars ~init:task
+            ~f:(fun task ({ new_index; _ }, output_var) ->
+            let task =
+              Why3.Task.add_meta task Meta.meta_output
+                [ MAls output_var; MAint new_index ]
+            in
+            let decl = Why3.Decl.create_param_decl output_var in
+            Why3.Task.add_decl task decl)
+        in
+        (acc, Why3.Task.add_decl task decl)
+      | Decl { d_node = Dprop (_, _, _); _ } ->
+        (* TODO: check no nn applications *)
+        (acc, Why3.Task.add_tdecl task tdecl)
+      | _ -> (acc, Why3.Task.add_tdecl task tdecl))
+    (0, Why3.Term.Mls.empty) None
+
+(** {2 Old way} *)
+
 (* Creates a list of pairs made of output variables and respective indices in
    the list, for each neural network application to an input vector appearing in
    a task. Such a list stands for the resulting output vector of a neural
    network application to an input vector (ie, something of the form: nn @@ v).
    The creation process is memoized wrt terms corresponding to neural network
    applications to input vectors. *)
-let create_output_vars =
-  let rec do_create mt (term : Term.term) =
+let output_vars =
+  let rec create_output_var mt (term : Why3.Term.term) =
     match term.t_node with
-    | Term.Tapp (ls1 (* @@ *), [ { t_node = Tapp (ls2 (* nn *), _); _ }; _ ])
-      when String.equal ls1.ls_name.id_string (Ident.op_infix "@@") -> (
+    | Why3.Term.Tapp
+        (ls1 (* @@ *), [ { t_node = Tapp (ls2 (* nn *), _); _ }; _ ])
+      when String.equal ls1.ls_name.id_string (Why3.Ident.op_infix "@@") -> (
       match Language.lookup_nn ls2 with
       | Some { nn_nb_outputs; nn_ty_elt; _ } ->
-        if Term.Mterm.mem term mt
+        if Why3.Term.Mterm.mem term mt
         then mt
         else
           let output_vars =
             List.init nn_nb_outputs ~f:(fun index ->
-              (index, Term.create_fsymbol (Ident.id_fresh "y") [] nn_ty_elt))
+              ( index,
+                Why3.Term.create_fsymbol (Why3.Ident.id_fresh "y") [] nn_ty_elt
+              ))
           in
-          Term.Mterm.add term output_vars mt
+          Why3.Term.Mterm.add term output_vars mt
       | None -> mt)
-    | _ -> Term.t_fold do_create mt term
+    | _ -> Why3.Term.t_fold create_output_var mt term
   in
-  Trans.fold_decl
-    (fun decl mt -> Decl.decl_fold do_create mt decl)
-    Term.Mterm.empty
+  Why3.Trans.fold_decl
+    (fun decl mt -> Why3.Decl.decl_fold create_output_var mt decl)
+    Why3.Term.Mterm.empty
 
 (* Simplifies a task goal exhibiting a vector selection on a neural network
    application to an input vector (ie, (nn @@ v)[_]) by the corresponding output
@@ -58,26 +176,27 @@ let create_output_vars =
    all declared, each with a meta that describes the respective index in the
    output vector. *)
 let simplify_nn_application input_vars output_vars =
-  let rec do_simplify (term : Term.term) =
+  let rec do_simplify (term : Why3.Term.term) =
     match term.t_node with
-    | Term.Tapp
+    | Why3.Term.Tapp
         ( ls_get (* [_] *),
           [
             ({ t_node = Tapp (ls_atat (* @@ *), _); _ } as t1);
             ({ t_node = Tconst (ConstInt index); _ } as _t2);
           ] )
-      when String.equal ls_get.ls_name.id_string (Ident.op_get "")
-           && String.equal ls_atat.ls_name.id_string (Ident.op_infix "@@") -> (
-      match Term.Mterm.find_opt t1 output_vars with
-      | None -> Term.t_map do_simplify term
+      when String.equal ls_get.ls_name.id_string (Why3.Ident.op_get "")
+           && String.equal ls_atat.ls_name.id_string (Why3.Ident.op_infix "@@")
+      -> (
+      match Why3.Term.Mterm.find_opt t1 output_vars with
+      | None -> Why3.Term.t_map do_simplify term
       | Some output_vars ->
-        let index = Number.to_small_integer index in
+        let index = Why3.Number.to_small_integer index in
         assert (index < List.length output_vars);
         let ls = Stdlib.List.assoc index output_vars in
-        Term.t_app_infer ls [])
-    | _ -> Term.t_map do_simplify term
+        Why3.Term.t_app_infer ls [])
+    | _ -> Why3.Term.t_map do_simplify term
   in
-  Trans.fold
+  Why3.Trans.fold
     (fun task_hd task ->
       match task_hd.task_decl.td_node with
       | Decl { d_node = Dparam ls; _ } ->
@@ -85,38 +204,40 @@ let simplify_nn_application input_vars output_vars =
            to appear before the corresponding declaration in order to be
            leveraged by prover printers. *)
         let task =
-          match Term.Mls.find_opt ls input_vars with
+          match Why3.Term.Mls.find_opt ls input_vars with
           | None -> task
           | Some index ->
-            Task.add_meta task Meta.meta_input [ MAls ls; MAint index ]
+            Why3.Task.add_meta task Meta.meta_input [ MAls ls; MAint index ]
         in
-        Task.add_tdecl task task_hd.task_decl
+        Why3.Task.add_tdecl task task_hd.task_decl
       | Decl ({ d_node = Dprop (Pgoal, _, _); _ } as decl) ->
-        let decl = Decl.decl_map do_simplify decl in
+        let decl = Why3.Decl.decl_map do_simplify decl in
         let task =
           (* Output variables are not declared yet in the task as they are
              created on the fly for each (different) neural network application
              on an input vector. We add here their declarations in the task. *)
-          Term.Mterm.fold
+          Why3.Term.Mterm.fold
             (fun _t output_vars task ->
               (* Again, for each output variable, add the meta first, then its
                  actual declaration. *)
               List.fold output_vars ~init:task
                 ~f:(fun task (index, output_var) ->
                 let task =
-                  Task.add_meta task Meta.meta_output
+                  Why3.Task.add_meta task Meta.meta_output
                     [ MAls output_var; MAint index ]
                 in
-                let decl = Decl.create_param_decl output_var in
-                Task.add_decl task decl))
+                let decl = Why3.Decl.create_param_decl output_var in
+                Why3.Task.add_decl task decl))
             output_vars task
         in
-        Task.add_decl task decl
+        Why3.Task.add_decl task decl
       | Use _ | Clone _ | Meta _ | Decl _ ->
-        Task.add_tdecl task task_hd.task_decl)
+        Why3.Task.add_tdecl task task_hd.task_decl)
     None
 
 let trans =
-  Trans.bind Utils.input_vars (fun input_vars ->
-    Trans.bind create_output_vars (fun output_vars ->
-      simplify_nn_application input_vars output_vars))
+  Why3.Trans.bind Utils.input_terms (function
+    | Utils.Others -> abstract_nn_term
+    | Vars input_vars ->
+      Why3.Trans.bind output_vars (fun output_vars ->
+        simplify_nn_application input_vars output_vars))
diff --git a/src/transformations/utils.ml b/src/transformations/utils.ml
index 8638ff6..8a1db3a 100644
--- a/src/transformations/utils.ml
+++ b/src/transformations/utils.ml
@@ -22,8 +22,6 @@
 
 open Base
 
-let src = Logs.Src.create "TransformationsUtils" ~doc:"Transformation utils"
-
 let float_of_real_constant rc =
   let is_neg, rc =
     ( Why3.(BigInt.lt rc.Number.rl_real.rv_sig BigInt.zero),
@@ -36,22 +34,26 @@ let float_of_real_constant rc =
 (* Collects input variables and their position inside input vectors. Such
    process is memoized wrt lsymbols corresponding to input vectors. *)
 
-type position_input_vars =
-  (int Why3.Term.Mls.t
-  [@printer
-    fun fmt mls ->
-      Why3.(
-        Pp.print_iter2 Term.Mls.iter Pp.newline Pp.comma Pretty.print_ls Pp.int
-          fmt mls)])
+(* Terms forming vectors in input to models. *)
+type input_terms =
+  | Vars of
+      (int Why3.Term.Mls.t
+      [@printer
+        fun fmt mls ->
+          Why3.(
+            Pp.print_iter2 Term.Mls.iter Pp.newline Pp.comma Pretty.print_ls
+              Pp.int fmt mls)])
+    (* A map from input variable lsymbols to corresponding positions in input
+       vectors. *)
+  | Others (* Generic terms. *)
 [@@deriving show]
 
-let input_vars : position_input_vars Why3.Trans.trans =
+let input_terms : input_terms Why3.Trans.trans =
+  let exception NotInputVariable in
   let hls = Why3.Term.Hls.create 13 in
   let add index mls = function
     | { Why3.Term.t_node = Tapp (ls, []); _ } -> Why3.Term.Mls.add ls index mls
-    | t ->
-      Logging.code_error ~src (fun m ->
-        m "Not an input variable: %a" Why3.Pretty.print_term t)
+    | _ -> raise NotInputVariable
   in
   let rec do_collect mls t =
     match t.Why3.Term.t_node with
@@ -72,8 +74,13 @@ let input_vars : position_input_vars Why3.Trans.trans =
     | _ -> Why3.Term.t_fold do_collect mls t
   in
   Why3.Trans.fold_decl
-    (fun decl mls -> Why3.Decl.decl_fold do_collect mls decl)
-    Why3.Term.Mls.empty
+    (fun decl mls ->
+      match mls with
+      | Vars mls -> (
+        try Vars (Why3.Decl.decl_fold do_collect mls decl)
+        with NotInputVariable -> Others)
+      | Others -> Others)
+    (Vars Why3.Term.Mls.empty)
 
 (* Collects input feature values (these are typically coming from a data point
    of a data set).
diff --git a/src/transformations/utils.mli b/src/transformations/utils.mli
index 63ad5a2..e883d1f 100644
--- a/src/transformations/utils.mli
+++ b/src/transformations/utils.mli
@@ -20,8 +20,13 @@
 (*                                                                        *)
 (**************************************************************************)
 
-type position_input_vars = int Why3.Term.Mls.t [@@deriving show]
-(** Map input variable lsymbols to corresponding position in input vectors. *)
+(** Terms forming vectors in input to models. *)
+type input_terms =
+  | Vars of int Why3.Term.Mls.t
+      (** A map from input variable lsymbols to corresponding positions in input
+          vectors. *)
+  | Others  (** Generic terms. *)
+[@@deriving show]
 
 type features = Float.t Why3.Term.Mls.t [@@deriving show]
 (** Input feature values. *)
@@ -34,7 +39,7 @@ and interval = float option * float option [@@deriving show]
 type label = int [@@deriving show]
 (** Output label. *)
 
-val input_vars : position_input_vars Why3.Trans.trans
+val input_terms : input_terms Why3.Trans.trans
 
 val input_features :
   Why3.Env.env -> vars:Why3.Term.lsymbol list -> features Why3.Trans.trans
diff --git a/src/verification.ml b/src/verification.ml
index 45e6cb6..766524f 100644
--- a/src/verification.ml
+++ b/src/verification.ml
@@ -273,7 +273,12 @@ let answer_saver limit config env config_prover ~proof_strategy task =
           (svm_filename, Language.svm_abstraction_of_string svm_abstraction)
         | Some _ -> assert false (* By construction of the meta. *)
       in
-      let vars = Why3.Term.Mls.keys (Trans.apply Utils.input_vars task) in
+      let vars =
+        match Trans.apply Utils.input_terms task with
+        | Utils.Vars mls -> Why3.Term.Mls.keys mls
+        | Others ->
+          Logging.user_error (fun m -> m "Cannot determine input variables")
+      in
       let dataset : Csv.t =
         let features = Trans.apply (Utils.input_features env ~vars) task in
         let features =
diff --git a/tests/acasxu.t b/tests/acasxu.t
index 3ac4e06..b51a832 100644
--- a/tests/acasxu.t
+++ b/tests/acasxu.t
@@ -158,22 +158,732 @@ Test verify on acasxu
                                 nn_format = Language.NNet }))
   vector,
   5
-  [ERROR]{TransformationsUtils} Not an input variable: div RNE
-                                                       (sub RNE x (19791.0:t))
-                                                       (60261.0:t)
-                                Unrecoverable error: please report the issue at
-                                https://git.frama-c.com/pub/caisar/issues
+  y: 0 -> 0: div RNE (sub RNE x (19791.0:t)) (60261.0:t),
+  div RNE (sub RNE x1 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x2 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x3 (650.0:t)) (1100.0:t),
+  div RNE (sub RNE x4 (600.0:t)) (1200.0:t)
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  0.0 <= x3
+  x3 <= 60760.0
+  -3.141592653589793115997963468544185161590576171875 <= x4
+  x4 <= 3.141592653589793115997963468544185161590576171875
+  -3.141592653589793115997963468544185161590576171875 <= x5
+  x5 <= 3.141592653589793115997963468544185161590576171875
+  100.0 <= x6
+  x6 <= 1200.0
+  0.0 <= x7
+  x7 <= 1200.0
+  55947.6909999999988940544426441192626953125 <= x3
+  1145.0 <= x6
+  x7 <= 60.0
+  y0 <= 3.991125645861615112153231166303157806396484375
+  
+  [DEBUG]{InterpretGoal} Interpreted formula for goal 'test'vc':
+  forall x5:t, x6:t, x7:t, x8:t, x9:t.
+   ((le (0.0:t) x5 /\ le x5 (60760.0:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x6 /\
+     le x6 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x7 /\
+     le x7 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (100.0:t) x8 /\ le x8 (1200.0:t)) /\ le (0.0:t) x9 /\ le x9 (1200.0:t)) /\
+   le (55947.6909999999988940544426441192626953125:t) x5 /\
+   le (1145.0:t) x8 /\ le x9 (60.0:t) ->
+   le
+   (add RNE
+    (mul RNE
+     (nn_nnet
+      @@ vector (div RNE (sub RNE x5 (19791.0:t)) (60261.0:t))
+         (div RNE (sub RNE x6 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x7 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x8 (650.0:t)) (1100.0:t))
+         (div RNE (sub RNE x9 (600.0:t)) (1200.0:t)))
+     [0] (373.9499200000000200816430151462554931640625:t))
+    (7.51888402010059753166615337249822914600372314453125:t))
+   (1500.0:t)
+  nn_nnet,
+  (Interpreter_types.Model
+     (Interpreter_types.NNet, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename = "./TestNetwork.nnet";
+                                nn_format = Language.NNet }))
+  vector,
+  5
+  y1: 0 -> 0: div RNE (sub RNE x5 (19791.0:t)) (60261.0:t),
+  div RNE (sub RNE x6 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x7 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x8 (650.0:t)) (1100.0:t),
+  div RNE (sub RNE x9 (600.0:t)) (1200.0:t)
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  0.0 <= x3
+  x3 <= 60760.0
+  -3.141592653589793115997963468544185161590576171875 <= x4
+  x4 <= 3.141592653589793115997963468544185161590576171875
+  -3.141592653589793115997963468544185161590576171875 <= x5
+  x5 <= 3.141592653589793115997963468544185161590576171875
+  100.0 <= x6
+  x6 <= 1200.0
+  0.0 <= x7
+  x7 <= 1200.0
+  55947.6909999999988940544426441192626953125 <= x3
+  1145.0 <= x6
+  x7 <= 60.0
+  y0 <= 3.991125645861615112153231166303157806396484375
+  
+  [DEBUG]{InterpretGoal} Interpreted formula for goal 'P3':
+  forall x10:t, x11:t, x12:t, x13:t, x14:t.
+   ((le (0.0:t) (add RNE (mul RNE x10 (60261.0:t)) (19791.0:t)) /\
+     le (add RNE (mul RNE x10 (60261.0:t)) (19791.0:t)) (60760.0:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t))
+     (add RNE
+      (mul RNE x11 (6.2831853071800001231395071954466402530670166015625:t))
+      (0.0:t)) /\
+     le
+     (add RNE
+      (mul RNE x11 (6.2831853071800001231395071954466402530670166015625:t))
+      (0.0:t))
+     (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t))
+     (add RNE
+      (mul RNE x12 (6.2831853071800001231395071954466402530670166015625:t))
+      (0.0:t)) /\
+     le
+     (add RNE
+      (mul RNE x12 (6.2831853071800001231395071954466402530670166015625:t))
+      (0.0:t))
+     (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (100.0:t) (add RNE (mul RNE x13 (1100.0:t)) (650.0:t)) /\
+     le (add RNE (mul RNE x13 (1100.0:t)) (650.0:t)) (1200.0:t)) /\
+    le (0.0:t) (add RNE (mul RNE x14 (1200.0:t)) (600.0:t)) /\
+    le (add RNE (mul RNE x14 (1200.0:t)) (600.0:t)) (1200.0:t)) /\
+   ((le (1500.0:t) (add RNE (mul RNE x10 (60261.0:t)) (19791.0:t)) /\
+     le (add RNE (mul RNE x10 (60261.0:t)) (19791.0:t)) (1800.0:t)) /\
+    le (neg (0.059999999999999997779553950749686919152736663818359375:t))
+    (add RNE
+     (mul RNE x11 (6.2831853071800001231395071954466402530670166015625:t))
+     (0.0:t)) /\
+    le
+    (add RNE
+     (mul RNE x11 (6.2831853071800001231395071954466402530670166015625:t))
+     (0.0:t))
+    (0.059999999999999997779553950749686919152736663818359375:t)) /\
+   le (3.100000000000000088817841970012523233890533447265625:t)
+   (add RNE
+    (mul RNE x12 (6.2831853071800001231395071954466402530670166015625:t))
+    (0.0:t)) /\
+   le (980.0:t) (add RNE (mul RNE x13 (1100.0:t)) (650.0:t)) /\
+   le (960.0:t) (add RNE (mul RNE x14 (1200.0:t)) (600.0:t)) ->
+   not (((lt (nn_nnet @@ vector x10 x11 x12 x13 x14)[0]
+          (nn_nnet @@ vector x10 x11 x12 x13 x14)[1] /\
+          lt (nn_nnet @@ vector x10 x11 x12 x13 x14)[0]
+          (nn_nnet @@ vector x10 x11 x12 x13 x14)[2]) /\
+         lt (nn_nnet @@ vector x10 x11 x12 x13 x14)[0]
+         (nn_nnet @@ vector x10 x11 x12 x13 x14)[3]) /\
+        lt (nn_nnet @@ vector x10 x11 x12 x13 x14)[0]
+        (nn_nnet @@ vector x10 x11 x12 x13 x14)[4])
+  nn_nnet,
+  (Interpreter_types.Model
+     (Interpreter_types.NNet, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename = "./TestNetwork.nnet";
+                                nn_format = Language.NNet }))
+  vector,
+  5
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  -0.328421367053318091766556108268559910356998443603515625 <= x0
+  x0 <= 0.67985927880386987087746319957659579813480377197265625
+  -0.499999999999967081887319864108576439321041107177734375 <= x1
+  x1 <= 0.499999999999967081887319864108576439321041107177734375
+  -0.499999999999967081887319864108576439321041107177734375 <= x2
+  x2 <= 0.499999999999967081887319864108576439321041107177734375
+  -0.5 <= x3
+  x3 <= 0.5
+  -0.5 <= x4
+  x4 <= 0.5
+  -0.303529646039727207806890874053351581096649169921875 <= x0
+  x0 <= -0.298551301837009008810497334707179106771945953369140625
+  -0.0095492965855130916563719978285007528029382228851318359375 <= x1
+  x1 <= 0.0095492965855130916563719978285007528029382228851318359375
+  0.493380323584843072382000173092819750308990478515625 <= x2
+  0.299999999999999988897769753748434595763683319091796875 <= x3
+  0.299999999999999988897769753748434595763683319091796875 <= x4
+  (((y0 >= y1 or y0 >= y2) or y0 >= y3) or y0 >= y4)
+  
+  Goal run'vc: Unknown ()
+  Goal test'vc: Unknown ()
+  Goal P3: Unknown ()
+<<<<<<< HEAD
+
+  $ caisar verify --prover PyRAT --prover-altern VNNLIB --ltag=ProverSpec file.mlw 2>&1 | ./filter_tmpdir.sh
+  y: 0 -> 0: div RNE (sub RNE x (19791.0:t)) (60261.0:t),
+  div RNE (sub RNE x1 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x2 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x3 (650.0:t)) (1100.0:t),
+  div RNE (sub RNE x4 (600.0:t)) (1200.0:t)
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; X_5
+  (declare-const X_5 Real)
+  
+  ;; X_6
+  (declare-const X_6 Real)
+  
+  ;; X_7
+  (declare-const X_7 Real)
+  
+  ;; Requires
+  (assert (>= X_3 0.0))
+  (assert (<= X_3 60760.0))
+  (assert (>= X_4 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_4 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_5 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_5 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_6 100.0))
+  (assert (<= X_6 1200.0))
+  (assert (>= X_7 0.0))
+  (assert (<= X_7 1200.0))
+  
+  ;; Requires
+  (assert (>= X_3 55947.6909999999988940544426441192626953125))
+  (assert (>= X_6 1145.0))
+  (assert (<= X_7 60.0))
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Goal run'vc
+  (assert (>= Y_0 3.991125645861615112153231166303157806396484375))
+  
+  y1: 0 -> 0: div RNE (sub RNE x5 (19791.0:t)) (60261.0:t),
+  div RNE (sub RNE x6 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x7 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x8 (650.0:t)) (1100.0:t),
+  div RNE (sub RNE x9 (600.0:t)) (1200.0:t)
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; X_5
+  (declare-const X_5 Real)
+  
+  ;; X_6
+  (declare-const X_6 Real)
+  
+  ;; X_7
+  (declare-const X_7 Real)
+  
+  ;; Requires
+  (assert (>= X_3 0.0))
+  (assert (<= X_3 60760.0))
+  (assert (>= X_4 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_4 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_5 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_5 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_6 100.0))
+  (assert (<= X_6 1200.0))
+  (assert (>= X_7 0.0))
+  (assert (<= X_7 1200.0))
+  
+  ;; Requires
+  (assert (>= X_3 55947.6909999999988940544426441192626953125))
+  (assert (>= X_6 1145.0))
+  (assert (<= X_7 60.0))
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Goal test'vc
+  (assert (>= Y_0 3.991125645861615112153231166303157806396484375))
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; H
+  (assert (>= X_0 -0.328421367053318091766556108268559910356998443603515625))
+  
+  ;; H
+  (assert (<= X_0 0.67985927880386987087746319957659579813480377197265625))
+  
+  ;; H
+  (assert (>= X_1 -0.499999999999967081887319864108576439321041107177734375))
+  
+  ;; H
+  (assert (<= X_1 0.499999999999967081887319864108576439321041107177734375))
+  
+  ;; H
+  (assert (>= X_2 -0.499999999999967081887319864108576439321041107177734375))
+  
+  ;; H
+  (assert (<= X_2 0.499999999999967081887319864108576439321041107177734375))
+  
+  ;; H
+  (assert (>= X_3 -0.5))
+  
+  ;; H
+  (assert (<= X_3 0.5))
+  
+  ;; H
+  (assert (>= X_4 -0.5))
+  
+  ;; H
+  (assert (<= X_4 0.5))
+  
+  ;; H
+  (assert (>= X_0 -0.303529646039727207806890874053351581096649169921875))
+  
+  ;; H
+  (assert (<= X_0 -0.298551301837009008810497334707179106771945953369140625))
+  
+  ;; H
+  (assert (>= X_1 -0.0095492965855130916563719978285007528029382228851318359375))
+  
+  ;; H
+  (assert (<= X_1 0.0095492965855130916563719978285007528029382228851318359375))
+  
+  ;; H
+  (assert (>= X_2 0.493380323584843072382000173092819750308990478515625))
+  
+  ;; H
+  (assert (>= X_3 0.299999999999999988897769753748434595763683319091796875))
+  
+  ;; H
+  (assert (>= X_4 0.299999999999999988897769753748434595763683319091796875))
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Y_1
+  (declare-const Y_1 Real)
+  
+  ;; Y_2
+  (declare-const Y_2 Real)
+  
+  ;; Y_3
+  (declare-const Y_3 Real)
+  
+  ;; Y_4
+  (declare-const Y_4 Real)
+  
+  ;; Goal P3
+  (assert (<= Y_0 Y_1))
+  (assert (<= Y_0 Y_2))
+  (assert (<= Y_0 Y_3))
+  (assert (<= Y_0 Y_4))
+  
+  Goal run'vc: Unknown ()
+  Goal test'vc: Unknown ()
+  Goal P3: Unknown ()
+
+  $ caisar verify --prover Marabou --ltag=ProverSpec file.mlw 2>&1 | ./filter_tmpdir.sh
+  y: 0 -> 0: div RNE (sub RNE x (19791.0:t)) (60261.0:t),
+  div RNE (sub RNE x1 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x2 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x3 (650.0:t)) (1100.0:t),
+  div RNE (sub RNE x4 (600.0:t)) (1200.0:t)
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x3 >= 0.0
+  x3 <= 60760.0
+  x4 >= -3.141592653589793115997963468544185161590576171875
+  x4 <= 3.141592653589793115997963468544185161590576171875
+  x5 >= -3.141592653589793115997963468544185161590576171875
+  x5 <= 3.141592653589793115997963468544185161590576171875
+  x6 >= 100.0
+  x6 <= 1200.0
 
   $ caisar verify --prover PyRAT --prover-altern VNNLIB --ltag=ProverSpec file.mlw 2>&1 | ./filter_tmpdir.sh
-  [ERROR]{TransformationsUtils} Not an input variable: div RNE
-                                                       (sub RNE x (19791.0:t))
-                                                       (60261.0:t)
-                                Unrecoverable error: please report the issue at
-                                https://git.frama-c.com/pub/caisar/issues
+  y: 0 -> 0: div RNE (sub RNE x (19791.0:t)) (60261.0:t),
+  div RNE (sub RNE x1 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x2 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x3 (650.0:t)) (1100.0:t),
+  div RNE (sub RNE x4 (600.0:t)) (1200.0:t)
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; X_5
+  (declare-const X_5 Real)
+  
+  ;; X_6
+  (declare-const X_6 Real)
+  
+  ;; X_7
+  (declare-const X_7 Real)
+  
+  ;; Requires
+  (assert (>= X_3 0.0))
+  (assert (<= X_3 60760.0))
+  (assert (>= X_4 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_4 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_5 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_5 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_6 100.0))
+  (assert (<= X_6 1200.0))
+  (assert (>= X_7 0.0))
+  (assert (<= X_7 1200.0))
+  
+  ;; Requires
+  (assert (>= X_3 55947.6909999999988940544426441192626953125))
+  (assert (>= X_6 1145.0))
+  (assert (<= X_7 60.0))
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Goal run'vc
+  (assert (>= Y_0 3.991125645861615112153231166303157806396484375))
+  
+  y1: 0 -> 0: div RNE (sub RNE x5 (19791.0:t)) (60261.0:t),
+  div RNE (sub RNE x6 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x7 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x8 (650.0:t)) (1100.0:t),
+  div RNE (sub RNE x9 (600.0:t)) (1200.0:t)
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; X_5
+  (declare-const X_5 Real)
+  
+  ;; X_6
+  (declare-const X_6 Real)
+  
+  ;; X_7
+  (declare-const X_7 Real)
+  
+  ;; Requires
+  (assert (>= X_3 0.0))
+  (assert (<= X_3 60760.0))
+  (assert (>= X_4 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_4 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_5 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_5 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_6 100.0))
+  (assert (<= X_6 1200.0))
+  (assert (>= X_7 0.0))
+  (assert (<= X_7 1200.0))
+  
+  ;; Requires
+  (assert (>= X_3 55947.6909999999988940544426441192626953125))
+  (assert (>= X_6 1145.0))
+  (assert (<= X_7 60.0))
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Goal test'vc
+  (assert (>= Y_0 3.991125645861615112153231166303157806396484375))
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; H
+  (assert (>= X_0 -0.328421367053318091766556108268559910356998443603515625))
+  
+  ;; H
+  (assert (<= X_0 0.67985927880386987087746319957659579813480377197265625))
+  
+  ;; H
+  (assert (>= X_1 -0.499999999999967081887319864108576439321041107177734375))
+  
+  ;; H
+  (assert (<= X_1 0.499999999999967081887319864108576439321041107177734375))
+  
+  ;; H
+  (assert (>= X_2 -0.499999999999967081887319864108576439321041107177734375))
+  
+  ;; H
+  (assert (<= X_2 0.499999999999967081887319864108576439321041107177734375))
+  
+  ;; H
+  (assert (>= X_3 -0.5))
+  
+  ;; H
+  (assert (<= X_3 0.5))
+  
+  ;; H
+  (assert (>= X_4 -0.5))
+  
+  ;; H
+  (assert (<= X_4 0.5))
+  
+  ;; H
+  (assert (>= X_0 -0.303529646039727207806890874053351581096649169921875))
+  
+  ;; H
+  (assert (<= X_0 -0.298551301837009008810497334707179106771945953369140625))
+  
+  ;; H
+  (assert (>= X_1 -0.0095492965855130916563719978285007528029382228851318359375))
+  
+  ;; H
+  (assert (<= X_1 0.0095492965855130916563719978285007528029382228851318359375))
+  
+  ;; H
+  (assert (>= X_2 0.493380323584843072382000173092819750308990478515625))
+  
+  ;; H
+  (assert (>= X_3 0.299999999999999988897769753748434595763683319091796875))
+  
+  ;; H
+  (assert (>= X_4 0.299999999999999988897769753748434595763683319091796875))
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Y_1
+  (declare-const Y_1 Real)
+  
+  ;; Y_2
+  (declare-const Y_2 Real)
+  
+  ;; Y_3
+  (declare-const Y_3 Real)
+  
+  ;; Y_4
+  (declare-const Y_4 Real)
+  
+  ;; Goal P3
+  (assert (<= Y_0 Y_1))
+  (assert (<= Y_0 Y_2))
+  (assert (<= Y_0 Y_3))
+  (assert (<= Y_0 Y_4))
+  
+  Goal run'vc: Unknown ()
+  Goal test'vc: Unknown ()
+  Goal P3: Unknown ()
 
   $ caisar verify --prover Marabou --ltag=ProverSpec file.mlw 2>&1 | ./filter_tmpdir.sh
-  [ERROR]{TransformationsUtils} Not an input variable: div RNE
-                                                       (sub RNE x (19791.0:t))
-                                                       (60261.0:t)
-                                Unrecoverable error: please report the issue at
-                                https://git.frama-c.com/pub/caisar/issues
+  y: 0 -> 0: div RNE (sub RNE x (19791.0:t)) (60261.0:t),
+  div RNE (sub RNE x1 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x2 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x3 (650.0:t)) (1100.0:t),
+  div RNE (sub RNE x4 (600.0:t)) (1200.0:t)
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x3 >= 0.0
+  x3 <= 60760.0
+  x4 >= -3.141592653589793115997963468544185161590576171875
+  x4 <= 3.141592653589793115997963468544185161590576171875
+  x5 >= -3.141592653589793115997963468544185161590576171875
+  x5 <= 3.141592653589793115997963468544185161590576171875
+  x6 >= 100.0
+  x6 <= 1200.0
+  x7 >= 0.0
+  x7 <= 1200.0
+  x3 >= 55947.6909999999988940544426441192626953125
+  x6 >= 1145.0
+  x7 <= 60.0
+  y0 >= 3.991125645861615112153231166303157806396484375
+  
+  y1: 0 -> 0: div RNE (sub RNE x5 (19791.0:t)) (60261.0:t),
+  div RNE (sub RNE x6 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x7 (0.0:t))
+  (6.2831853071800001231395071954466402530670166015625:t),
+  div RNE (sub RNE x8 (650.0:t)) (1100.0:t),
+  div RNE (sub RNE x9 (600.0:t)) (1200.0:t)
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x3 >= 0.0
+  x3 <= 60760.0
+  x4 >= -3.141592653589793115997963468544185161590576171875
+  x4 <= 3.141592653589793115997963468544185161590576171875
+  x5 >= -3.141592653589793115997963468544185161590576171875
+  x5 <= 3.141592653589793115997963468544185161590576171875
+  x6 >= 100.0
+  x6 <= 1200.0
+  x7 >= 0.0
+  x7 <= 1200.0
+  x3 >= 55947.6909999999988940544426441192626953125
+  x6 >= 1145.0
+  x7 <= 60.0
+  y0 >= 3.991125645861615112153231166303157806396484375
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= -0.328421367053318091766556108268559910356998443603515625
+  x0 <= 0.67985927880386987087746319957659579813480377197265625
+  x1 >= -0.499999999999967081887319864108576439321041107177734375
+  x1 <= 0.499999999999967081887319864108576439321041107177734375
+  x2 >= -0.499999999999967081887319864108576439321041107177734375
+  x2 <= 0.499999999999967081887319864108576439321041107177734375
+  x3 >= -0.5
+  x3 <= 0.5
+  x4 >= -0.5
+  x4 <= 0.5
+  x0 >= -0.303529646039727207806890874053351581096649169921875
+  x0 <= -0.298551301837009008810497334707179106771945953369140625
+  x1 >= -0.0095492965855130916563719978285007528029382228851318359375
+  x1 <= 0.0095492965855130916563719978285007528029382228851318359375
+  x2 >= 0.493380323584843072382000173092819750308990478515625
+  x3 >= 0.299999999999999988897769753748434595763683319091796875
+  x4 >= 0.299999999999999988897769753748434595763683319091796875
+  +y0 -y1 <= 0
+  +y0 -y2 <= 0
+  +y0 -y3 <= 0
+  +y0 -y4 <= 0
+  
+  Goal run'vc: Unknown ()
+  Goal test'vc: Unknown ()
+  Goal P3: Unknown ()
-- 
GitLab