diff --git a/config/caisar-detection-data.conf b/config/caisar-detection-data.conf
index e3c57bcf181524d465dac2465ebbc600aaa1ca52..0355e67901ae35380c4757534e775b0ace248bbe 100644
--- a/config/caisar-detection-data.conf
+++ b/config/caisar-detection-data.conf
@@ -88,6 +88,18 @@ command = "%e -mp %{nnet-onnx} -pp %f --timeout %t --domain zono --split --score
 driver = "%{config}/drivers/pyrat.drv"
 use_at_auto_level = 1
 
+[ATP pyrat-denorm-acas]
+name = "PyRAT"
+alternative = "ACASd"
+exec = "pyrat.py"
+exec = "pyrat"
+version_switch = "--version"
+version_regexp = "PyRAT \\([0-9.]+\\)"
+version_ok = "1.1"
+command = "%e -mp %{nnet-onnx} -pp %f --timeout %t --domain zono --split --scorer coef --initial"
+driver = "%{config}/drivers/pyrat.drv"
+use_at_auto_level = 1
+
 [ATP nnenum]
 name = "nnenum"
 exec = "nnenum.sh"
diff --git a/examples/acasxu/acasxu.why b/examples/acasxu/acasxu.why
index d1e34e099274df53bf936ceacda1d9bf9e66ba94..07fe0a760052abcbb1abd9cd2a7826d22e8d7d2c 100644
--- a/examples/acasxu/acasxu.why
+++ b/examples/acasxu/acasxu.why
@@ -19,42 +19,44 @@ theory ACASXU
 
   type input = vector t
 
-  constant distance_to_intruder: int = 0
-  constant angle_to_intruder: int = 1
-  constant intruder_heading: int = 2
-  constant speed: int = 3
-  constant intruder_speed: int = 4
+  let constant distance_to_intruder: int = 0
+  let constant angle_to_intruder: int = 1
+  let constant intruder_heading: int = 2
+  let constant speed: int = 3
+  let constant intruder_speed: int = 4
 
   (****************************************************************************)
   (* Meaningful names for actions acting as indices of output vectors. *)
 
   type action = int
 
-  constant clear_of_conflict: action = 0
-  constant weak_left: action = 1
-  constant weak_right: action = 2
-  constant strong_left: action = 3
-  constant strong_right: action = 4
+  let constant clear_of_conflict: action = 0
+  let constant weak_left: action = 1
+  let constant weak_right: action = 2
+  let constant strong_left: action = 3
+  let constant strong_right: action = 4
 
   (****************************************************************************)
   (* Model filename and format.
   
-     The [model_filename] and [is_onnx] constants are meant to be defined from
+     The [model_filename] and [is_onnx] constants are meant to be defined from
      the command-line.
   *)
 
-  constant model_filename: string
-  constant nn: model nn = read_model model_filename
+  val constant model_filename: string
+  let constant nn: model nn = read_model model_filename
 
   (****************************************************************************)
   (* Utilities. *)
 
-  constant pi: t = 3.141592653589793115997963468544185161590576171875000
+  type output = vector t
 
-  function denormalize_t (i: t) (mean: t) (range: t) : t =
+  let constant pi: t = 3.141592653589793115997963468544185161590576171875000
+
+  let function denormalize_t (i: t) (mean: t) (range: t) : t =
     (i .* range) .+ mean
 
-  function denormalize_by_index (idx: int) (t: t) : t =
+  let function denormalize_by_index (idx: int) (t: t) : t =
     if idx = distance_to_intruder then denormalize_t t (19791.0:t) (60261.0:t)
     else if idx = angle_to_intruder then denormalize_t t (0.0:t) (6.2831853071800001231395071954466402530670166015625:t)
     else if idx = intruder_heading then denormalize_t t (0.0:t) (6.2831853071800001231395071954466402530670166015625:t)
@@ -62,12 +64,29 @@ theory ACASXU
     else if idx = intruder_speed then denormalize_t t (600.0:t) (1200.0:t)
     else t
 
-  function denormalize_input (i:input) : input =
+  let function denormalize_input (i: input) : input =
     Vector.mapi i denormalize_by_index
 
-  function denormalize_output (o: t) : t =
+  let function denormalize_output_t (o: t) : t =
     denormalize_t o (7.51888402010059753166615337249822914600372314453125:t) (373.9499200000000200816430151462554931640625:t)
 
+  let function normalize_t (i: t) (mean: t) (range: t) : t =
+    (i .- mean) ./ range
+
+  let function normalize_by_index (idx: int) (t: t) : t =
+    if idx = distance_to_intruder then normalize_t t (19791.0:t) (60261.0:t)
+    else if idx = angle_to_intruder then normalize_t t (0.0:t) (6.2831853071800001231395071954466402530670166015625:t)
+    else if idx = intruder_heading then normalize_t t (0.0:t) (6.2831853071800001231395071954466402530670166015625:t)
+    else if idx = speed then normalize_t t (650.0:t) (1100.0:t)
+    else if idx = intruder_speed then normalize_t t (600.0:t) (1200.0:t)
+    else t
+
+  let function normalize_input (i: input) : input =
+    Vector.mapi i normalize_by_index
+
+  let function normalize_output_t (o: t) : t =
+    normalize_t o (7.51888402010059753166615337249822914600372314453125:t) (373.9499200000000200816430151462554931640625:t)
+
   (****************************************************************************)
   (* Common predicates. *)  
 
@@ -81,15 +100,13 @@ theory ACASXU
   predicate valid_action (a: action) =
     clear_of_conflict <= a <= strong_right
 
-  predicate advises_min_score (nn: model nn) (i: input) (a: action) =
+  predicate advises_min_score (o: output) (a: action) =
     valid_action a ->
-    forall b: action.
-      valid_action b ->  a <> b -> (nn @@ i)[a] .<= (nn @@ i)[b]
+    forall b: action. valid_action b ->  a <> b -> o[a] .<= o[b]
 
-  predicate advises_max_score (nn: model nn) (i: input) (a: action) =
+  predicate advises_max_score (o: output) (a: action) =
     valid_action a ->
-    forall b: action.
-      valid_action b ->  a <> b -> (nn @@ i)[a] .>= (nn @@ i)[b]
+    forall b: action. valid_action b ->  a <> b -> o[a] .>= o[b]
 
   (****************************************************************************)
   (* Property 1.
@@ -109,7 +126,16 @@ theory ACASXU
       let j = denormalize_input i in
       valid_input j /\ intruder_distant_and_slow j ->
       let o = (nn @@ i)[clear_of_conflict] in
-      (denormalize_output o) .<= (1500.0:t)
+      (denormalize_output_t o) .<= (1500.0:t)
+
+  let runP1 (i: input) : t
+    requires { has_length i 5 }
+    requires { valid_input i }
+    requires { intruder_distant_and_slow i }
+    ensures { result .<= (1500.0:t) }  =
+      let j = normalize_input i in
+      let o = (nn @@ j)[clear_of_conflict] in
+      (denormalize_output_t o)
 
   (****************************************************************************)
   (* Property 2.
@@ -123,7 +149,14 @@ theory ACASXU
       has_length i 5 ->
       let j = denormalize_input i in
       valid_input j /\ intruder_distant_and_slow j ->
-      not (advises_max_score nn i clear_of_conflict)
+      not (advises_max_score (nn @@ i) clear_of_conflict)
+
+  let runP2 (i: input) : output
+    requires { has_length i 5 }
+    requires { valid_input i /\ intruder_distant_and_slow i }
+    ensures { not (advises_max_score result clear_of_conflict) } =
+      let j = normalize_input i in
+      nn @@ j
 
   (****************************************************************************)
   (* Property 3.
@@ -146,7 +179,14 @@ theory ACASXU
       has_length i 5 ->
       let j = denormalize_input i in
       valid_input j /\ directly_ahead j /\ moving_towards j ->
-      not (advises_min_score nn i clear_of_conflict)
+      not (advises_min_score (nn @@ i) clear_of_conflict)
+
+  let runP3 (i: input) : output
+    requires { has_length i 5 }
+    requires { valid_input i /\ directly_ahead i /\ moving_towards i }
+    ensures { not (advises_min_score result clear_of_conflict) } =
+      let j = normalize_input i in
+      nn @@ j
 
   (****************************************************************************)
   (* Property 4.
@@ -157,7 +197,7 @@ theory ACASXU
   *)
 
   predicate moving_away (i: input) =
-    i[intruder_heading] .= (0.0:t)
+    i[intruder_heading] .>= (0.0:t) /\ i[intruder_heading] .<= (0.0:t)
     /\ i[speed] .>= (1000.0:t)
     /\ (700.0:t) .<= i[intruder_speed] .<= (800.0:t)
 
@@ -166,7 +206,14 @@ theory ACASXU
       has_length i 5 ->
       let j = denormalize_input i in
       valid_input j /\ directly_ahead j /\ moving_away j ->
-      not (advises_min_score nn i clear_of_conflict)
+      not (advises_min_score (nn @@ i) clear_of_conflict)
+
+  let runP4 (i: input) : output
+    requires { has_length i 5 }
+    requires { valid_input i /\ directly_ahead i /\ moving_away i }
+    ensures { not (advises_min_score result clear_of_conflict) } =
+      let j = normalize_input i in
+      nn @@ j
 
   (****************************************************************************)
   (* Property 5.
@@ -187,7 +234,14 @@ theory ACASXU
       has_length i 5 ->
       let j = denormalize_input i in
       valid_input j /\ near_and_approaching_from_left j ->
-      advises_min_score nn i strong_right
+      advises_min_score (nn @@ i) strong_right
+
+  let runP5 (i: input) : output
+    requires { has_length i 5 }
+    requires { valid_input i /\ near_and_approaching_from_left i }
+    ensures { advises_min_score result strong_right } =
+      let j = normalize_input i in
+      nn @@ j
 
   (****************************************************************************)
   (* Property 6.
@@ -209,7 +263,14 @@ theory ACASXU
       has_length i 5 ->
       let j = denormalize_input i in
       valid_input j /\ intruder_far_away j ->
-      advises_min_score nn i clear_of_conflict
+      advises_min_score (nn @@ i) clear_of_conflict
+
+  let runP6 (i: input) : output
+    requires { has_length i 5 }
+    requires { valid_input i /\ intruder_far_away i }
+    ensures { advises_min_score result clear_of_conflict } =
+      let j = normalize_input i in
+      nn @@ j
 
   (****************************************************************************)
   (* Property 7.
@@ -230,8 +291,15 @@ theory ACASXU
       has_length i 5 ->
       let j = denormalize_input i in
       valid_input j /\ large_vertical_separation j ->
-      not (advises_min_score nn i strong_left)
-      /\ not (advises_min_score nn i strong_right)
+      not (advises_min_score (nn @@ i) strong_left)
+      /\ not (advises_min_score (nn @@ i) strong_right)
+
+  let runP7 (i: input) : output
+    requires { has_length i 5 }
+    requires { valid_input i /\ large_vertical_separation i }
+    ensures { not (advises_min_score result strong_left) /\ not (advises_min_score result strong_right) } =
+      let j = normalize_input i in
+      nn @@ j
 
   (****************************************************************************)
   (* Property 8.
@@ -253,8 +321,15 @@ theory ACASXU
       has_length i 5 ->
       let j = denormalize_input i in
       valid_input j /\ large_vertical_separation_and_previous_weak_left j ->
-      (advises_min_score nn i weak_left)
-      \/ (advises_min_score nn i clear_of_conflict)
+      (advises_min_score (nn @@ i) weak_left)
+      \/ (advises_min_score (nn @@ i) clear_of_conflict)
+
+  let runP8 (i: input) : output
+    requires { has_length i 5 }
+    requires { valid_input i /\ large_vertical_separation_and_previous_weak_left i }
+    ensures { (advises_min_score result weak_left) \/ (advises_min_score result clear_of_conflict) } =
+      let j = normalize_input i in
+      nn @@ j
 
   (****************************************************************************)
   (* Property 9.
@@ -276,7 +351,14 @@ theory ACASXU
       has_length i 5 ->
       let j = denormalize_input i in
       valid_input j /\ previous_weak_right_and_nearby_intruder j ->
-      advises_min_score nn i strong_left
+      advises_min_score (nn @@ i) strong_left
+
+  let runP9 (i: input) : output
+    requires { has_length i 5 }
+    requires { valid_input i /\ previous_weak_right_and_nearby_intruder i }
+    ensures { (advises_min_score result strong_left) } =
+      let j = normalize_input i in
+      nn @@ j
 
   (****************************************************************************)
   (* Property 10.
@@ -296,6 +378,13 @@ theory ACASXU
       has_length i 5 ->
       let j = denormalize_input i in
       valid_input j /\ intruder_far_away_2 j ->
-      advises_min_score nn i clear_of_conflict
+      advises_min_score (nn @@ i) clear_of_conflict
+
+  let runP10 (i: input) : output
+    requires { has_length i 5 }
+    requires { valid_input i /\ intruder_far_away_2 i }
+    ensures { (advises_min_score result clear_of_conflict) } =
+      let j = normalize_input i in
+      nn @@ j
 
 end
diff --git a/flake.nix b/flake.nix
index dadbee743ff35b949aee6d08d667cd2cecd9ca2f..64f7be9a6eb472cf3c38f9dc9afe0191776ce14c 100644
--- a/flake.nix
+++ b/flake.nix
@@ -25,6 +25,7 @@
             "dune-project"
             "Makefile"
             (nix-filter.lib.inDirectory "src")
+            (nix-filter.lib.inDirectory "examples")
             (nix-filter.lib.inDirectory "lib")
             (nix-filter.lib.inDirectory "tests")
             (nix-filter.lib.inDirectory "config")
diff --git a/lib/ir/dune b/lib/ir/dune
index 4b24a3c65ed0361e03b848784b354903c0435023..14a9bd69afbf4d9e5a36e6dc4b88b4ef669ddcc7 100644
--- a/lib/ir/dune
+++ b/lib/ir/dune
@@ -11,7 +11,7 @@
    ppx_deriving.iter
    ppx_deriving.fold))
  (inline_tests)
- (libraries base ocamlgraph fmt stdio caisar_logging))
+ (libraries base ocamlgraph fmt stdio unix caisar_logging))
 
 (env
  (dev
diff --git a/lib/ir/nier_cfg.mli b/lib/ir/nier_cfg.mli
index 53fc17c6aade09de87416cd096b3017f4870217e..acacd98ac6781091b86950638de57e4d9f8b1bc1 100644
--- a/lib/ir/nier_cfg.mli
+++ b/lib/ir/nier_cfg.mli
@@ -100,7 +100,7 @@ module Node : sig
     | Gather
     | ReduceSum
     | GatherND
-    | RandomNormal 
+    | RandomNormal
     | Abs
     | Log
 
diff --git a/lib/ir/nier_simple.ml b/lib/ir/nier_simple.ml
index 472fb3859452f28291cd02e4ce8753248e985b14..c179160172e25e44e97ef583e3918a98324f7aff 100644
--- a/lib/ir/nier_simple.ml
+++ b/lib/ir/nier_simple.ml
@@ -108,6 +108,13 @@ module GenTensor = struct
 
   let create_1_float f = Float (Tensor.create_1_float f)
   let create_1_int64 i = Int64 (Tensor.create_1_int64 i)
+
+  let of_int_array a =
+    Int64
+      (Tensor.of_array1
+         (Shape.of_array [| Array.length a |])
+         (Bigarray.Array1.of_array Int64 C_layout (Array.map a ~f:Int64.of_int)))
+
   let shape = function Float f -> Tensor.shape f | Int64 i -> Tensor.shape i
 end
 
@@ -160,7 +167,6 @@ type descr =
   | Reshape of {
       input : node;
       shape : node; (* data int64 *)
-      allowzero : int;
     }
   | Flatten of {
       input : node;
@@ -203,13 +209,20 @@ and node = {
   id : int;
   descr : descr;
   shape : Shape.t;
+  ty : ty;
 }
 
+and ty =
+  | Float
+  | Int64
+
 let pp_descr fmt p =
   match p with
   | Input { shape } -> Fmt.pf fmt "Input: %a" Shape.pp shape
   | Transpose { perm; _ } ->
     Fmt.pf fmt "Transpose: [%a]" Fmt.(list ~sep:semi int) perm
+  | Constant { data = Int64 b } when Shape.size (Tensor.shape b) < 3 ->
+    Fmt.pf fmt "Constant[%a]" Fmt.(list ~sep:comma int64) (Tensor.flatten b)
   | Constant _ -> Fmt.pf fmt "Constant"
   | Add _ -> Fmt.pf fmt "Add"
   | Sub _ -> Fmt.pf fmt "Sub"
@@ -226,7 +239,7 @@ let pp_descr fmt p =
   | Flatten _ -> Fmt.pf fmt "Flatten"
   | Identity _ -> Fmt.pf fmt "Identity"
   | RW_Linearized_ReLu -> Fmt.pf fmt "RW_Linearized_ReLu"
-  | Concat _ -> Fmt.pf fmt "Concat"
+  | Concat { axis; _ } -> Fmt.pf fmt "Concat[%i]" axis
   | Gather _ -> Fmt.pf fmt "Gather"
   | ReduceSum _ -> Fmt.pf fmt "ReduceSum"
   | GatherND _ -> Fmt.pf fmt "GatherND"
@@ -357,19 +370,133 @@ module Node = struct
         (check_matmul_size_ab
            ~a_sh:(Shape.to_list (compute_shape input1))
            ~b_sh:(Shape.to_list (compute_shape input2)))
-    | n -> failwith (Fmt.str "todo compute shape : %a" pp_descr n)
+    | Reshape { input; shape; _ } ->
+      let shape =
+        match shape.descr with
+        | Constant { data = Int64 a } ->
+          List.map ~f:Int64.to_int_exn (Tensor.flatten a)
+        | _ ->
+          (* Some constant propagation could be useful in some cases eg. patch-1
+             VNNcomp *)
+          failwith "non-constant shape in reshape not supported"
+      in
+      List.iter shape ~f:(function
+        | -1 | 0 -> failwith "not implemented 0 -1 in shape for reshape"
+        | _ -> ());
+      let out = Shape.of_list shape in
+      if Shape.size out <> Shape.size input.shape
+      then
+        failwith
+          "Reshape: shape of input and shape given have not the same number of \
+           elements";
+      out
+    | Gemm { inputA; inputB; inputC = _; alpha = _; beta = _; transA; transB }
+      ->
+      let rank2 i =
+        match Shape.to_array_unsafe i.shape with
+        | [| k; n |] -> (k, n)
+        | _ -> failwith "Gemm input must be of size 2"
+      in
+      let tr trans (k, n) = if trans then (n, k) else (k, n) in
+      let a1, a2 = tr transA @@ rank2 inputA in
+      let b1, b2 = tr transB @@ rank2 inputB in
+      if not (Int.equal a2 b1)
+      then
+        Fmt.failwith "Gemm (M:%i,K:%i) (K:%i,N:%i) -> (M:%i,N:%i)" a1 a2 b1 b2
+          a1 b2;
+      Shape.of_array [| a1; b2 |]
+    | ( LogSoftmax | Squeeze _ | MaxPool | Conv | Identity _
+      | RW_Linearized_ReLu | ReduceSum _ | GatherND _ | RandomNormal _ | Abs _
+      | Log _ ) as n ->
+      failwith (Fmt.str "todo compute shape : %a" pp_descr n)
+
+  let compute_ty : _ -> ty = function
+    | Constant { data = Float _ } -> Float
+    | Constant { data = Int64 _ } -> Int64
+    | _ -> Float
 
   let create =
     let c = ref (-1) in
     fun descr ->
       Int.incr c;
-      { id = !c; descr; shape = compute_shape_descr descr }
+      {
+        id = !c;
+        descr;
+        shape = compute_shape_descr descr;
+        ty = compute_ty descr;
+      }
+
+  let constant_int_array a =
+    create (Constant { data = GenTensor.of_int_array a })
+
+  let reshape shape node =
+    if Shape.equal node.shape shape
+    then node
+    else
+      create
+        (Reshape
+           { input = node; shape = constant_int_array (Shape.to_array shape) })
 
-  let gather_int input i =
-    let indices =
-      create (Constant { data = GenTensor.create_1_int64 (Int64.of_int i) })
+  let gather_int_as_matmul input i =
+    let input1 =
+      reshape (Shape.of_array [| 1; Shape.size input.shape |]) input
+    in
+    let selector = Array.create ~len:(Shape.size input1.shape) Float.zero in
+    Array.set selector i Float.one;
+    let selector =
+      GenTensor.Float
+        (Tensor.of_array1
+           (Shape.of_array [| Array.length selector; 1 |])
+           (Bigarray.Array1.of_array Float64 C_layout selector))
     in
-    create (Gather { input; indices; axis = 0 })
+    let input2 = create (Constant { data = selector }) in
+    let result = create (Matmul { input1; input2 }) in
+    reshape (Shape.of_array [| 1 |]) result
+
+  let gather_int ?(encode = true) input i =
+    if encode
+    then gather_int_as_matmul input i
+    else
+      let indices =
+        create (Constant { data = GenTensor.create_1_int64 (Int64.of_int i) })
+      in
+      create (Gather { input; indices; axis = 0 })
+
+  let mul_float input f =
+    let input1 = reshape (Shape.of_array [| 1; 1 |]) input in
+    let f = Array.create ~len:1 f in
+    let f =
+      GenTensor.Float
+        (Tensor.of_array1
+           (Shape.of_array [| Array.length f; 1 |])
+           (Bigarray.Array1.of_array Float64 C_layout f))
+    in
+    let input2 = create (Constant { data = f }) in
+    let result = create (Matmul { input1; input2 }) in
+    reshape (Shape.of_array [| 1 |]) result
+
+  let div_float ?(encode = true) input f =
+    if encode
+    then
+      let f = Float.one /. f in
+      mul_float input f
+    else
+      let input1 = reshape (Shape.of_array [| 1; 1 |]) input in
+      let f = Array.create ~len:1 f in
+      let f =
+        GenTensor.Float
+          (Tensor.of_array1
+             (Shape.of_array [| Array.length f; 1 |])
+             (Bigarray.Array1.of_array Float64 C_layout f))
+      in
+      let input2 = create (Constant { data = f }) in
+      let result = create (Div { input1; input2 }) in
+      reshape (Shape.of_array [| 1 |]) result
+
+  let concat_0 = function
+    | [ n ] -> n
+    | [] -> failwith "empty concat"
+    | inputs -> create (Concat { inputs; axis = 0 })
 
   let preds node =
     match node.descr with
@@ -393,7 +520,7 @@ module Node = struct
     | Gemm { inputA; inputB; inputC = Some x; _ } -> [ inputA; inputB; x ]
     | Gemm { inputA; inputB; inputC = None; _ } -> [ inputA; inputB ]
     | Squeeze { data; _ } -> [ data ]
-    | Reshape { input; _ } -> [ input ]
+    | Reshape { input; shape; _ } -> [ input; shape ]
     | LogSoftmax | MaxPool | Conv | RW_Linearized_ReLu -> []
 
   let map f n =
@@ -497,9 +624,17 @@ let pp fmt t = iter_vertex (fun v -> Fmt.pf fmt "@[%a@]@ " pp_descr v.descr) t
 
 let pp_debug fmt t =
   iter_vertex
-    (fun v -> Fmt.pf fmt "@[%a : %a@]@ " pp_descr v.descr Shape.pp v.shape)
+    (fun v ->
+      Fmt.pf fmt "@[%i: %a(%a) : %a@]@ " v.id pp_descr v.descr
+        Fmt.(list ~sep:comma (using (fun x -> x.id) int))
+        (Node.preds v) Shape.pp v.shape)
     t
 
+let nodes t =
+  let l = ref [] in
+  iter_vertex (fun v -> l := v :: !l) t;
+  !l
+
 module M = Graph.Topological.Make
 
 module GFloat = struct
@@ -544,3 +679,17 @@ module Dot = Graph.Graphviz.Dot (struct
   let default_vertex_attributes _ = []
   let graph_attributes _ = []
 end)
+
+let grapheasy g =
+  try
+    let cin, cout =
+      Unix.open_process_args "graph-easy"
+        [| "graph-easy"; "--from=graphviz"; "--as=boxart" |]
+    in
+    Dot.output_graph cout g;
+    Stdlib.close_out cout;
+    let ascii = Stdio.In_channel.input_all cin in
+    ignore (Unix.close_process (cin, cout));
+    ascii
+  with exn ->
+    Fmt.str "Error graph-easy call: %s" (Stdlib.Printexc.to_string exn)
diff --git a/lib/ir/nier_simple.mli b/lib/ir/nier_simple.mli
index 30792f91d1ca39bf3385cf6903b6cab91ad0c8ab..53d8888f06fc162a00b555d53c04daf126f05e50 100644
--- a/lib/ir/nier_simple.mli
+++ b/lib/ir/nier_simple.mli
@@ -28,7 +28,7 @@ module Tensor : sig
   val of_array1 :
     Shape.t -> ('a, 'b, Bigarray.c_layout) Bigarray.Array1.t -> ('a, 'b) t
 
-  val get: ('a, 'b) t -> int array -> 'a
+  val get : ('a, 'b) t -> int array -> 'a
 end
 
 module GenTensor : sig
@@ -39,6 +39,7 @@ module GenTensor : sig
   val create_1_float : float -> t
   val create_1_int64 : int64 -> t
   val shape : t -> Shape.t
+  val of_int_array : int array -> t
 end
 
 type descr =
@@ -89,7 +90,6 @@ type descr =
   | Reshape of {
       input : node;
       shape : node; (* int64 *)
-      allowzero : int;
     }
   | Flatten of {
       input : node;
@@ -129,49 +129,76 @@ type descr =
   | Log of { input : node }
 
 and node = private {
-  id : int;
+  id : int; (* unique identifier *)
   descr : descr;
   shape : Shape.t;
+  ty : ty;
 }
 
+and ty =
+  | Float
+  | Int64
+
 module Node : sig
   type t = node [@@deriving show]
 
-  val equal : node -> node -> bool
+  val equal : t -> t -> bool
 
   include Base.Hashtbl.Key.S with type t := t
   include Base.Comparator.S with type t := t
 
-  val create : descr -> node
-  val gather_int : node -> int -> node
+  val create : descr -> t
+
+  val gather_int : ?encode:bool -> t -> int -> t
+  (** create a node by selection at a given index. *)
+  (* Implemented via a [Matmul] if [encode] (true by default).
+
+     TODO: [encode] should not be a parameter, rather depend on prover. *)
+
+  val mul_float : t -> float -> t
+  (* Implemented via a [Matmul]. *)
+
+  val div_float : ?encode:bool -> t -> float -> t
+  (* Implemented via a [Matmul] if [encode] (true by default).
 
-  val map : (node -> node) -> node -> node
+     TODO: [encode] should not be a parameter, rather depend on prover. *)
+
+  val constant_int_array : int array -> t
+  (** create a node for a constant array *)
+
+  val reshape : Shape.t -> t -> t
+  (** create if necessary a reshape node *)
+
+  val concat_0 : t list -> t
+  (** create if necessary a concat node for the first axis *)
+
+  val map : (t -> t) -> t -> t
   (** [map f n] replace the direct inputs [i] of n by [f i] *)
 
-  val map_rec : (node -> node) -> node -> node
+  val map_rec : (t -> t) -> t -> t
   (** [map_rec f n] replace top-bottom the nodes [i] accessible from [n] by
       [f i] *)
 
-  val replace_input : (unit -> node) -> node -> node
+  val replace_input : (unit -> t) -> t -> t
   (** [replace_input f n] replace the input in [n] by [f ()] *)
 
-  val preds : node -> node list
+  val preds : t -> t list
   (** Direct predecessors of a node *)
 
   val iter_rec : (t -> unit) -> t -> unit
   (** Iterate on the predecessors of a node and itself. Repect topological
       order. *)
 
-  val compute_shape : node -> Shape.t
+  val compute_shape : t -> Shape.t
 end
 
 type t
 
-val pp: t Fmt.t
-val pp_debug: t Fmt.t
+val pp : t Fmt.t
+val pp_debug : t Fmt.t
 
 val create : node -> t
-(** Create a network from its output node *)
+(** Create a network from its output node, it must have only one input *)
 
 val output : t -> node
 (** Output node of the network *)
@@ -184,6 +211,7 @@ val succs : t -> node -> node list
 
 val iter_vertex : (Node.t -> unit) -> t -> unit
 val iter_succ : (Node.t -> unit) -> t -> Node.t -> unit
+val nodes : t -> Node.t list
 
 (** Respect some OcamlGraph signature *)
 module GFloat : sig
@@ -217,3 +245,6 @@ module Dot : sig
   val fprint_graph : Format.formatter -> GFloat.t -> unit
   val output_graph : out_channel -> GFloat.t -> unit
 end
+
+val grapheasy: t -> string
+(** Return an ASCII representation of the graph using the "graph-easy" external command *)
\ No newline at end of file
diff --git a/lib/onnx/onnx.ml b/lib/onnx/onnx.ml
index 2db87cdfdf62683b5d5c8113ce0ae9d23b752394..a647157b3620cccba1e603083b080d20b190be01 100644
--- a/lib/onnx/onnx.ml
+++ b/lib/onnx/onnx.ml
@@ -691,4 +691,4 @@ let write nier filename =
   write_nier_to_onnx nier out_chan;
   Stdlib.close_out out_chan
 
-module Simple = Simple
\ No newline at end of file
+module Simple = Simple
diff --git a/lib/onnx/onnx.mli b/lib/onnx/onnx.mli
index a618adb0e859b0a456f0e8298bf671a96fa5aecc..d7e52c2872088defca6c63cf1ffc5ed3bb566788 100644
--- a/lib/onnx/onnx.mli
+++ b/lib/onnx/onnx.mli
@@ -35,4 +35,4 @@ val parse : string -> (t, string) Result.t
 val write : G.t -> string -> unit
 (** Write a NIER into an ONNX file. *)
 
-module Simple = Simple
\ No newline at end of file
+module Simple = Simple
diff --git a/lib/onnx/simple.ml b/lib/onnx/simple.ml
index 5841e0a7479685187d9c94f7a65286af5ea82c75..268ac5469fe49e3a76f6206fdd669f2cc5683dc7 100644
--- a/lib/onnx/simple.ml
+++ b/lib/onnx/simple.ml
@@ -194,37 +194,50 @@ end = struct
             (module String)
             (List.map ~f:(fun a -> (Option.value_exn a.name, a)) n.attribute)
         in
-        let get_float name : float =
-          match Hashtbl.find_exn attrs name with
-          | { type' = Some AttributeProto.AttributeType.FLOAT; f = Some f; _ }
-            ->
-            f
-          | _ -> failwith "Attribute wrongly typed"
+        let get_attr ?default name m =
+          match Hashtbl.find attrs name with
+          | Some v -> m v
+          | None -> (
+            match default with
+            | Some v -> v
+            | None -> Fmt.failwith "Required attribute %s missing" name)
         in
-        let get_int name : int =
-          match Hashtbl.find_exn attrs name with
-          | { type' = Some AttributeProto.AttributeType.INT; i = Some i; _ } ->
-            Int64.to_int_exn i
-          | _ -> failwith "Attribute wrongly typed"
+        let get_float ?default name : float =
+          get_attr ?default name (function
+            | { type' = Some AttributeProto.AttributeType.FLOAT; f = Some f; _ }
+              ->
+              f
+            | _ -> failwith "Attribute wrongly typed")
         in
-        let get_ints name : int list =
-          match Hashtbl.find_exn attrs name with
-          | { type' = Some AttributeProto.AttributeType.INTS; ints = l; _ } ->
-            List.map ~f:Int64.to_int_exn l
-          | _ -> failwith "Attribute wrongly typed"
+        let get_int ?default name : int =
+          get_attr ?default name (function
+            | { type' = Some AttributeProto.AttributeType.INT; i = Some i; _ }
+              ->
+              Int64.to_int_exn i
+            | _ -> failwith "Attribute wrongly typed")
         in
-        let get_bool name : bool =
-          match Hashtbl.find_exn attrs name with
-          | { type' = Some AttributeProto.AttributeType.INT; i = Some i; _ } ->
-            not (Int64.equal i 0L)
-          | _ -> failwith "Attribute wrongly typed"
+        let get_ints ?default name : int list =
+          get_attr ?default name (function
+            | { type' = Some AttributeProto.AttributeType.INTS; ints = l; _ } ->
+              List.map ~f:Int64.to_int_exn l
+            | _ -> failwith "Attribute wrongly typed")
         in
-        let get_tensor name : Ir.Nier_simple.GenTensor.t =
-          match Hashtbl.find_exn attrs name with
-          | { type' = Some AttributeProto.AttributeType.TENSOR; t = Some t; _ }
-            ->
-            convert_tensor t
-          | _ -> failwith "Attribute wrongly typed"
+        let get_bool ?default name : bool =
+          get_attr ?default name (function
+            | { type' = Some AttributeProto.AttributeType.INT; i = Some i; _ }
+              ->
+              not (Int64.equal i 0L)
+            | _ -> failwith "Attribute wrongly typed")
+        in
+        let get_tensor ?default name : Ir.Nier_simple.GenTensor.t =
+          get_attr ?default name (function
+            | {
+                type' = Some AttributeProto.AttributeType.TENSOR;
+                t = Some t;
+                _;
+              } ->
+              convert_tensor t
+            | _ -> failwith "Attribute wrongly typed")
         in
         let n' =
           match n.op_type with
@@ -266,10 +279,10 @@ end = struct
                   inputA = convert inputA;
                   inputB = convert inputB;
                   inputC = Option.map ~f:convert inputC;
-                  alpha = get_float "alpha";
-                  beta = get_float "beta";
-                  transA = get_bool "transA";
-                  transB = get_bool "transB";
+                  alpha = get_float ~default:1.0 "alpha";
+                  beta = get_float ~default:1.0 "beta";
+                  transA = get_bool ~default:false "transA";
+                  transB = get_bool ~default:false "transB";
                 }
             | "LogSoftmax" -> Ir.Nier_simple.LogSoftmax
             | "Transpose" ->
@@ -355,17 +368,20 @@ let parse filename =
     ~finally:(fun () -> Stdlib.close_in in_channel)
     (fun () -> parse_in_channel in_channel)
 
-let value_info_from_tensor_shape ~name ~shape =
+let value_info_from_tensor_shape ~name (n : Ir.Nier_simple.Node.t) =
   let open Oproto.Onnx in
   let dim =
-    List.map (Ir.Nier_simple.Shape.to_list shape) ~f:(fun i ->
+    List.map (Ir.Nier_simple.Shape.to_list n.shape) ~f:(fun i ->
       TensorShapeProto.Dimension.make ~value:(`Dim_value (Int64.of_int i)) ())
   in
   let shape = TensorShapeProto.make ~dim () in
+  let ty : TensorProto.DataType.t =
+    match n.ty with Float -> FLOAT | Int64 -> INT64
+  in
   let value =
     `Tensor_type
       (TypeProto.Tensor.make
-         ~elem_type:AttributeProto.AttributeType.(to_int FLOAT)
+         ~elem_type:TensorProto.DataType.(to_int ty)
          ~shape ())
   in
   let type' = TypeProto.make ~value () in
@@ -388,12 +404,12 @@ let convert_into_tensor ?name (t : Ir.Nier_simple.GenTensor.t) =
 let default_opset_import =
   let open Oproto.Onnx in
   let onnx_domain = "" in
-  OperatorSetIdProto.make ~domain:onnx_domain ~version:8L ()
+  OperatorSetIdProto.make ~domain:onnx_domain ~version:13L ()
 
 let nier_simple_to_onnx_protoc (nier_simple : Ir.Nier_simple.GFloat.t) =
   let open Oproto.Onnx in
   let get_name (v : Ir.Nier_simple.Node.t) = Int.to_string v.id in
-  let protocs, (input, input_shape) =
+  let protocs, input =
     let acc = Queue.create () in
     let g_input = ref None in
     let vertex_to_protoc (v : Ir.Nier_simple.Node.t) =
@@ -417,10 +433,10 @@ let nier_simple_to_onnx_protoc (nier_simple : Ir.Nier_simple.GFloat.t) =
       let mk_tensor name t = AttributeProto.make ~name ~type':TENSOR ~t () in
       match v.descr with
       | Gemm _ | LogSoftmax | Transpose _ | Squeeze _ | MaxPool | Conv
-      | Reshape _ | Identity _ | RW_Linearized_ReLu | GatherND _ | ReduceSum _
-        ->
-        assert false
+      | Identity _ | RW_Linearized_ReLu | GatherND _ | ReduceSum _ ->
+        failwith (Fmt.str "Not implemented export: %a" Ir.Nier_simple.Node.pp v)
       | Flatten { axis; _ } -> make "Flatten" [ mk_int "axis" axis ]
+      | Reshape _ -> make "Reshape" []
       | Constant { data } ->
         let data = convert_into_tensor data in
         make "Constant" [ mk_tensor "value" data ]
@@ -430,7 +446,7 @@ let nier_simple_to_onnx_protoc (nier_simple : Ir.Nier_simple.GFloat.t) =
       | Div _ -> make "Div" []
       | Matmul _ -> make "MatMul" []
       | ReLu _ -> make "Relu" []
-      | Input { shape } -> g_input := Some (v, shape)
+      | Input _ -> g_input := Some v
       | Concat { axis; _ } -> make "Concat" [ mk_int "axis" axis ]
       | Gather { axis; _ } -> make "Gather" [ mk_int "axis" axis ]
       | Abs _ -> make "Abs" []
@@ -452,20 +468,19 @@ let nier_simple_to_onnx_protoc (nier_simple : Ir.Nier_simple.GFloat.t) =
     "This ONNX model was generated from the Neural Intermediate Representation \
      of CAISAR"
   in
-  let input =
-    [ value_info_from_tensor_shape ~name:(get_name input) ~shape:input_shape ]
-  in
+  let input = [ value_info_from_tensor_shape ~name:(get_name input) input ] in
   let output =
     let output = Ir.Nier_simple.output nier_simple in
-    [
-      value_info_from_tensor_shape ~name:(get_name output)
-        ~shape:(Ir.Nier_simple.Node.compute_shape output);
-    ]
+    [ value_info_from_tensor_shape ~name:(get_name output) output ]
+  in
+  let value_info =
+    List.map (Ir.Nier_simple.nodes nier_simple) ~f:(fun v ->
+      value_info_from_tensor_shape ~name:(get_name v) v)
   in
   let protog =
     GraphProto.make ~name:"ONNX CAISAR Export" ~node:protocs ~initializer':[]
       ~sparse_initializer:[] ~doc_string:"ONNX graph generated from CAISAR NIER"
-      ~input ~output ~value_info:[] ~quantization_annotation:[] ()
+      ~input ~output ~value_info ~quantization_annotation:[] ()
   in
   let protom =
     Oproto.Onnx.ModelProto.make ~ir_version:8L
diff --git a/lib/onnx/tests/dune b/lib/onnx/tests/dune
index 2478d04a4424f2848b7966b67de3ef7f153b89cc..14925791b345e145bcc6b6891ad56d890e9a2606 100644
--- a/lib/onnx/tests/dune
+++ b/lib/onnx/tests/dune
@@ -1,6 +1,6 @@
-
-(tests (names print)
+(tests
+ (names print)
  (libraries caisar.onnx caisar.ir unix)
- (deps ../../../examples/acasxu/nets/onnx/ACASXU_1_1.onnx
-       ../../../tests/bin/inspect_onnx.py)
-)
\ No newline at end of file
+ (deps
+  ../../../examples/acasxu/nets/onnx/ACASXU_1_1.onnx
+  ../../../tests/bin/inspect_onnx.py))
diff --git a/lib/onnx/tests/print.expected b/lib/onnx/tests/print.expected
index e5444a6f08f881a2a427ccc2df25ac60c6d317cc..8301cf07a9742a3a3836b4e637d2d2c9dd20854a 100644
--- a/lib/onnx/tests/print.expected
+++ b/lib/onnx/tests/print.expected
@@ -1,4 +1,6 @@
 true
 ok
 ok
-Input name 0
+out/test.onnx has 1 input nodes
+{'name': '0', 'type': {'tensorType': {'elemType': 1, 'shape': {'dim': [{'dimValue': '1'}, {'dimValue': '1'}, {'dimValue': '1'}, {'dimValue': '5'}]}}}}
+1 files checked
diff --git a/src/convert_xgboost.ml b/src/convert_xgboost.ml
index 34856a41fca089890c07075ba9ade22c8c3efdde..86e1db0a80c79bf210b701505a24eea0d8ba9024 100644
--- a/src/convert_xgboost.ml
+++ b/src/convert_xgboost.ml
@@ -71,7 +71,7 @@ let convert_model env (model : Caisar_xgboost.Parser.t) task =
   let rec term_of_tree : Caisar_xgboost.Tree.tree -> Why3.Term.term = function
     | Leaf { leaf_value } ->
       Why3.Term.t_const
-        (Dataset.real_constant_of_float leaf_value)
+        (Utils.real_constant_of_float leaf_value)
         Why3.Ty.ty_real
     | Split { split_indice; split_condition; left; right; missing = `Left } ->
       let var =
@@ -80,7 +80,7 @@ let convert_model env (model : Caisar_xgboost.Parser.t) task =
       let missing = Why3.Term.ps_app variables.(split_indice).missing [] in
       let value =
         Why3.Term.t_const
-          (Dataset.real_constant_of_float split_condition)
+          (Utils.real_constant_of_float split_condition)
           Why3.Ty.ty_real
       in
       let cond = Why3.Term.ps_app ls_lt [ var; value ] in
@@ -102,7 +102,7 @@ let convert_model env (model : Caisar_xgboost.Parser.t) task =
     Array.fold trees
       ~init:
         (Why3.Term.t_const
-           (Dataset.real_constant_of_float tree.base_score)
+           (Utils.real_constant_of_float tree.base_score)
            Why3.Ty.ty_real)
       ~f:(fun term (ls, _) ->
         Why3.Term.fs_app ls_add
@@ -125,7 +125,7 @@ let _convert_dataset mapping (data : Caisar_xgboost.Input.t) task =
         | None -> missing
         | Some v ->
           let value =
-            Why3.Term.t_const (Dataset.real_constant_of_float v) Why3.Ty.ty_real
+            Why3.Term.t_const (Utils.real_constant_of_float v) Why3.Ty.ty_real
           in
           Why3.Term.(t_and (t_not missing) (t_equ var value))
       in
@@ -172,10 +172,10 @@ let compute_bounds_of_input over (data : Caisar_xgboost.Input.t) =
 let bound_term ?(epsilon = 0.0) env ~low ~high t =
   let v_low, v_high = (low -. epsilon, high +. epsilon) in
   let v_low =
-    Why3.Term.t_const (Dataset.real_constant_of_float v_low) Why3.Ty.ty_real
+    Why3.Term.t_const (Utils.real_constant_of_float v_low) Why3.Ty.ty_real
   in
   let v_high =
-    Why3.Term.t_const (Dataset.real_constant_of_float v_high) Why3.Ty.ty_real
+    Why3.Term.t_const (Utils.real_constant_of_float v_high) Why3.Ty.ty_real
   in
   let th_real = Why3.Env.read_theory env [ "real" ] "Real" in
   let ls_lt =
diff --git a/src/dataset.ml b/src/dataset.ml
index ac2d3f3b5a3964d13c1717ba285c21b0c8e3d097..2a319529d627107d49ad36e4a35822bba8127fd0 100644
--- a/src/dataset.ml
+++ b/src/dataset.ml
@@ -23,30 +23,13 @@
 open Why3
 open Base
 
-let real_constant_of_float value =
-  let neg = Float.is_negative value in
-  let value = Fmt.str "%.64f" (Float.abs value) in
-  (* Split into integer and fractional parts. *)
-  let int_frac = String.split value ~on:'.' in
-  let int = List.hd_exn int_frac in
-  let frac =
-    match List.tl_exn int_frac with
-    | [] -> ""
-    | [ s ] -> s
-    | _ -> assert false (* Since every float has one '.' at most. *)
-  in
-  Constant.ConstReal (Number.real_literal ~radix:10 ~neg ~int ~frac ~exp:None)
-
-let float_of_real_constant rc =
-  Float.of_string (Fmt.str "%a" Number.(print_real_constant full_support) rc)
-
 type eps = float [@@deriving yojson, show]
 
 let string_of_eps eps = Float.to_string eps
 
 let term_of_eps env eps =
   let th = Symbols.Float64.create env in
-  Term.t_const (real_constant_of_float eps) th.ty
+  Term.t_const (Utils.real_constant_of_float eps) th.ty
 
 type threshold = float [@@deriving yojson, show]
 
@@ -131,7 +114,7 @@ let interpret_predicate env ~on_model ~on_dataset task =
       | [ { t_node = Tconst (Constant.ConstReal e); _ } ] ->
         let robust_predicate = find_predicate_ls env "robust" in
         let cond_robust_predicate = find_predicate_ls env "cond_robust" in
-        let f = float_of_real_constant e in
+        let f = Utils.float_of_real_constant e in
         if Term.ls_equal ls robust_predicate
         then Robust f
         else if Term.ls_equal ls cond_robust_predicate
@@ -147,7 +130,7 @@ let interpret_predicate env ~on_model ~on_dataset task =
         let meta_robust_predicate = find_predicate_ls env "meta_robust" in
         let some_ls = find_option_ls env "Some" in
         let none_ls = find_option_ls env "None" in
-        let f = float_of_real_constant e in
+        let f = Utils.float_of_real_constant e in
         if Term.ls_equal ls meta_robust_predicate
         then
           let ampli =
@@ -208,7 +191,7 @@ let add_axioms ?eps th task input value =
   let t_ge =
     let value = value -. eps in
     let value = clip value in
-    let cst = real_constant_of_float value in
+    let cst = Utils.real_constant_of_float value in
     Term.(ps_app ls_ge [ fs_app input [] Ty.ty_real; t_const cst Ty.ty_real ])
   in
   let task = Task.add_prop_decl task Decl.Paxiom pr_ge t_ge in
@@ -221,7 +204,7 @@ let add_axioms ?eps th task input value =
   let t_le =
     let value = value +. eps in
     let value = clip value in
-    let cst = real_constant_of_float value in
+    let cst = Utils.real_constant_of_float value in
     Term.(ps_app ls_le [ fs_app input [] Ty.ty_real; t_const cst Ty.ty_real ])
   in
   Task.add_prop_decl task Decl.Paxiom pr_le t_le
diff --git a/src/dataset.mli b/src/dataset.mli
index 4660c27d288a6bdef93988f4ace6958ccc5ed5ce..af59d535ddd95b844bb432d771bf42131a6298c8 100644
--- a/src/dataset.mli
+++ b/src/dataset.mli
@@ -24,7 +24,6 @@ open Why3
 
 type eps [@@deriving yojson, show]
 
-val real_constant_of_float : float -> Constant.constant
 val string_of_eps : eps -> string
 val term_of_eps : Env.env -> eps -> Term.term
 
diff --git a/src/interpretation/interpreter.ml b/src/interpretation/interpreter.ml
index 128ec43ae21132f9523436a8d8a006c9fa5408a7..95ce3397e5f9559e52fdc1b749077b92d4dcb147 100644
--- a/src/interpretation/interpreter.ml
+++ b/src/interpretation/interpreter.ml
@@ -87,7 +87,13 @@ let builtin_of_constant env known_map (name, value) =
     Logging.user_error (fun m ->
       m "'%s' does not appear to be a declared toplevel constant" name)
 
-let bounded_quant engine vs ~cond : IRE.bounded_quant_result option =
+type bounded_quant_result = {
+  new_quant : Why3.Term.vsymbol list;
+  substitutions : Why3.Term.term Why3.Term.Mvs.t;
+  remaining_implication : Why3.Term.term;
+}
+
+let rec bounded_quant_aux engine vs ~cond : bounded_quant_result option =
   match vs.Why3.Term.vs_ty with
   | {
    ty_node = Tyapp ({ ts_name = { id_string = "vector"; _ }; _ }, ty :: _);
@@ -115,10 +121,33 @@ let bounded_quant engine vs ~cond : IRE.bounded_quant_result option =
           ITypes.Vector (Language.create_vector env n)
         in
         let substitutions =
-          [ ITypes.term_of_op ~args engine op (Some vs.vs_ty) ]
+          Why3.Term.Mvs.singleton vs
+            (ITypes.term_of_op ~args engine op (Some vs.vs_ty))
         in
-        Some { new_quant; substitutions }
+
+        Some
+          { new_quant; substitutions; remaining_implication = Why3.Term.t_true }
     | Tapp ({ ls_name = { id_string = "has_length"; _ }; _ }, _) -> None
+    | Tbinop (Tand, a, b) -> (
+      match bounded_quant_aux engine vs ~cond:a with
+      | None -> (
+        match bounded_quant_aux engine vs ~cond:b with
+        | None -> None
+        | Some { new_quant; substitutions; remaining_implication } ->
+          Some
+            {
+              new_quant;
+              substitutions;
+              remaining_implication =
+                Why3.Term.t_and_simp a remaining_implication;
+            })
+      | Some { new_quant; substitutions; remaining_implication } ->
+        Some
+          {
+            new_quant;
+            substitutions;
+            remaining_implication = Why3.Term.t_and_simp remaining_implication b;
+          })
     | _ ->
       Logging.user_error ?loc:vs.vs_name.id_loc (fun m ->
         m
@@ -127,6 +156,45 @@ let bounded_quant engine vs ~cond : IRE.bounded_quant_result option =
           Why3.Pretty.print_vs vs))
   | _ -> None
 
+(* Only one vector for now *)
+let rec bounded_quant engine vsl ~cond : bounded_quant_result option =
+  match vsl with
+  | ({
+       Why3.Term.vs_ty =
+         {
+           ty_node = Tyapp ({ ts_name = { id_string = "vector"; _ }; _ }, _);
+           _;
+         };
+       _;
+     } as vs)
+    :: l -> (
+    match bounded_quant_aux engine vs ~cond with
+    | None -> bounded_quant engine l ~cond
+    | Some r1 -> (
+      match bounded_quant engine l ~cond:r1.remaining_implication with
+      | None -> Some { r1 with new_quant = r1.new_quant @ l }
+      | Some r2 ->
+        Some
+          {
+            new_quant = r1.new_quant @ r2.new_quant;
+            substitutions =
+              Why3.Term.Mvs.set_union r1.substitutions r2.substitutions;
+            remaining_implication = r2.remaining_implication;
+          }))
+  | v :: l -> (
+    match bounded_quant engine l ~cond with
+    | None -> None
+    | Some r -> Some { r with new_quant = v :: r.new_quant })
+  | [] -> None
+
+let bounded_quant engine vsl ~cond : IRE.bounded_quant_result option =
+  Option.map (bounded_quant engine vsl ~cond) ~f:(fun r ->
+    {
+      IRE.new_quant = r.new_quant;
+      substitutions = [ r.substitutions ];
+      remaining_implication = r.remaining_implication;
+    })
+
 let declare_language_lsymbols interpreter_env task =
   (* Declare [Language] logic symbols only. *)
   Why3.Term.Hls.fold
diff --git a/src/interpretation/interpreter_reduction_engine.ml b/src/interpretation/interpreter_reduction_engine.ml
index 6b9042886a8e502377b46d64c47f4b03a7b45a36..93b883b94f0ede423b367dc3a0bfd555ef9a9c6d 100644
--- a/src/interpretation/interpreter_reduction_engine.ml
+++ b/src/interpretation/interpreter_reduction_engine.ml
@@ -39,7 +39,8 @@ type params = {
 
 type bounded_quant_result = {
   new_quant : vsymbol list;
-  substitutions : term list;
+  substitutions : term Mvs.t list;
+  remaining_implication : term;
 }
 
 type builtin_value =
@@ -52,7 +53,7 @@ type 'a builtin =
   'a engine -> lsymbol -> value list -> Ty.ty option -> builtin_value
 
 and 'a bounded_quant =
-  'a engine -> vsymbol -> cond:term -> bounded_quant_result option
+  'a engine -> vsymbol list -> cond:term -> bounded_quant_result option
 
 and 'a engine = {
   env : Env.env;
@@ -788,35 +789,31 @@ let reduce_bounded_quant engine ls_lt limit t sigma st rem =
       in
       { value_stack = st; cont_stack = loop rem b }
   | ( Term cond :: st,
-      (Kbinop Timplies, _)
-      :: (Kquant ((Tforall as quant), [ vs ], _), t_orig)
-      :: rem ) -> (
-    match engine.bounded_quant engine vs ~cond with
+      (Kbinop Timplies, _) :: (Kquant (Tforall, vsl, _), t_orig) :: rem ) -> (
+    match engine.bounded_quant engine vsl ~cond with
     | None -> raise Exit
     | Some res -> (
-      let t_empty, binop =
-        match quant with Tforall -> (t_true, Tand) | Texists -> (t_false, Tor)
-      in
       let rem =
         match res.new_quant with
         | [] -> rem
-        | _ -> (Kquant (quant, res.new_quant, []), t_orig) :: rem
+        | _ -> (Kquant (Tforall, res.new_quant, []), t_orig) :: rem
       in
+      let t = Term.t_implies_simp res.remaining_implication t in
       let rec loop rem = function
         | [] -> rem
-        | t_i :: l ->
+        | m_i :: l ->
           let rem =
             match l with
             | [] -> rem
             | _ ->
               (* conjunction *)
-              (Kbinop binop, t_true) :: rem
+              (Kbinop Tand, t_true) :: rem
           in
-          let rem = (Keval (t, Mvs.add vs t_i sigma), t_true) :: rem in
+          let rem = (Keval (t, Mvs.set_union m_i sigma), t_true) :: rem in
           loop rem l
       in
       match res.substitutions with
-      | [] -> { value_stack = Term t_empty :: st; cont_stack = rem }
+      | [] -> { value_stack = Term t_true :: st; cont_stack = rem }
       | _ -> { value_stack = st; cont_stack = loop rem res.substitutions }))
   | _ -> raise Exit
 
diff --git a/src/interpretation/interpreter_reduction_engine.mli b/src/interpretation/interpreter_reduction_engine.mli
index 644f50e51309bc37d19290f32297827e317d2287..85933f000b30b30cce91c0bfe81fa26866ead5d3 100644
--- a/src/interpretation/interpreter_reduction_engine.mli
+++ b/src/interpretation/interpreter_reduction_engine.mli
@@ -104,11 +104,15 @@ type 'a built_in_theories =
 
 type bounded_quant_result = {
   new_quant : Term.vsymbol list;
-  substitutions : Term.term list;
+  substitutions : Term.term Why3.Term.Mvs.t list;
+  remaining_implication : Term.term;
 }
 
 type 'a bounded_quant =
-  'a engine -> Term.vsymbol -> cond:Term.term -> bounded_quant_result option
+  'a engine ->
+  Term.vsymbol list ->
+  cond:Term.term ->
+  bounded_quant_result option
 
 val value_term : Term.term -> builtin_value
 val value_int : BigInt.t -> builtin_value
diff --git a/src/interpretation/interpreter_theory.ml b/src/interpretation/interpreter_theory.ml
index 55b568874eb4ce649da5d6fc46339b9557776d12..655ef8e633da1220906f8f1fa6d68810cdefb5dc 100644
--- a/src/interpretation/interpreter_theory.ml
+++ b/src/interpretation/interpreter_theory.ml
@@ -24,22 +24,6 @@ module IRE = Interpreter_reduction_engine
 module ITypes = Interpreter_types
 open Base
 
-(* TODO: Remove this and use common one. *)
-let const_real_of_float value =
-  let neg = Float.is_negative value in
-  let value = Fmt.str "%.64f" (Float.abs value) in
-  (* Split into integer and fractional parts. *)
-  let int_frac = String.split value ~on:'.' in
-  let int = List.hd_exn int_frac in
-  let frac =
-    match List.tl_exn int_frac with
-    | [] -> ""
-    | [ s ] -> s
-    | _ -> assert false (* Since every float has one '.' at most. *)
-  in
-  Why3.Constant.ConstReal
-    (Why3.Number.real_literal ~radix:10 ~neg ~int ~frac ~exp:None)
-
 let fail_on_unexpected_argument ls =
   Logging.code_error ~src:Logging.src_interpret_goal (fun m ->
     m "Unexpected argument(s) for '%a'" Why3.Pretty.print_ls ls)
@@ -182,7 +166,7 @@ module Vector = struct
         in
         let csts =
           List.map data ~f:(fun d ->
-            let cst = const_real_of_float (Float.of_string d) in
+            let cst = Utils.real_constant_of_float (Float.of_string d) in
             Why3.Term.t_const cst ty_cst)
         in
         let { ITypes.env; _ } = IRE.user_env engine in
@@ -224,7 +208,7 @@ module Vector = struct
         in
         let csts =
           List.map data ~f:(fun d ->
-            let cst = const_real_of_float (Float.of_string d) in
+            let cst = Utils.real_constant_of_float (Float.of_string d) in
             Why3.Term.t_const cst ty_cst)
         in
         let { ITypes.env; _ } = IRE.user_env engine in
diff --git a/src/language.mli b/src/language.mli
index 44b81fe0b74dd13b016d186e9f82d0e27152ccb5..72be9f3f461a984bfbd9a6351f335b03c69929ee 100644
--- a/src/language.mli
+++ b/src/language.mli
@@ -67,7 +67,6 @@ val ovo_parser : Env.env -> string -> Pmodule.pmodule Wstdlib.Mstr.t
 
 (** Creator of vector... *)
 
-
 val create_vector : Env.env -> int -> Term.lsymbol
 val lookup_vector : Term.lsymbol -> int option
 val mem_vector : Term.lsymbol -> bool
diff --git a/src/meta.ml b/src/meta.ml
index ae6eb300890d16b13794cc8bbed98975f69b8ad9..4b6b433d79c4a4fbe033ef72bf6b0e65ccb8dd13 100644
--- a/src/meta.ml
+++ b/src/meta.ml
@@ -49,3 +49,19 @@ let meta_dataset_filename =
   Why3.Theory.(
     register_meta_excl "caisar_dataset"
       ~desc:"Indicates the filename of the dataset" [ MTstring ])
+
+let rec collect_meta_input_output ~input_encoding ~output_encoding hls =
+  function
+  | None -> ()
+  | Some { Why3.Task.task_prev; task_decl; _ } -> (
+    collect_meta_input_output ~input_encoding ~output_encoding hls task_prev;
+    match task_decl.Why3.Theory.td_node with
+    | Meta (meta, l) when Why3.Theory.meta_equal meta meta_input -> (
+      match l with
+      | [ MAls ls; MAint i ] -> Why3.Term.Hls.add hls ls (input_encoding i)
+      | _ -> assert false)
+    | Meta (meta, l) when Why3.Theory.meta_equal meta meta_output -> (
+      match l with
+      | [ MAls ls; MAint i ] -> Why3.Term.Hls.add hls ls (output_encoding i)
+      | _ -> assert false)
+    | Use _ | Clone _ | Decl _ | Meta _ -> ())
diff --git a/src/meta.mli b/src/meta.mli
index 0ed5f6a0b7ae2f7441905952ccbbcb38c808a70b..8a99a5a8632c457ef0b8a5769df6437e3ee0caef 100644
--- a/src/meta.mli
+++ b/src/meta.mli
@@ -34,3 +34,12 @@ val meta_svm_filename : Why3.Theory.meta
 
 val meta_dataset_filename : Why3.Theory.meta
 (** The filename of the dataset. *)
+
+val collect_meta_input_output :
+  input_encoding:(int -> 'a) ->
+  output_encoding:(int -> 'a) ->
+  'a Why3.Term.Hls.t ->
+  Why3.Task.task ->
+  unit
+(** [collect_meta_input_output ~input_encoding ~output_encoding hls task] adds
+    to [hls] the encoding of each input/output logic symbol found in [task]. *)
diff --git a/src/printers/marabou.ml b/src/printers/marabou.ml
index 052964adea5be903654075737b0470631b68f326..f15fdd403d6235e5ff0ceb9a82f8c4b3547f37f5 100644
--- a/src/printers/marabou.ml
+++ b/src/printers/marabou.ml
@@ -169,14 +169,6 @@ let rec print_tdecl info fmt task =
     print_tdecl info fmt task_prev;
     match task_decl.Theory.td_node with
     | Use _ | Clone _ -> ()
-    | Meta (meta, l) when Theory.meta_equal meta Meta.meta_input -> (
-      match l with
-      | [ MAls ls; MAint i ] -> Term.Hls.add info.variables ls (Fmt.str "x%i" i)
-      | _ -> assert false)
-    | Meta (meta, l) when Theory.meta_equal meta Meta.meta_output -> (
-      match l with
-      | [ MAls ls; MAint i ] -> Term.Hls.add info.variables ls (Fmt.str "y%i" i)
-      | _ -> assert false)
     | Meta (_, _) -> ()
     | Decl d -> print_decl info fmt d)
 
@@ -198,6 +190,8 @@ let print_task args ?old:_ fmt task =
     }
   in
   Printer.print_prelude fmt args.Printer.prelude;
+  Meta.collect_meta_input_output info.variables task
+    ~input_encoding:(Fmt.str "x%i") ~output_encoding:(Fmt.str "y%i");
   print_tdecl info fmt task
 
 let init () =
diff --git a/src/printers/pyrat.ml b/src/printers/pyrat.ml
index 162d91e3f0b3a64af9ccc54171f61a07d3fc5273..b6538ea27bc9f5f5889ac9d79f4c428ad0fa80b3 100644
--- a/src/printers/pyrat.ml
+++ b/src/printers/pyrat.ml
@@ -155,14 +155,6 @@ let rec print_tdecl info fmt task =
     print_tdecl info fmt task_prev;
     match task_decl.Theory.td_node with
     | Use _ | Clone _ -> ()
-    | Meta (meta, l) when Theory.meta_equal meta Meta.meta_input -> (
-      match l with
-      | [ MAls ls; MAint i ] -> Term.Hls.add info.variables ls (Fmt.str "x%i" i)
-      | _ -> assert false)
-    | Meta (meta, l) when Theory.meta_equal meta Meta.meta_output -> (
-      match l with
-      | [ MAls ls; MAint i ] -> Term.Hls.add info.variables ls (Fmt.str "y%i" i)
-      | _ -> assert false)
     | Meta (_, _) -> ()
     | Decl d -> print_decl info fmt d)
 
@@ -184,6 +176,8 @@ let print_task args ?old:_ fmt task =
     }
   in
   Printer.print_prelude fmt args.Printer.prelude;
+  Meta.collect_meta_input_output info.variables task
+    ~input_encoding:(Fmt.str "x%i") ~output_encoding:(Fmt.str "y%i");
   print_tdecl info fmt task
 
 let init () =
diff --git a/src/printers/vnnlib.ml b/src/printers/vnnlib.ml
index 2bad9dec08f133c0b75b7905b1c58752258b8345..e6c39297230ec53d39178e1e059b9d96ab0c0b7a 100644
--- a/src/printers/vnnlib.ml
+++ b/src/printers/vnnlib.ml
@@ -502,16 +502,6 @@ let rec print_tdecl info fmt task =
     print_tdecl info fmt task_prev;
     match task_decl.Theory.td_node with
     | Use _ | Clone _ -> ()
-    | Meta (meta, l) when Theory.meta_equal meta Meta.meta_input -> (
-      match l with
-      | [ MAls ls; MAint i ] ->
-        Term.Hls.add info.variables ls (Fmt.str "X_%i" i)
-      | _ -> assert false)
-    | Meta (meta, l) when Theory.meta_equal meta Meta.meta_output -> (
-      match l with
-      | [ MAls ls; MAint i ] ->
-        Term.Hls.add info.variables ls (Fmt.str "Y_%i" i)
-      | _ -> assert false)
     | Meta _ -> ()
     | Decl d -> print_decl info fmt d)
 
@@ -534,6 +524,8 @@ let print_task args ?old:_ fmt task =
     }
   in
   Printer.print_prelude fmt args.Printer.prelude;
+  Meta.collect_meta_input_output info.variables task
+    ~input_encoding:(Fmt.str "X_%i") ~output_encoding:(Fmt.str "Y_%i");
   print_tdecl info fmt task
 
 let init () =
diff --git a/src/proof_strategy.ml b/src/proof_strategy.ml
index 29a9789bdef572f737995d5fab85154c2bddc7be..cbaaa44e6e6f4116535cf985fbf2c6e4127d8915 100644
--- a/src/proof_strategy.ml
+++ b/src/proof_strategy.ml
@@ -40,8 +40,9 @@ let do_apply_prover_strategy ~lookup ~trans tasks =
   in
   match Term.Sls.cardinal sls_model with
   | 0 -> tasks
-  | 1 -> List.map tasks ~f:(Trans.apply trans)
-  | _ -> Logging.user_error (fun m -> m "Multiple models are not supported yet")
+  | _ ->
+    (* TODO: rework for dealing with SVMs also. *)
+    List.map tasks ~f:(Trans.apply trans)
 
 let apply_classic_prover_strategy env task =
   let lookup = Language.lookup_nn in
@@ -57,11 +58,11 @@ let split_on_dataset_elts task =
        first thing to do is to destruct such toplevel conjunctions. *)
     Trans.apply Split.split_goal_full_stop_on_quant task
 
-let apply_native_nn_prover_strategy task =
+let apply_native_nn_prover_strategy env task =
   let tasks = split_on_dataset_elts task in
   let lookup = Language.lookup_nn in
   let trans =
-    Trans.seq [ Introduction.introduce_premises; Native_nn_prover.trans ]
+    Trans.seq [ Introduction.introduce_premises; Native_nn_prover.trans env ]
   in
   do_apply_prover_strategy ~lookup ~trans tasks
 
diff --git a/src/proof_strategy.mli b/src/proof_strategy.mli
index 6e388ce5e3fccd7b21aff8ece1cc7ac3b8005c95..fc28654546e422fecbe94796fed77cc52502df45 100644
--- a/src/proof_strategy.mli
+++ b/src/proof_strategy.mli
@@ -20,14 +20,14 @@
 (*                                                                        *)
 (**************************************************************************)
 
-open Why3
-
-val apply_classic_prover_strategy : Env.env -> Task.task -> Task.task list
+val apply_classic_prover_strategy :
+  Why3.Env.env -> Why3.Task.task -> Why3.Task.task list
 (** Detect and translate applications of neural networks into SMT-LIB. *)
 
-val apply_native_nn_prover_strategy : Task.task -> Task.task list
+val apply_native_nn_prover_strategy :
+  Why3.Env.env -> Why3.Task.task -> Why3.Task.task list
 (** First detect and split on dataset, if any, intended for neural networks.
     Then, detect and execute applications of neural networks. *)
 
-val apply_svm_prover_strategy : Task.task -> Task.task list
+val apply_svm_prover_strategy : Why3.Task.task -> Why3.Task.task list
 (** Detect and split on dataset, if any, intended for support vector machines. *)
diff --git a/src/transformations/native_nn_prover.ml b/src/transformations/native_nn_prover.ml
index f7754ae48abff0021fe98ff4565b905337775756..322bcf3074aebc4231ff3d2f53ad54466fc4b965 100644
--- a/src/transformations/native_nn_prover.ml
+++ b/src/transformations/native_nn_prover.ml
@@ -20,36 +20,380 @@
 (*                                                                        *)
 (**************************************************************************)
 
-open Why3
 open Base
 
+let src = Logs.Src.create "NN-Gen" ~doc:"Generation of neural networks"
+let src_term = Logs.Src.create "NN-Gen-Term" ~doc:"Generation of new formula"
+
+let src_show =
+  Logs.Src.create "NN-Gen-Show" ~doc:"Show generated neural networks"
+
+type new_output = {
+  index : int;
+  term : Why3.Term.term;
+}
+
+exception UnknownLogicSymbol
+
+let tempfile =
+  let c = ref (-1) in
+  fun () ->
+    match Sys.getenv "CAISAR_ONNX_OUTPUT_DIR" with
+    | Some dir ->
+      (* deterministic *)
+      Int.incr c;
+      Stdlib.Filename.concat dir (Fmt.str "caisar_%i.onnx" !c)
+    | None -> Stdlib.Filename.temp_file "caisar" ".onnx"
+
+let create_new_nn env input_vars outputs : string =
+  let module IR = Ir.Nier_simple in
+  let th_f64 = Symbols.Float64.create env in
+  let th_model = Symbols.Model.create env in
+  let th_nn = Symbols.NN.create env in
+  let len_input = Why3.Term.Mls.cardinal input_vars in
+  let input =
+    IR.Node.create (Input { shape = IR.Shape.of_list [ len_input ] })
+  in
+  assert (Why3.Term.Mls.for_all (fun _ i -> 0 <= i && i < len_input) input_vars);
+  let get_input =
+    Why3.Term.Hls.memo 10 (fun ls ->
+      let i = Why3.Term.Mls.find_exn UnknownLogicSymbol ls input_vars in
+      Ir.Nier_simple.Node.gather_int input i)
+  in
+  let cache = Why3.Term.Hterm.create 17 in
+  let nn_cache = Stdlib.Hashtbl.create 17 in
+  (* Instantiate the input of [old_nn] with the [old_nn_args] terms transformed
+     into nodes. *)
+  let rec convert_old_nn old_nn old_nn_args =
+    let converted_args = List.map ~f:convert_term old_nn_args in
+    let id =
+      ( old_nn.Language.nn_filename,
+        List.map converted_args ~f:(fun n -> n.Ir.Nier_simple.id) )
+    in
+    match Stdlib.Hashtbl.find_opt nn_cache id with
+    | None ->
+      let node_nn = convert_old_nn_aux old_nn converted_args in
+      Stdlib.Hashtbl.add nn_cache id node_nn;
+      node_nn
+    | Some node_nn -> node_nn
+  and convert_old_nn_aux old_nn converted_args =
+    let old_nn_nier =
+      match Onnx.Simple.parse old_nn.Language.nn_filename with
+      | Error s ->
+        Logging.code_error ~src (fun m ->
+          m "No parsed NN '%s': %s" old_nn.nn_filename s)
+      | Ok { nier = Error s; _ } ->
+        Logging.code_error ~src (fun m ->
+          m "No NIER available for NN '%s': %s" old_nn.nn_filename s)
+      | Ok { nier = Ok g; _ } -> g
+    in
+    (* Create the graph to replace the old input of the nn *)
+    let input () =
+      (* Regroup the terms into one node *)
+      let node =
+        IR.Node.create (Concat { inputs = converted_args; axis = 0 })
+      in
+      Ir.Nier_simple.Node.reshape (IR.input_shape old_nn_nier) node
+    in
+    let out =
+      IR.Node.replace_input input (IR.output old_nn_nier)
+      |> Ir.Nier_simple.Node.reshape
+           (Ir.Nier_simple.Shape.of_array [| old_nn.nn_nb_outputs |])
+    in
+    out
+  and convert_old_nn_at_old_index old_nn old_index old_nn_args =
+    let out = convert_old_nn old_nn old_nn_args in
+    let old_index = Why3.Number.to_small_integer old_index in
+    Ir.Nier_simple.Node.gather_int out old_index
+  and convert_term term =
+    match Why3.Term.Hterm.find_opt cache term with
+    | None ->
+      let n = convert_term_aux term in
+      Why3.Term.Hterm.add cache term n;
+      n
+    | Some n -> n
+  and convert_term_aux term : IR.GFloat.Node.t =
+    if not (Why3.Ty.ty_equal (Option.value_exn term.Why3.Term.t_ty) th_f64.ty)
+    then
+      Logging.user_error ?loc:term.t_loc (fun m ->
+        m "Cannot convert non Float64 term %a" Why3.Pretty.print_term term);
+    match term.Why3.Term.t_node with
+    | Tconst (Why3.Constant.ConstReal r) ->
+      IR.Node.create
+        (Constant
+           {
+             data = IR.GenTensor.create_1_float (Utils.float_of_real_constant r);
+           })
+    | Tapp (ls, []) -> get_input ls
+    | Tapp (ls, [ _; a; b ]) when Why3.Term.ls_equal ls th_f64.add ->
+      IR.Node.create (Add { input1 = convert_term a; input2 = convert_term b })
+    | Tapp (ls, [ _; a; b ]) when Why3.Term.ls_equal ls th_f64.sub ->
+      IR.Node.create (Sub { input1 = convert_term a; input2 = convert_term b })
+    | Tapp (ls, [ _; a; b ]) when Why3.Term.ls_equal ls th_f64.mul ->
+      IR.Node.create (Mul { input1 = convert_term a; input2 = convert_term b })
+    | Tapp (ls, [ _; a; b ]) when Why3.Term.ls_equal ls th_f64.div -> (
+      match b.t_node with
+      | Tconst (Why3.Constant.ConstReal r) ->
+        let f = Utils.float_of_real_constant r in
+        Ir.Nier_simple.Node.div_float (convert_term a) f
+      | _ ->
+        IR.Node.create
+          (Div { input1 = convert_term a; input2 = convert_term b }))
+    | Tapp (ls, [ a ]) when Why3.Term.ls_equal ls th_f64.neg ->
+      Ir.Nier_simple.Node.mul_float (convert_term a) (-1.)
+    | Tapp
+        ( ls_get (* [ ] *),
+          [
+            {
+              t_node =
+                Tapp
+                  ( ls_atat (* @@ *),
+                    [
+                      { t_node = Tapp (ls_nn (* nn *), _); _ };
+                      {
+                        t_node =
+                          Tapp (ls (* input vector *), tl (* input vars *));
+                        _;
+                      };
+                    ] );
+              _;
+            };
+            { t_node = Tconst (ConstInt old_index); _ };
+          ] )
+      when String.equal ls_get.ls_name.id_string (Why3.Ident.op_get "")
+           && (Why3.Term.ls_equal ls_atat th_model.atat
+              || Why3.Term.ls_equal ls_atat th_nn.atat) -> (
+      match (Language.lookup_nn ls_nn, Language.lookup_vector ls) with
+      | Some ({ nn_nb_inputs; _ } as old_nn), Some vector_length ->
+        assert (nn_nb_inputs = vector_length && vector_length = List.length tl);
+        convert_old_nn_at_old_index old_nn old_index tl
+      | _, _ ->
+        Logging.code_error ~src (fun m ->
+          m "Neural network application without fixed NN or arguments: %a"
+            Why3.Pretty.print_term term))
+    | Tconst _
+    | Tapp (_, _)
+    | Tif (_, _, _)
+    | Tlet (_, _)
+    | Tbinop (_, _, _)
+    | Tcase (_, _)
+    | Tnot _ | Ttrue | Tfalse ->
+      Logging.not_implemented_yet (fun m ->
+        m "Why3 term to IR: %a" Why3.Pretty.print_term term)
+    | Tvar _ | Teps _ | Tquant (_, _) ->
+      Logging.not_implemented_yet (fun m ->
+        m "Why3 term to IR (impossible?): %a" Why3.Pretty.print_term term)
+  in
+  (* Start actual term conversion. *)
+  let outputs =
+    List.rev_map outputs ~f:(fun { index; term } -> (index, convert_term term))
+    |> List.sort ~compare:(fun (i, _) (j, _) -> Int.compare i j)
+    |> List.mapi ~f:(fun i (j, n) ->
+         assert (i = j);
+         n)
+  in
+  let output = IR.Node.concat_0 outputs in
+  assert (
+    IR.Shape.equal output.shape (IR.Shape.of_array [| List.length outputs |]));
+  let nn = IR.create output in
+  Logs.debug ~src:src_show (fun f -> f "@.%s@." (IR.grapheasy nn));
+  Logs.debug ~src (fun f -> f "@[<v>%a@]@." IR.pp_debug nn);
+  let filename = tempfile () in
+  Onnx.Simple.write nn filename;
+  filename
+
+(* Choose the term pattern for starting the conversion to ONNX. *)
+let has_start_pattern env term =
+  let th_model = Symbols.Model.create env in
+  let th_nn = Symbols.NN.create env in
+  let th_f = Symbols.Float64.create env in
+  match term.Why3.Term.t_node with
+  | Tapp
+      ( ls_get (* [ ] *),
+        [
+          {
+            t_node =
+              Why3.Term.Tapp
+                ( ls_atat (* @@ *),
+                  [
+                    { t_node = Tapp (ls_nn (* nn *), _); _ };
+                    {
+                      t_node = Tapp (ls (* input vector *), tl (* input vars *));
+                      _;
+                    };
+                  ] );
+            _;
+          };
+          { t_node = Tconst (ConstInt _); _ };
+        ] )
+    when String.equal ls_get.ls_name.id_string (Why3.Ident.op_get "")
+         && (Why3.Term.ls_equal ls_atat th_model.Symbols.Model.atat
+            || Why3.Term.ls_equal ls_atat th_nn.Symbols.NN.atat) -> (
+    match (Language.lookup_nn ls_nn, Language.lookup_vector ls) with
+    | Some { nn_nb_inputs; _ }, Some vector_length ->
+      assert (nn_nb_inputs = vector_length && vector_length = List.length tl);
+      true
+    | _, _ ->
+      Logging.code_error ~src (fun m ->
+        m "Neural network application without fixed NN or arguments: %a"
+          Why3.Pretty.print_term term))
+  | Tapp ({ ls_value = Some ty; _ }, []) ->
+    (* input symbol *) Why3.Ty.ty_equal ty th_f.ty
+  | Tapp (ls_app, _) ->
+    List.mem
+      [ th_f.add; th_f.sub; th_f.mul; th_f.div ]
+      ~equal:Why3.Term.ls_equal ls_app
+  | _ -> false
+
+(* Abstract terms that contains neural network applications. *)
+let abstract_nn_term env =
+  let th_float64 = Symbols.Float64.create env in
+  let simplify_as_output_vars term =
+    let rec do_simplify ((new_index, output_vars) as acc) term =
+      if has_start_pattern env term
+      then
+        let acc, ls_output_var =
+          match Why3.Term.Mterm.find_opt term output_vars with
+          | None ->
+            let ls =
+              Why3.Term.create_fsymbol (Why3.Ident.id_fresh "y") []
+                th_float64.ty
+            in
+            let output_vars =
+              Why3.Term.Mterm.add term (new_index, ls) output_vars
+            in
+            ((new_index + 1, output_vars), ls)
+          | Some (_, ls) -> (acc, ls)
+        in
+        let output_var = Why3.Term.fs_app ls_output_var [] th_float64.ty in
+        (acc, output_var)
+      else Why3.Term.t_map_fold do_simplify acc term
+    in
+    let (_, output_vars), term = do_simplify (0, Why3.Term.Mterm.empty) term in
+    (output_vars, term)
+  in
+  Why3.Trans.fold_map
+    (fun task_hd (input_vars, task) ->
+      let tdecl = task_hd.task_decl in
+      match tdecl.td_node with
+      | Decl { d_node = Dparam ls; _ } when Language.mem_nn ls ->
+        (* All neural networks are removed. *)
+        (input_vars, task)
+      | Meta (meta, _) when Why3.Theory.meta_equal meta Meta.meta_nn_filename ->
+        (* All neural networks meta are removed. *)
+        (input_vars, task)
+      | Decl
+          {
+            d_node =
+              Dparam
+                ({
+                   ls_args = [];
+                   ls_constr = 0;
+                   ls_proj = false;
+                   ls_value = Some ty;
+                   _;
+                 } as ls);
+            _;
+          }
+        when Why3.Ty.ty_equal ty th_float64.ty ->
+        let task = Why3.Task.add_tdecl task tdecl in
+        let input_vars = Why3.Term.Sls.add ls input_vars in
+        (input_vars, task)
+      | Decl { d_node = Dprop (Pgoal, pr, goal); _ } ->
+        let output_vars, goal = simplify_as_output_vars goal in
+        Logs.debug ~src:src_term (fun f ->
+          f "new goal:%a" Why3.Pretty.print_term goal);
+        (* Add output variable declarations and corresponding meta. *)
+        let task =
+          Why3.Term.Mterm.fold_left
+            (fun task _ (index, output_var) ->
+              let task =
+                Why3.Task.add_meta task Meta.meta_output
+                  [ MAls output_var; MAint index ]
+              in
+              Why3.Task.add_param_decl task output_var)
+            task output_vars
+        in
+        let new_outputs =
+          Why3.Term.Mterm.fold_left
+            (fun acc term (index, _) -> { index; term } :: acc)
+            [] output_vars
+        in
+        (* Collect all lsymbols appearing in terms simplified as output
+           variables: at the moment, vector selection on a neural network
+           application to an input vector (ie, (nn @@ v)[_]). *)
+        let used_ls =
+          List.fold
+            ~f:(fun acc new_output ->
+              Why3.Term.t_s_fold
+                (fun acc _ -> acc)
+                (fun acc ls -> Why3.Term.Sls.add ls acc)
+                acc new_output.term)
+            ~init:Why3.Term.Sls.empty new_outputs
+        in
+        let input_vars = Why3.Term.Sls.inter input_vars used_ls in
+        let _, indexed_input_vars =
+          Why3.Term.Mls.mapi_fold (fun _ _ i -> (i + 1, i)) input_vars 0
+        in
+        (* Add meta for input variable declarations. *)
+        let task =
+          Why3.Term.Mls.fold_left
+            (fun task ls index ->
+              Why3.Task.add_meta task Meta.meta_input [ MAls ls; MAint index ])
+            task indexed_input_vars
+        in
+        (* Create new ONNX and add the corresponding meta. *)
+        let nn_filename = create_new_nn env indexed_input_vars new_outputs in
+        let task =
+          Why3.Task.add_meta task Meta.meta_nn_filename [ MAstr nn_filename ]
+        in
+        let decl =
+          let pr = Why3.Decl.create_prsymbol (Why3.Ident.id_clone pr.pr_name) in
+          Why3.Decl.create_prop_decl Pgoal pr goal
+        in
+        (input_vars, Why3.Task.add_decl task decl)
+      | Decl { d_node = Dprop (_, _, _); _ } ->
+        (* TODO: check no nn applications *)
+        (input_vars, Why3.Task.add_tdecl task tdecl)
+      | _ -> (input_vars, Why3.Task.add_tdecl task tdecl))
+    Why3.Term.Sls.empty None
+
+(* -------------------------------------------------------------------------- *)
+(* --- Old method                                                             *)
+(* -------------------------------------------------------------------------- *)
+
 (* Creates a list of pairs made of output variables and respective indices in
    the list, for each neural network application to an input vector appearing in
    a task. Such a list stands for the resulting output vector of a neural
    network application to an input vector (ie, something of the form: nn @@ v).
    The creation process is memoized wrt terms corresponding to neural network
    applications to input vectors. *)
-let create_output_vars =
-  let rec do_create mt (term : Term.term) =
-    match term.t_node with
-    | Term.Tapp (ls1 (* @@ *), [ { t_node = Tapp (ls2 (* nn *), _); _ }; _ ])
-      when String.equal ls1.ls_name.id_string (Ident.op_infix "@@") -> (
-      match Language.lookup_nn ls2 with
+let output_vars env =
+  let th_model = Symbols.Model.create env in
+  let th_nn = Symbols.NN.create env in
+  let rec create_output_var mt term =
+    match term.Why3.Term.t_node with
+    | Tapp (ls_atat (* @@ *), [ { t_node = Tapp (ls_nn (* nn *), _); _ }; _ ])
+      when Why3.Term.ls_equal ls_atat th_model.atat
+           || Why3.Term.ls_equal ls_atat th_nn.atat -> (
+      match Language.lookup_nn ls_nn with
       | Some { nn_nb_outputs; nn_ty_elt; _ } ->
-        if Term.Mterm.mem term mt
+        if Why3.Term.Mterm.mem term mt
         then mt
         else
           let output_vars =
             List.init nn_nb_outputs ~f:(fun index ->
-              (index, Term.create_fsymbol (Ident.id_fresh "y") [] nn_ty_elt))
+              ( index,
+                Why3.Term.create_fsymbol (Why3.Ident.id_fresh "y") [] nn_ty_elt
+              ))
           in
-          Term.Mterm.add term output_vars mt
+          Why3.Term.Mterm.add term output_vars mt
       | None -> mt)
-    | _ -> Term.t_fold do_create mt term
+    | _ -> Why3.Term.t_fold create_output_var mt term
   in
-  Trans.fold_decl
-    (fun decl mt -> Decl.decl_fold do_create mt decl)
-    Term.Mterm.empty
+  Why3.Trans.fold_decl
+    (fun decl mt -> Why3.Decl.decl_fold create_output_var mt decl)
+    Why3.Term.Mterm.empty
 
 (* Simplifies a task goal exhibiting a vector selection on a neural network
    application to an input vector (ie, (nn @@ v)[_]) by the corresponding output
@@ -57,27 +401,30 @@ let create_output_vars =
    that describes the respective index in the input vector. Output variables are
    all declared, each with a meta that describes the respective index in the
    output vector. *)
-let simplify_nn_application input_vars output_vars =
-  let rec do_simplify (term : Term.term) =
+let simplify_nn_application env input_vars output_vars =
+  let th_model = Symbols.Model.create env in
+  let th_nn = Symbols.NN.create env in
+  let rec do_simplify (term : Why3.Term.term) =
     match term.t_node with
-    | Term.Tapp
+    | Why3.Term.Tapp
         ( ls_get (* [_] *),
           [
             ({ t_node = Tapp (ls_atat (* @@ *), _); _ } as t1);
             ({ t_node = Tconst (ConstInt index); _ } as _t2);
           ] )
-      when String.equal ls_get.ls_name.id_string (Ident.op_get "")
-           && String.equal ls_atat.ls_name.id_string (Ident.op_infix "@@") -> (
-      match Term.Mterm.find_opt t1 output_vars with
-      | None -> Term.t_map do_simplify term
+      when String.equal ls_get.ls_name.id_string (Why3.Ident.op_get "")
+           && (Why3.Term.ls_equal ls_atat th_nn.atat
+              || Why3.Term.ls_equal ls_atat th_model.atat) -> (
+      match Why3.Term.Mterm.find_opt t1 output_vars with
+      | None -> Why3.Term.t_map do_simplify term
       | Some output_vars ->
-        let index = Number.to_small_integer index in
+        let index = Why3.Number.to_small_integer index in
         assert (index < List.length output_vars);
         let ls = Stdlib.List.assoc index output_vars in
-        Term.t_app_infer ls [])
-    | _ -> Term.t_map do_simplify term
+        Why3.Term.t_app_infer ls [])
+    | _ -> Why3.Term.t_map do_simplify term
   in
-  Trans.fold
+  Why3.Trans.fold
     (fun task_hd task ->
       match task_hd.task_decl.td_node with
       | Decl { d_node = Dparam ls; _ } ->
@@ -85,38 +432,41 @@ let simplify_nn_application input_vars output_vars =
            to appear before the corresponding declaration in order to be
            leveraged by prover printers. *)
         let task =
-          match Term.Mls.find_opt ls input_vars with
+          match Why3.Term.Mls.find_opt ls input_vars with
           | None -> task
           | Some index ->
-            Task.add_meta task Meta.meta_input [ MAls ls; MAint index ]
+            Why3.Task.add_meta task Meta.meta_input [ MAls ls; MAint index ]
         in
-        Task.add_tdecl task task_hd.task_decl
+        Why3.Task.add_tdecl task task_hd.task_decl
       | Decl ({ d_node = Dprop (Pgoal, _, _); _ } as decl) ->
-        let decl = Decl.decl_map do_simplify decl in
+        let decl = Why3.Decl.decl_map do_simplify decl in
         let task =
           (* Output variables are not declared yet in the task as they are
              created on the fly for each (different) neural network application
              on an input vector. We add here their declarations in the task. *)
-          Term.Mterm.fold
+          Why3.Term.Mterm.fold
             (fun _t output_vars task ->
               (* Again, for each output variable, add the meta first, then its
                  actual declaration. *)
               List.fold output_vars ~init:task
                 ~f:(fun task (index, output_var) ->
                 let task =
-                  Task.add_meta task Meta.meta_output
+                  Why3.Task.add_meta task Meta.meta_output
                     [ MAls output_var; MAint index ]
                 in
-                let decl = Decl.create_param_decl output_var in
-                Task.add_decl task decl))
+                let decl = Why3.Decl.create_param_decl output_var in
+                Why3.Task.add_decl task decl))
             output_vars task
         in
-        Task.add_decl task decl
+        Why3.Task.add_decl task decl
       | Use _ | Clone _ | Meta _ | Decl _ ->
-        Task.add_tdecl task task_hd.task_decl)
+        Why3.Task.add_tdecl task task_hd.task_decl)
     None
 
 let trans =
-  Trans.bind Utils.input_vars (fun input_vars ->
-    Trans.bind create_output_vars (fun output_vars ->
-      simplify_nn_application input_vars output_vars))
+  Why3.Env.Wenv.memoize 10 (fun env ->
+    Why3.Trans.bind Utils.input_terms (function
+      | Utils.Others -> abstract_nn_term env
+      | Vars input_vars ->
+        Why3.Trans.bind (output_vars env) (fun output_vars ->
+          simplify_nn_application env input_vars output_vars)))
diff --git a/src/transformations/native_nn_prover.mli b/src/transformations/native_nn_prover.mli
index 2bce53f83afce974b1e51a0d5264df5b8169ac44..421cfe4ada32f3b69af4a7d7f4cff2392ec75c2e 100644
--- a/src/transformations/native_nn_prover.mli
+++ b/src/transformations/native_nn_prover.mli
@@ -20,4 +20,4 @@
 (*                                                                        *)
 (**************************************************************************)
 
-val trans : Why3.Task.task Why3.Trans.trans
+val trans : Why3.Env.env -> Why3.Task.task Why3.Trans.trans
diff --git a/src/transformations/nn2smt.ml b/src/transformations/nn2smt.ml
index 34e4cca61ea6ce0cf69eb1f7ac4bdaa76fb19b1e..56f24a1324b4d7077d5de3e414b7d134f98959f1 100644
--- a/src/transformations/nn2smt.ml
+++ b/src/transformations/nn2smt.ml
@@ -59,7 +59,7 @@ let rec terms_of_nier m d node index ty_inputs env input_vars input_terms =
     in
     Queue.enqueue d (Why3.Theory.create_decl decl);
     Queue.enqueue d
-    @@ Why3.Theory.create_meta Why3.Inlining.meta [ Why3.Theory.MAls ls ];
+      (Why3.Theory.create_meta Why3.Inlining.meta [ Why3.Theory.MAls ls ]);
     let mi = Map.add_exn mi ~key:index ~data:ls in
     m := Map.set !m ~key:node ~data:mi;
     ls
@@ -83,7 +83,7 @@ and terms_of_nier_aux m d node index ty_inputs env input_vars input_terms =
     match node.descr with
     | Constant { data = Float ba } ->
       let id = IR.Shape.unrow_major node.shape index in
-      Relop.term_of_float env (IR.Tensor.get ba id)
+      Utils.term_of_float env (IR.Tensor.get ba id)
     | Matmul { input1; input2 } ->
       (* [|... ; _; n |], [| ...; n; _ |] *)
       let id = IR.Shape.unrow_major node.shape index in
diff --git a/src/transformations/relop.ml b/src/transformations/relop.ml
index 77b9ef72a2547ef65cef6a6b107b925f287377ff..f89b71e4ca9c2a1e97890bf24d29df26a34fcb69 100644
--- a/src/transformations/relop.ml
+++ b/src/transformations/relop.ml
@@ -23,32 +23,6 @@
 open Why3
 open Base
 
-let float_of_real_constant rc =
-  let is_neg, rc =
-    (BigInt.lt rc.Number.rl_real.rv_sig BigInt.zero, Number.abs_real rc)
-  in
-  let rc_str = Fmt.str "%a" Number.(print_real_constant full_support) rc in
-  let f = Float.of_string rc_str in
-  if is_neg then Float.neg f else f
-
-let real_constant_of_float value =
-  let neg = Float.is_negative value in
-  let value = Fmt.str "%.64f" (Float.abs value) in
-  (* Split into integer and fractional parts. *)
-  let int_frac = String.split value ~on:'.' in
-  let int = List.hd_exn int_frac in
-  let frac =
-    match List.tl_exn int_frac with
-    | [] -> ""
-    | [ s ] -> s
-    | _ -> assert false (* Since every float has one '.' at most. *)
-  in
-  Constant.ConstReal (Number.real_literal ~radix:10 ~neg ~int ~frac ~exp:None)
-
-let term_of_float env f =
-  let th = Symbols.Float64.create env in
-  Term.t_const (real_constant_of_float f) th.ty
-
 (* -------------------------------------------------------------------------- *)
 (* --- Simplify Relops                                                        *)
 (* -------------------------------------------------------------------------- *)
@@ -99,10 +73,10 @@ let make_rewriter env =
       when List.exists ~f:(Term.ls_equal ls_op) aop_f64 ->
       (* [rc1 ls_op rc2] => [res_rc], where [res_rc] is the actual arithmetic
          result of computing [ls_op] on [rc1] and [rc2]. *)
-      let rc1_float = float_of_real_constant rc1 in
-      let rc2_float = float_of_real_constant rc2 in
+      let rc1_float = Utils.float_of_real_constant rc1 in
+      let rc2_float = Utils.float_of_real_constant rc2 in
       let res_rc_float = (aop_f64_of_ls ls_op) rc1_float rc2_float in
-      let res_rc = real_constant_of_float res_rc_float in
+      let res_rc = Utils.real_constant_of_float res_rc_float in
       Term.t_const res_rc th_f64.ty
     | Tapp
         ( ls_rel,
@@ -121,10 +95,10 @@ let make_rewriter env =
            && List.exists ~f:(Term.ls_equal ls_op) aop_f64 ->
       (* [rc1 ls_rel t' ls_op rc2] => [(rc1 inv_ls_op rc2) ls_rel t'], where
          [inv_ls_op] is the inverse operation of the one provided by [ls_op]. *)
-      let rc1_float = float_of_real_constant rc1 in
-      let rc2_float = float_of_real_constant rc2 in
+      let rc1_float = Utils.float_of_real_constant rc1 in
+      let rc2_float = Utils.float_of_real_constant rc2 in
       let res_rc_float = (inv_aop_f64_of_ls ls_op) rc1_float rc2_float in
-      let res_rc_t = term_of_float env res_rc_float in
+      let res_rc_t = Utils.term_of_float env res_rc_float in
       let t = Term.t_app_infer ls_rel [ res_rc_t; t' ] in
       rewriter t
     | Tapp
@@ -144,10 +118,10 @@ let make_rewriter env =
            && List.exists ~f:(Term.ls_equal ls_op) aop_f64 ->
       (* [t' ls_op rc2 ls_rel rc1] => [t' ls_rel (rc1 inv_ls_op rc2)], where
          [inv_ls_op] is the inverse operation of the one provided by [ls_op]. *)
-      let rc1_float = float_of_real_constant rc1 in
-      let rc2_float = float_of_real_constant rc2 in
+      let rc1_float = Utils.float_of_real_constant rc1 in
+      let rc2_float = Utils.float_of_real_constant rc2 in
       let res_rc_float = (inv_aop_f64_of_ls ls_op) rc1_float rc2_float in
-      let res_rc_t = term_of_float env res_rc_float in
+      let res_rc_t = Utils.term_of_float env res_rc_float in
       let t = Term.t_app_infer ls_rel [ t'; res_rc_t ] in
       rewriter t
     | _ -> t
diff --git a/src/transformations/relop.mli b/src/transformations/relop.mli
index 0b808b2242a93530cf860a897b9360d9244e8130..d5a662fe1c59ce676d03ff00133a71373fe2e7a8 100644
--- a/src/transformations/relop.mli
+++ b/src/transformations/relop.mli
@@ -32,6 +32,3 @@ val register_simplify_relop : unit -> unit
 
 val register_vars_on_lhs_of_relop : unit -> unit
 (** Register the [vars_on_lhs_of_relop] transformation. *)
-
-val term_of_float : Why3.Env.env -> float -> Why3.Term.term
-(** Create the constant term from a floating point number *)
\ No newline at end of file
diff --git a/src/transformations/symbols.ml b/src/transformations/symbols.ml
index c07edad17db54d12f0d9a40c24a36ea26d822ce4..da11b63faf155bf0c023f0b8e4daf72dc0bbb82f 100644
--- a/src/transformations/symbols.ml
+++ b/src/transformations/symbols.ml
@@ -20,6 +20,10 @@
 (*                                                                        *)
 (**************************************************************************)
 
+(* -------------------------------------------------------------------------- *)
+(* --- Why3 Theories                                                          *)
+(* -------------------------------------------------------------------------- *)
+
 module Float64 = struct
   type t = {
     ty : Why3.Ty.ty;
@@ -84,15 +88,15 @@ module Real = struct
   type t = {
     ty : Why3.Ty.ty;
     tysymbol : Why3.Ty.tysymbol;
-    add : Why3.Term.lsymbol;  (** Real.add *)
-    sub : Why3.Term.lsymbol;  (** Real.sub *)
-    mul : Why3.Term.lsymbol;  (** Real.mul *)
-    div : Why3.Term.lsymbol;  (** Real.div *)
-    neg : Why3.Term.lsymbol;  (** Real.neg *)
-    le : Why3.Term.lsymbol;  (** Real.le *)
-    ge : Why3.Term.lsymbol;  (** Real.ge *)
-    lt : Why3.Term.lsymbol;  (** Real.lt *)
-    gt : Why3.Term.lsymbol;  (** Real.gt *)
+    add : Why3.Term.lsymbol; (* Real.add *)
+    sub : Why3.Term.lsymbol; (* Real.sub *)
+    mul : Why3.Term.lsymbol; (* Real.mul *)
+    div : Why3.Term.lsymbol; (* Real.div *)
+    neg : Why3.Term.lsymbol; (* Real.neg *)
+    le : Why3.Term.lsymbol; (* Real.le *)
+    ge : Why3.Term.lsymbol; (* Real.ge *)
+    lt : Why3.Term.lsymbol; (* Real.lt *)
+    gt : Why3.Term.lsymbol; (* Real.gt *)
   }
 
   let create_t env =
@@ -114,3 +118,33 @@ module Real = struct
 
   let create = Why3.Env.Wenv.memoize 10 (fun env -> create_t env)
 end
+
+(* -------------------------------------------------------------------------- *)
+(* --- CAISAR Theories                                                        *)
+(* -------------------------------------------------------------------------- *)
+
+module Model = struct
+  type t = { atat : Why3.Term.lsymbol (* Model.( @@ ) *) }
+
+  let create_t env =
+    let th = Why3.Env.read_theory env [ "caisar"; "model" ] "Model" in
+    let atat =
+      Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_infix "@@" ]
+    in
+    { atat }
+
+  let create = Why3.Env.Wenv.memoize 10 (fun env -> create_t env)
+end
+
+module NN = struct
+  type t = { atat : Why3.Term.lsymbol (* NN.( @@ ) *) }
+
+  let create_t env =
+    let th = Why3.Env.read_theory env [ "caisar"; "model" ] "NN" in
+    let atat =
+      Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_infix "@@" ]
+    in
+    { atat }
+
+  let create = Why3.Env.Wenv.memoize 10 (fun env -> create_t env)
+end
diff --git a/src/transformations/symbols.mli b/src/transformations/symbols.mli
index e7c19c9e8232a57cd0761771bc0f3391d1751b1d..9e76d2a94ce6ee2e5b962c019dad31d85f26fe0e 100644
--- a/src/transformations/symbols.mli
+++ b/src/transformations/symbols.mli
@@ -20,6 +20,10 @@
 (*                                                                        *)
 (**************************************************************************)
 
+(* -------------------------------------------------------------------------- *)
+(** {2 Why3 Theories} *)
+(* -------------------------------------------------------------------------- *)
+
 module Float64 : sig
   type t = private {
     ty : Why3.Ty.ty;
@@ -63,4 +67,20 @@ module Real : sig
   }
 
   val create : Why3.Env.env -> t
-end
\ No newline at end of file
+end
+
+(* -------------------------------------------------------------------------- *)
+(** {2 CAISAR Theories} *)
+(* -------------------------------------------------------------------------- *)
+
+module Model : sig
+  type t = private { atat : Why3.Term.lsymbol  (** Model.( \@\@ ) *) }
+
+  val create : Why3.Env.env -> t
+end
+
+module NN : sig
+  type t = private { atat : Why3.Term.lsymbol  (** NN.( \@\@ ) *) }
+
+  val create : Why3.Env.env -> t
+end
diff --git a/src/transformations/utils.ml b/src/transformations/utils.ml
index 8638ff6a954795b532fe0215164b883a7fae3b67..fd420463fb4a6fcd52c123701ab52bcb0c63505e 100644
--- a/src/transformations/utils.ml
+++ b/src/transformations/utils.ml
@@ -22,36 +22,64 @@
 
 open Base
 
-let src = Logs.Src.create "TransformationsUtils" ~doc:"Transformation utils"
+(* -------------------------------------------------------------------------- *)
+(* --- Conversions from/to float                                              *)
+(* -------------------------------------------------------------------------- *)
 
 let float_of_real_constant rc =
   let is_neg, rc =
-    ( Why3.(BigInt.lt rc.Number.rl_real.rv_sig BigInt.zero),
-      Why3.Number.abs_real rc )
+    Why3.(BigInt.lt rc.Number.rl_real.rv_sig BigInt.zero, Number.abs_real rc)
   in
   let rc_str = Fmt.str "%a" Why3.Number.(print_real_constant full_support) rc in
   let f = Float.of_string rc_str in
   if is_neg then Float.neg f else f
 
+let real_constant_of_float value =
+  let neg = Float.is_negative value in
+  let value = Fmt.str "%.64f" (Float.abs value) in
+  (* Split into integer and fractional parts. *)
+  let int_frac = String.split value ~on:'.' in
+  let int = List.hd_exn int_frac in
+  let frac =
+    match List.tl_exn int_frac with
+    | [] -> ""
+    | [ s ] -> s
+    | _ -> assert false (* Since every float has one '.' at most. *)
+  in
+  Why3.Constant.ConstReal
+    (Why3.Number.real_literal ~radix:10 ~neg ~int ~frac ~exp:None)
+
+let term_of_float env f =
+  let th = Symbols.Float64.create env in
+  Why3.Term.t_const (real_constant_of_float f) th.ty
+
+(* -------------------------------------------------------------------------- *)
+(* --- Information about input specification                                  *)
+(* -------------------------------------------------------------------------- *)
+
 (* Collects input variables and their position inside input vectors. Such
    process is memoized wrt lsymbols corresponding to input vectors. *)
 
-type position_input_vars =
-  (int Why3.Term.Mls.t
-  [@printer
-    fun fmt mls ->
-      Why3.(
-        Pp.print_iter2 Term.Mls.iter Pp.newline Pp.comma Pretty.print_ls Pp.int
-          fmt mls)])
+(* Terms forming vectors in input to models. *)
+type input_terms =
+  | Vars of
+      (int Why3.Term.Mls.t
+      [@printer
+        fun fmt mls ->
+          Why3.(
+            Pp.print_iter2 Term.Mls.iter Pp.newline Pp.comma Pretty.print_ls
+              Pp.int fmt mls)])
+    (* A map from input variable lsymbols to corresponding positions in input
+       vectors. *)
+  | Others (* Generic terms. *)
 [@@deriving show]
 
-let input_vars : position_input_vars Why3.Trans.trans =
+let input_terms : input_terms Why3.Trans.trans =
+  let exception NotInputVariable in
   let hls = Why3.Term.Hls.create 13 in
   let add index mls = function
     | { Why3.Term.t_node = Tapp (ls, []); _ } -> Why3.Term.Mls.add ls index mls
-    | t ->
-      Logging.code_error ~src (fun m ->
-        m "Not an input variable: %a" Why3.Pretty.print_term t)
+    | _ -> raise NotInputVariable
   in
   let rec do_collect mls t =
     match t.Why3.Term.t_node with
@@ -72,8 +100,13 @@ let input_vars : position_input_vars Why3.Trans.trans =
     | _ -> Why3.Term.t_fold do_collect mls t
   in
   Why3.Trans.fold_decl
-    (fun decl mls -> Why3.Decl.decl_fold do_collect mls decl)
-    Why3.Term.Mls.empty
+    (fun decl mls ->
+      match mls with
+      | Vars mls -> (
+        try Vars (Why3.Decl.decl_fold do_collect mls decl)
+        with NotInputVariable -> Others)
+      | Others -> Others)
+    (Vars Why3.Term.Mls.empty)
 
 (* Collects input feature values (these are typically coming from a data point
    of a data set).
diff --git a/src/transformations/utils.mli b/src/transformations/utils.mli
index 63ad5a212a63e7d65c2f9cef1d79f62425a5e8d7..7ec56fe2b5447037dd0028525363722b4e112326 100644
--- a/src/transformations/utils.mli
+++ b/src/transformations/utils.mli
@@ -20,8 +20,25 @@
 (*                                                                        *)
 (**************************************************************************)
 
-type position_input_vars = int Why3.Term.Mls.t [@@deriving show]
-(** Map input variable lsymbols to corresponding position in input vectors. *)
+(* -------------------------------------------------------------------------- *)
+(** {2 Conversions from/to float} *)
+(* -------------------------------------------------------------------------- *)
+
+val float_of_real_constant : Why3.Number.real_constant -> float
+val real_constant_of_float : float -> Why3.Constant.constant
+val term_of_float : Why3.Env.env -> float -> Why3.Term.term
+
+(* -------------------------------------------------------------------------- *)
+(** {2 Information about input specification} *)
+(* -------------------------------------------------------------------------- *)
+
+(** Terms forming vectors in input to models. *)
+type input_terms =
+  | Vars of int Why3.Term.Mls.t
+      (** A map from input variable lsymbols to corresponding positions in input
+          vectors. *)
+  | Others  (** Generic terms. *)
+[@@deriving show]
 
 type features = Float.t Why3.Term.Mls.t [@@deriving show]
 (** Input feature values. *)
@@ -34,7 +51,7 @@ and interval = float option * float option [@@deriving show]
 type label = int [@@deriving show]
 (** Output label. *)
 
-val input_vars : position_input_vars Why3.Trans.trans
+val input_terms : input_terms Why3.Trans.trans
 
 val input_features :
   Why3.Env.env -> vars:Why3.Term.lsymbol list -> features Why3.Trans.trans
diff --git a/src/verification.ml b/src/verification.ml
index 45e6cb6ec5eb948216065f33d7bba0ca2f8f2276..cc97ab34268b421f737b854dd22e136d7120a666 100644
--- a/src/verification.ml
+++ b/src/verification.ml
@@ -273,7 +273,12 @@ let answer_saver limit config env config_prover ~proof_strategy task =
           (svm_filename, Language.svm_abstraction_of_string svm_abstraction)
         | Some _ -> assert false (* By construction of the meta. *)
       in
-      let vars = Why3.Term.Mls.keys (Trans.apply Utils.input_vars task) in
+      let vars =
+        match Trans.apply Utils.input_terms task with
+        | Utils.Vars mls -> Why3.Term.Mls.keys mls
+        | Others ->
+          Logging.user_error (fun m -> m "Cannot determine input variables")
+      in
       let dataset : Csv.t =
         let features = Trans.apply (Utils.input_features env ~vars) task in
         let features =
@@ -368,7 +373,7 @@ let call_prover ~cwd ~limit config env prover config_prover driver ?dataset
       answer_dataset limit config env prover config_prover driver dataset task
     | Marabou | Pyrat | Nnenum | ABCrown ->
       let task = Interpreter.interpret_task ~cwd env ~definitions task in
-      let proof_strategy = Proof_strategy.apply_native_nn_prover_strategy in
+      let proof_strategy = Proof_strategy.apply_native_nn_prover_strategy env in
       answer_generic limit config prover config_prover driver ~proof_strategy
         task
     | CVC5 ->
diff --git a/stdlib/caisar/model.mlw b/stdlib/caisar/model.mlw
index c9643ca058dd261c49937706ce053515a6f98231..62fc630513470a5448c63322b45b7f92cad6fd5f 100644
--- a/stdlib/caisar/model.mlw
+++ b/stdlib/caisar/model.mlw
@@ -31,8 +31,8 @@ theory Model
   type kind
   type model 'kind
 
-  function read_model (n: string) : model 'kind
-  function (@@) (m: model 'kind) (v: vector 'a) : vector 'a
+  val function read_model (n: string) : model 'kind
+  val function (@@) (m: model 'kind) (v: vector 'a) : vector 'a
 
 end
 
diff --git a/stdlib/caisar/types.mlw b/stdlib/caisar/types.mlw
index 7720c47ba3d1cec2b73d6b9eaa4165578aceb8e8..4f01692fd72d17577eff475c4de0b71efe9171fd 100644
--- a/stdlib/caisar/types.mlw
+++ b/stdlib/caisar/types.mlw
@@ -80,29 +80,29 @@ theory Vector
   type vector 'a
   type index = int
 
-  function ([]) (v: vector 'a) (i: index) : 'a
-  function length (v: vector 'a) : int
+  val function ([]) (v: vector 'a) (i: index) : 'a
+  val function length (v: vector 'a) : int
 
-  function (-) (v1: vector 'a) (v2: vector 'a) : vector 'a
-  function (+) (v1: vector 'a) (v2: vector 'a) : vector 'a
+  val function (-) (v1: vector 'a) (v2: vector 'a) : vector 'a
+  val function (+) (v1: vector 'a) (v2: vector 'a) : vector 'a
 
   predicate has_length (v: vector 'a) (i: int)
   predicate valid_index (v: vector 'a) (i: index) = 0 <= i < length v
 
-  function mapi (v: vector 'a) (f: index -> 'a -> 'b) : vector 'b
-  function map (v: vector 'a) (f: 'a -> 'b) : vector 'b
-  function map2 (v1: vector 'a) (v2: vector 'b) (f: 'a -> 'b -> 'c) : vector 'c
+  val function mapi (v: vector 'a) (f: index -> 'a -> 'b) : vector 'b
+  val function map (v: vector 'a) (f: 'a -> 'b) : vector 'b
+  val function map2 (v1: vector 'a) (v2: vector 'b) (f: 'a -> 'b -> 'c) : vector 'c
 
   predicate forall_ (v: vector 'a) (f: 'a -> bool) =
     forall i: index. valid_index v i -> f v[i]
 
-  function foreach (v: vector 'a) (f: 'a -> 'b) : vector 'b =
+  let function foreach (v: vector 'a) (f: 'a -> 'b) : vector 'b =
     map v f
 
   predicate forall2 (v1: vector 'a) (v2: vector 'b) (f: 'a -> 'b -> bool) =
     length(v1) = length(v2) -> forall i: index. valid_index v1 i -> f v1[i] v2[i]
 
-  function foreach2 (v1: vector 'a) (v2: vector 'b) (f: 'a -> 'b -> 'c) : vector 'c =
+  let function foreach2 (v1: vector 'a) (v2: vector 'b) (f: 'a -> 'b -> 'c) : vector 'c =
     map2 v1 v2 f
 
 end
diff --git a/tests/acasxu.t b/tests/acasxu.t
index 5ba5bb7eeaefe1a49d74d0741b0b1b3db37d2066..0d19a6594cbffbba1f011a43458c1e182c8ba949 100644
--- a/tests/acasxu.t
+++ b/tests/acasxu.t
@@ -13,24 +13,27 @@ Test verify on acasxu
   >   use caisar.model.NN
   > 
   >   constant nn: model nn = read_model "TestNetwork.nnet"
+  >   let constant nn_onnx: model nn = read_model "../examples/acasxu/nets/onnx/ACASXU_1_1.onnx"
+  >   let constant nn_onnx_1_1: model nn = read_model "../examples/acasxu/nets/onnx/ACASXU_1_1.onnx"
+  >   let constant nn_onnx_1_9: model nn = read_model "../examples/acasxu/nets/onnx/ACASXU_1_9.onnx"
   > 
   >   type input = vector t
   > 
-  >   constant distance_to_intruder: int = 0
-  >   constant angle_to_intruder: int = 1
-  >   constant intruder_heading: int = 2
-  >   constant speed: int = 3
-  >   constant intruder_speed: int = 4
+  >   let constant distance_to_intruder: int = 0
+  >   let constant angle_to_intruder: int = 1
+  >   let constant intruder_heading: int = 2
+  >   let constant speed: int = 3
+  >   let constant intruder_speed: int = 4
   > 
   >   type action = int
   > 
-  >   constant clear_of_conflict: action = 0
-  >   constant weak_left: action = 1
-  >   constant weak_right: action = 2
-  >   constant strong_left: action = 3
-  >   constant strong_right: action = 4
+  >   let constant clear_of_conflict: action = 0
+  >   let constant weak_left: action = 1
+  >   let constant weak_right: action = 2
+  >   let constant strong_left: action = 3
+  >   let constant strong_right: action = 4
   > 
-  >   constant pi: t = 3.141592653589793115997963468544185161590576171875000
+  >   let constant pi: t = 3.141592653589793115997963468544185161590576171875000
   > 
   >   predicate valid_input (i: input) =
   >     (0.0:t) .<= i[distance_to_intruder] .<= (60760.0:t)
@@ -51,7 +54,7 @@ Test verify on acasxu
   >     /\ i[speed] .>= (1145.0:t)
   >     /\ i[intruder_speed] .<= (60.0:t)
   > 
-  >   function denormalize_t (i: t) (mean: t) (range: t) : t = (i .* range) .+ mean
+  >   let function denormalize_t (i: t) (mean: t) (range: t) : t = (i .* range) .+ mean
   > 
   >   function denormalize_by_index (idx: int) (t: t) : t =
   >     if idx = distance_to_intruder then denormalize_t t (19791.0:t) (60261.0:t)
@@ -64,9 +67,87 @@ Test verify on acasxu
   >   function denormalize_input (i:input) : input =
   >     Vector.mapi i denormalize_by_index
   > 
-  >   function denormalize_output (o: t) : t =
+  >   let function denormalize_output (o: t) : t =
   >     denormalize_t o (7.51888402010059753166615337249822914600372314453125:t) (373.9499200000000200816430151462554931640625:t)
   > 
+  >   let function normalize_t (i: t) (mean: t) (range: t) : t = ((i .- mean) ./ range)
+  > 
+  >   let function normalize_by_index (idx: int) (t: t) : t =
+  >     if idx = distance_to_intruder then normalize_t t (19791.0:t) (60261.0:t)
+  >     else if idx = angle_to_intruder then normalize_t t (0.0:t) (6.2831853071800001231395071954466402530670166015625:t)
+  >     else if idx = intruder_heading then normalize_t t (0.0:t) (6.2831853071800001231395071954466402530670166015625:t)
+  >     else if idx = speed then normalize_t t (650.0:t) (1100.0:t)
+  >     else if idx = intruder_speed then normalize_t t (600.0:t) (1200.0:t)
+  >     else t
+  > 
+  >   let function normalize_input (i:input) : input =
+  >     Vector.mapi i normalize_by_index
+  > 
+  >   let runP1 (j: input)
+  >     requires { has_length j 5 }
+  >     requires { valid_input j }
+  >     requires { intruder_distant_and_slow j }
+  >     ensures { result .<= (1500.0:t) }  =
+  >       let i = normalize_input j in
+  >       let o = (nn_onnx @@ i)[clear_of_conflict] in
+  >       (denormalize_output o)
+  > 
+  >   let runP1v2 (j: input)
+  >     requires { has_length j 5 }
+  >     requires { valid_input j }
+  >     requires { intruder_distant_and_slow j }
+  >     ensures { let o1, o2 = result in o1 .<= (1500.0:t) /\ o2 .<= (1500.0:t) }  =
+  >       let i = normalize_input j in
+  >       let o1 = (nn_onnx_1_1 @@ i)[clear_of_conflict] in
+  >       let o2 = (nn_onnx_1_9 @@ i)[clear_of_conflict] in
+  >       (denormalize_output o1), (denormalize_output o2)
+  > 
+  >   let run2 (j: input)
+  >     requires { has_length j 5 }
+  >     requires { valid_input j }
+  >     requires { intruder_distant_and_slow j }
+  >     ensures { let o1, o2 = result in o1 .>= o2 /\ o2 .>= o1 }  =
+  >       let i = normalize_input j in
+  >       let o1 = (nn_onnx @@ i)[clear_of_conflict] in
+  >       let o2 = (nn_onnx @@ i)[weak_left] in
+  >       o1, o2
+  > 
+  >   let run2v2 (i: input) (j: input)
+  >     requires { has_length i 5 }
+  >     requires { has_length j 5 }
+  >     requires { valid_input i }
+  >     requires { intruder_distant_and_slow i }
+  >     requires { intruder_distant_and_slow j }
+  >     ensures { let o1, o2 = result in o1 .>= o2 /\ o2 .>= o1 }  =
+  >       let i_ = normalize_input i in
+  >       let j_ = normalize_input j in
+  >       let o1 = (nn_onnx @@ i_)[clear_of_conflict] in
+  >       let o2 = (nn_onnx @@ j_)[clear_of_conflict] in
+  >       o1, o2
+  > 
+  >   let run3 (j: input) (eps:t)
+  >     requires { has_length j 5 }
+  >     requires { valid_input j }
+  >     requires { intruder_distant_and_slow j }
+  >     requires { (-1.:t) .<= eps .<= (1.:t) }
+  >     ensures { let o1, o2 = result in o1 .>= o2 /\ o2 .>= o1 }  =
+  >       let i = normalize_input j in
+  >       let j2 = Vector.mapi j (fun i x -> if i = distance_to_intruder then x .+ eps else x) in
+  >       let i2 = normalize_input j2 in
+  >       let o1 = (nn_onnx @@ i)[clear_of_conflict] in
+  >       let o2 = (nn_onnx @@ i2)[clear_of_conflict] in
+  >       o1, o2
+  > 
+  >  let robust_output (j: input) (eps:t)
+  >    requires { has_length j 5 }
+  >    requires { valid_input j }
+  >    requires { (0.0:t) .<= eps .<= (0.375:t) }
+  >    ensures { let o1, o2 = result in o1[0] .- o2[0] .- eps .>= (0.0:t) /\ o1[0] .- o2[0] .- eps .<= (0.0:t) } =
+  >      let i = normalize_input j in
+  >      let o1 = (nn_onnx @@ i) in
+  >      let o2 = (nn_onnx @@ i) in
+  >      o1, o2
+  > 
   >   goal P1:
   >     forall i: input.
   >       has_length i 5 ->
@@ -93,7 +174,480 @@ Test verify on acasxu
   > end
   > EOF
 
-  $ caisar verify --prover PyRAT --ltag=ProverSpec file.mlw
+  $ caisar verify --prover PyRAT --ltag=ProverSpec --ltag=InterpretGoal --ltag=NN-Gen-Term file.mlw
+  [DEBUG]{InterpretGoal} Interpreted formula for goal 'runP1'vc':
+  forall x:t, x1:t, x2:t, x3:t, x4:t.
+   ((le (0.0:t) x /\ le x (60760.0:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x1 /\
+     le x1 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x2 /\
+     le x2 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (100.0:t) x3 /\ le x3 (1200.0:t)) /\ le (0.0:t) x4 /\ le x4 (1200.0:t)) /\
+   le (55947.6909999999988940544426441192626953125:t) x /\
+   le (1145.0:t) x3 /\ le x4 (60.0:t) ->
+   le
+   (add RNE
+    (mul RNE
+     (nn_onnx
+      @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+         (div RNE (sub RNE x1 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x2 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+         (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+     [0] (373.9499200000000200816430151462554931640625:t))
+    (7.51888402010059753166615337249822914600372314453125:t))
+   (1500.0:t)
+  nn_onnx,
+  (Interpreter_types.Model
+     (Interpreter_types.ONNX, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename =
+                                "./../examples/acasxu/nets/onnx/ACASXU_1_1.onnx";
+                                nn_format = <nier> }))
+  vector,
+  5
+  [DEBUG]{NN-Gen-Term} new goal:le y (1500.0:t)
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  0.0 <= x0
+  x0 <= 60760.0
+  -3.141592653589793115997963468544185161590576171875 <= x1
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  -3.141592653589793115997963468544185161590576171875 <= x2
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  100.0 <= x3
+  x3 <= 1200.0
+  0.0 <= x4
+  x4 <= 1200.0
+  55947.6909999999988940544426441192626953125 <= x0
+  1145.0 <= x3
+  x4 <= 60.0
+  y0 <= 1500.0
+  
+  [DEBUG]{InterpretGoal} Interpreted formula for goal 'runP1v2'vc':
+  forall x:t, x1:t, x2:t, x3:t, x4:t.
+   ((le (0.0:t) x /\ le x (60760.0:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x1 /\
+     le x1 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x2 /\
+     le x2 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (100.0:t) x3 /\ le x3 (1200.0:t)) /\ le (0.0:t) x4 /\ le x4 (1200.0:t)) /\
+   le (55947.6909999999988940544426441192626953125:t) x /\
+   le (1145.0:t) x3 /\ le x4 (60.0:t) ->
+   le
+   (add RNE
+    (mul RNE
+     (nn_onnx
+      @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+         (div RNE (sub RNE x1 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x2 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+         (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+     [0] (373.9499200000000200816430151462554931640625:t))
+    (7.51888402010059753166615337249822914600372314453125:t))
+   (1500.0:t) /\
+   le
+   (add RNE
+    (mul RNE
+     (nn_onnx1
+      @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+         (div RNE (sub RNE x1 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x2 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+         (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+     [0] (373.9499200000000200816430151462554931640625:t))
+    (7.51888402010059753166615337249822914600372314453125:t))
+   (1500.0:t)
+  nn_onnx1,
+  (Interpreter_types.Model
+     (Interpreter_types.ONNX, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename =
+                                "./../examples/acasxu/nets/onnx/ACASXU_1_9.onnx";
+                                nn_format = <nier> }))
+  nn_onnx,
+  (Interpreter_types.Model
+     (Interpreter_types.ONNX, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename =
+                                "./../examples/acasxu/nets/onnx/ACASXU_1_1.onnx";
+                                nn_format = <nier> }))
+  vector,
+  5
+  [DEBUG]{NN-Gen-Term} new goal:le y1 (1500.0:t) /\ le y2 (1500.0:t)
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  0.0 <= x0
+  x0 <= 60760.0
+  -3.141592653589793115997963468544185161590576171875 <= x1
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  -3.141592653589793115997963468544185161590576171875 <= x2
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  100.0 <= x3
+  x3 <= 1200.0
+  0.0 <= x4
+  x4 <= 1200.0
+  55947.6909999999988940544426441192626953125 <= x0
+  1145.0 <= x3
+  x4 <= 60.0
+  y0 <= 1500.0 and y1 <= 1500.0
+  
+  [DEBUG]{InterpretGoal} Interpreted formula for goal 'run2'vc':
+  forall x:t, x1:t, x2:t, x3:t, x4:t.
+   ((le (0.0:t) x /\ le x (60760.0:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x1 /\
+     le x1 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x2 /\
+     le x2 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (100.0:t) x3 /\ le x3 (1200.0:t)) /\ le (0.0:t) x4 /\ le x4 (1200.0:t)) /\
+   le (55947.6909999999988940544426441192626953125:t) x /\
+   le (1145.0:t) x3 /\ le x4 (60.0:t) ->
+   le
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [1]
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [0] /\
+   le
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [0]
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [1]
+  nn_onnx,
+  (Interpreter_types.Model
+     (Interpreter_types.ONNX, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename =
+                                "./../examples/acasxu/nets/onnx/ACASXU_1_1.onnx";
+                                nn_format = <nier> }))
+  vector,
+  5
+  [DEBUG]{NN-Gen-Term} new goal:le y3 y4 /\ le y4 y3
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  0.0 <= x0
+  x0 <= 60760.0
+  -3.141592653589793115997963468544185161590576171875 <= x1
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  -3.141592653589793115997963468544185161590576171875 <= x2
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  100.0 <= x3
+  x3 <= 1200.0
+  0.0 <= x4
+  x4 <= 1200.0
+  55947.6909999999988940544426441192626953125 <= x0
+  1145.0 <= x3
+  x4 <= 60.0
+  y0 <= y1 and y1 <= y0
+  
+  [DEBUG]{InterpretGoal} Interpreted formula for goal 'run2v2'vc':
+  forall x:t, x1:t, x2:t, x3:t, x4:t, x5:t, x6:t, x7:t, x8:t, x9:t.
+   ((le (0.0:t) x /\ le x (60760.0:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x1 /\
+     le x1 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x2 /\
+     le x2 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (100.0:t) x3 /\ le x3 (1200.0:t)) /\ le (0.0:t) x4 /\ le x4 (1200.0:t)) /\
+   (le (55947.6909999999988940544426441192626953125:t) x /\
+    le (1145.0:t) x3 /\ le x4 (60.0:t)) /\
+   le (55947.6909999999988940544426441192626953125:t) x5 /\
+   le (1145.0:t) x8 /\ le x9 (60.0:t) ->
+   le
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x5 (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x6 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x7 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x8 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x9 (600.0:t)) (1200.0:t)))
+   [0]
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [0] /\
+   le
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [0]
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x5 (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x6 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x7 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x8 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x9 (600.0:t)) (1200.0:t)))
+   [0]
+  nn_onnx,
+  (Interpreter_types.Model
+     (Interpreter_types.ONNX, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename =
+                                "./../examples/acasxu/nets/onnx/ACASXU_1_1.onnx";
+                                nn_format = <nier> }))
+  vector,
+  5
+  [DEBUG]{NN-Gen-Term} new goal:le y5 y6 /\ le y6 y5
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  0.0 <= x0
+  x0 <= 60760.0
+  -3.141592653589793115997963468544185161590576171875 <= x1
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  -3.141592653589793115997963468544185161590576171875 <= x2
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  100.0 <= x3
+  x3 <= 1200.0
+  0.0 <= x4
+  x4 <= 1200.0
+  55947.6909999999988940544426441192626953125 <= x0
+  1145.0 <= x3
+  x4 <= 60.0
+  55947.6909999999988940544426441192626953125 <= x5
+  1145.0 <= x8
+  x9 <= 60.0
+  y0 <= y1 and y1 <= y0
+  
+  [DEBUG]{InterpretGoal} Interpreted formula for goal 'run3'vc':
+  forall x:t, x1:t, x2:t, x3:t, x4:t, eps:t.
+   ((le (0.0:t) x /\ le x (60760.0:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x1 /\
+     le x1 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x2 /\
+     le x2 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (100.0:t) x3 /\ le x3 (1200.0:t)) /\ le (0.0:t) x4 /\ le x4 (1200.0:t)) /\
+   (le (55947.6909999999988940544426441192626953125:t) x /\
+    le (1145.0:t) x3 /\ le x4 (60.0:t)) /\
+   le ((- 1.0):t) eps /\ le eps (1.0:t) ->
+   le
+   (nn_onnx
+    @@ vector (div RNE (sub RNE (add RNE x eps) (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [0]
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [0] /\
+   le
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [0]
+   (nn_onnx
+    @@ vector (div RNE (sub RNE (add RNE x eps) (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [0]
+  nn_onnx,
+  (Interpreter_types.Model
+     (Interpreter_types.ONNX, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename =
+                                "./../examples/acasxu/nets/onnx/ACASXU_1_1.onnx";
+                                nn_format = <nier> }))
+  vector,
+  5
+  [DEBUG]{NN-Gen-Term} new goal:le y7 y8 /\ le y8 y7
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  0.0 <= x0
+  x0 <= 60760.0
+  -3.141592653589793115997963468544185161590576171875 <= x1
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  -3.141592653589793115997963468544185161590576171875 <= x2
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  100.0 <= x3
+  x3 <= 1200.0
+  0.0 <= x4
+  x4 <= 1200.0
+  55947.6909999999988940544426441192626953125 <= x0
+  1145.0 <= x3
+  x4 <= 60.0
+  -1.0 <= x5
+  x5 <= 1.0
+  y0 <= y1 and y1 <= y0
+  
+  [DEBUG]{InterpretGoal} Interpreted formula for goal 'robust_output'vc':
+  forall x:t, x1:t, x2:t, x3:t, x4:t, eps:t.
+   ((le (0.0:t) x /\ le x (60760.0:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x1 /\
+     le x1 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x2 /\
+     le x2 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (100.0:t) x3 /\ le x3 (1200.0:t)) /\ le (0.0:t) x4 /\ le x4 (1200.0:t)) /\
+   le (0.0:t) eps /\ le eps (0.375:t) ->
+   le (0.0:t)
+   (sub RNE
+    (sub RNE
+     (nn_onnx
+      @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+         (div RNE (sub RNE x1 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x2 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+         (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+     [0]
+     (nn_onnx
+      @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+         (div RNE (sub RNE x1 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x2 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+         (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+     [0])
+    eps) /\
+   le
+   (sub RNE
+    (sub RNE
+     (nn_onnx
+      @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+         (div RNE (sub RNE x1 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x2 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+         (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+     [0]
+     (nn_onnx
+      @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+         (div RNE (sub RNE x1 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x2 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+         (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+     [0])
+    eps)
+   (0.0:t)
+  nn_onnx,
+  (Interpreter_types.Model
+     (Interpreter_types.ONNX, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename =
+                                "./../examples/acasxu/nets/onnx/ACASXU_1_1.onnx";
+                                nn_format = <nier> }))
+  vector,
+  5
+  [DEBUG]{NN-Gen-Term} new goal:le (0.0:t) y9 /\ le y9 (0.0:t)
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  0.0 <= x0
+  x0 <= 60760.0
+  -3.141592653589793115997963468544185161590576171875 <= x1
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  -3.141592653589793115997963468544185161590576171875 <= x2
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  100.0 <= x3
+  x3 <= 1200.0
+  0.0 <= x4
+  x4 <= 1200.0
+  0.0 <= x5
+  x5 <= 0.375
+  0.0 <= y0 and y0 <= 0.0
+  
+  [DEBUG]{InterpretGoal} Interpreted formula for goal 'P1':
+  forall x:t, x1:t, x2:t, x3:t, x4:t.
+   ((le (0.0:t) (add RNE (mul RNE x (60261.0:t)) (19791.0:t)) /\
+     le (add RNE (mul RNE x (60261.0:t)) (19791.0:t)) (60760.0:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t))
+     (add RNE
+      (mul RNE x1 (6.2831853071800001231395071954466402530670166015625:t))
+      (0.0:t)) /\
+     le
+     (add RNE
+      (mul RNE x1 (6.2831853071800001231395071954466402530670166015625:t))
+      (0.0:t))
+     (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t))
+     (add RNE
+      (mul RNE x2 (6.2831853071800001231395071954466402530670166015625:t))
+      (0.0:t)) /\
+     le
+     (add RNE
+      (mul RNE x2 (6.2831853071800001231395071954466402530670166015625:t))
+      (0.0:t))
+     (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (100.0:t) (add RNE (mul RNE x3 (1100.0:t)) (650.0:t)) /\
+     le (add RNE (mul RNE x3 (1100.0:t)) (650.0:t)) (1200.0:t)) /\
+    le (0.0:t) (add RNE (mul RNE x4 (1200.0:t)) (600.0:t)) /\
+    le (add RNE (mul RNE x4 (1200.0:t)) (600.0:t)) (1200.0:t)) /\
+   le (55947.6909999999988940544426441192626953125:t)
+   (add RNE (mul RNE x (60261.0:t)) (19791.0:t)) /\
+   le (1145.0:t) (add RNE (mul RNE x3 (1100.0:t)) (650.0:t)) /\
+   le (add RNE (mul RNE x4 (1200.0:t)) (600.0:t)) (60.0:t) ->
+   le
+   (add RNE
+    (mul RNE (nn_nnet @@ vector x x1 x2 x3 x4)[0]
+     (373.9499200000000200816430151462554931640625:t))
+    (7.51888402010059753166615337249822914600372314453125:t))
+   (1500.0:t)
+  vector, 5
+  nn_nnet,
+  (Interpreter_types.Model
+     (Interpreter_types.NNet, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename = "./TestNetwork.nnet";
+                                nn_format = Language.NNet }))
   [DEBUG]{ProverSpec} Prover-tailored specification:
   -0.328421367053318091766556108268559910356998443603515625 <= x0
   x0 <= 0.67985927880386987087746319957659579813480377197265625
@@ -110,6 +664,64 @@ Test verify on acasxu
   x4 <= -0.450000000000000011102230246251565404236316680908203125
   y0 <= 3.991125645861615112153231166303157806396484375
   
+  [DEBUG]{InterpretGoal} Interpreted formula for goal 'P3':
+  forall x:t, x1:t, x2:t, x3:t, x4:t.
+   ((le (0.0:t) (add RNE (mul RNE x (60261.0:t)) (19791.0:t)) /\
+     le (add RNE (mul RNE x (60261.0:t)) (19791.0:t)) (60760.0:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t))
+     (add RNE
+      (mul RNE x1 (6.2831853071800001231395071954466402530670166015625:t))
+      (0.0:t)) /\
+     le
+     (add RNE
+      (mul RNE x1 (6.2831853071800001231395071954466402530670166015625:t))
+      (0.0:t))
+     (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t))
+     (add RNE
+      (mul RNE x2 (6.2831853071800001231395071954466402530670166015625:t))
+      (0.0:t)) /\
+     le
+     (add RNE
+      (mul RNE x2 (6.2831853071800001231395071954466402530670166015625:t))
+      (0.0:t))
+     (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (100.0:t) (add RNE (mul RNE x3 (1100.0:t)) (650.0:t)) /\
+     le (add RNE (mul RNE x3 (1100.0:t)) (650.0:t)) (1200.0:t)) /\
+    le (0.0:t) (add RNE (mul RNE x4 (1200.0:t)) (600.0:t)) /\
+    le (add RNE (mul RNE x4 (1200.0:t)) (600.0:t)) (1200.0:t)) /\
+   ((le (1500.0:t) (add RNE (mul RNE x (60261.0:t)) (19791.0:t)) /\
+     le (add RNE (mul RNE x (60261.0:t)) (19791.0:t)) (1800.0:t)) /\
+    le (neg (0.059999999999999997779553950749686919152736663818359375:t))
+    (add RNE
+     (mul RNE x1 (6.2831853071800001231395071954466402530670166015625:t))
+     (0.0:t)) /\
+    le
+    (add RNE
+     (mul RNE x1 (6.2831853071800001231395071954466402530670166015625:t))
+     (0.0:t))
+    (0.059999999999999997779553950749686919152736663818359375:t)) /\
+   le (3.100000000000000088817841970012523233890533447265625:t)
+   (add RNE
+    (mul RNE x2 (6.2831853071800001231395071954466402530670166015625:t))
+    (0.0:t)) /\
+   le (980.0:t) (add RNE (mul RNE x3 (1100.0:t)) (650.0:t)) /\
+   le (960.0:t) (add RNE (mul RNE x4 (1200.0:t)) (600.0:t)) ->
+   not (((lt (nn_nnet @@ vector x x1 x2 x3 x4)[0]
+          (nn_nnet @@ vector x x1 x2 x3 x4)[1] /\
+          lt (nn_nnet @@ vector x x1 x2 x3 x4)[0]
+          (nn_nnet @@ vector x x1 x2 x3 x4)[2]) /\
+         lt (nn_nnet @@ vector x x1 x2 x3 x4)[0]
+         (nn_nnet @@ vector x x1 x2 x3 x4)[3]) /\
+        lt (nn_nnet @@ vector x x1 x2 x3 x4)[0]
+        (nn_nnet @@ vector x x1 x2 x3 x4)[4])
+  vector, 5
+  nn_nnet,
+  (Interpreter_types.Model
+     (Interpreter_types.NNet, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename = "./TestNetwork.nnet";
+                                nn_format = Language.NNet }))
   [DEBUG]{ProverSpec} Prover-tailored specification:
   -0.328421367053318091766556108268559910356998443603515625 <= x0
   x0 <= 0.67985927880386987087746319957659579813480377197265625
@@ -130,6 +742,12 @@ Test verify on acasxu
   0.299999999999999988897769753748434595763683319091796875 <= x4
   (((y0 >= y1 or y0 >= y2) or y0 >= y3) or y0 >= y4)
   
+  Goal runP1'vc: Unknown ()
+  Goal runP1v2'vc: Unknown ()
+  Goal run2'vc: Unknown ()
+  Goal run2v2'vc: Unknown ()
+  Goal run3'vc: Unknown ()
+  Goal robust_output'vc: Unknown ()
   Goal P1: Unknown ()
   Goal P3: Unknown ()
 
@@ -152,6 +770,307 @@ Test verify on acasxu
   ;; X_4
   (declare-const X_4 Real)
   
+  ;; Requires
+  (assert (>= X_0 0.0))
+  (assert (<= X_0 60760.0))
+  (assert (>= X_1 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_1 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_2 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_2 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_3 100.0))
+  (assert (<= X_3 1200.0))
+  (assert (>= X_4 0.0))
+  (assert (<= X_4 1200.0))
+  
+  ;; Requires
+  (assert (>= X_0 55947.6909999999988940544426441192626953125))
+  (assert (>= X_3 1145.0))
+  (assert (<= X_4 60.0))
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Goal runP1'vc
+  (assert (>= Y_0 1500.0))
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; Requires
+  (assert (>= X_0 0.0))
+  (assert (<= X_0 60760.0))
+  (assert (>= X_1 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_1 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_2 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_2 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_3 100.0))
+  (assert (<= X_3 1200.0))
+  (assert (>= X_4 0.0))
+  (assert (<= X_4 1200.0))
+  
+  ;; Requires
+  (assert (>= X_0 55947.6909999999988940544426441192626953125))
+  (assert (>= X_3 1145.0))
+  (assert (<= X_4 60.0))
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Y_1
+  (declare-const Y_1 Real)
+  
+  ;; Goal runP1v2'vc
+  (assert (or (and (>= Y_0 1500.0))
+              (and (>= Y_1 1500.0))
+              ))
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; Requires
+  (assert (>= X_0 0.0))
+  (assert (<= X_0 60760.0))
+  (assert (>= X_1 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_1 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_2 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_2 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_3 100.0))
+  (assert (<= X_3 1200.0))
+  (assert (>= X_4 0.0))
+  (assert (<= X_4 1200.0))
+  
+  ;; Requires
+  (assert (>= X_0 55947.6909999999988940544426441192626953125))
+  (assert (>= X_3 1145.0))
+  (assert (<= X_4 60.0))
+  
+  ;; Y_1
+  (declare-const Y_1 Real)
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Goal run2'vc
+  (assert (or (and (>= Y_0 Y_1))
+              (and (>= Y_1 Y_0))
+              ))
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; X_5
+  (declare-const X_5 Real)
+  
+  ;; X_6
+  (declare-const X_6 Real)
+  
+  ;; X_7
+  (declare-const X_7 Real)
+  
+  ;; X_8
+  (declare-const X_8 Real)
+  
+  ;; X_9
+  (declare-const X_9 Real)
+  
+  ;; Requires
+  (assert (>= X_0 0.0))
+  (assert (<= X_0 60760.0))
+  (assert (>= X_1 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_1 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_2 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_2 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_3 100.0))
+  (assert (<= X_3 1200.0))
+  (assert (>= X_4 0.0))
+  (assert (<= X_4 1200.0))
+  
+  ;; Requires
+  (assert (>= X_0 55947.6909999999988940544426441192626953125))
+  (assert (>= X_3 1145.0))
+  (assert (<= X_4 60.0))
+  
+  ;; Requires
+  (assert (>= X_5 55947.6909999999988940544426441192626953125))
+  (assert (>= X_8 1145.0))
+  (assert (<= X_9 60.0))
+  
+  ;; Y_1
+  (declare-const Y_1 Real)
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Goal run2v2'vc
+  (assert (or (and (>= Y_0 Y_1))
+              (and (>= Y_1 Y_0))
+              ))
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; X_5
+  (declare-const X_5 Real)
+  
+  ;; Requires
+  (assert (>= X_0 0.0))
+  (assert (<= X_0 60760.0))
+  (assert (>= X_1 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_1 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_2 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_2 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_3 100.0))
+  (assert (<= X_3 1200.0))
+  (assert (>= X_4 0.0))
+  (assert (<= X_4 1200.0))
+  
+  ;; Requires
+  (assert (>= X_0 55947.6909999999988940544426441192626953125))
+  (assert (>= X_3 1145.0))
+  (assert (<= X_4 60.0))
+  
+  ;; H
+  (assert (>= X_5 -1.0))
+  
+  ;; H
+  (assert (<= X_5 1.0))
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Y_1
+  (declare-const Y_1 Real)
+  
+  ;; Goal run3'vc
+  (assert (or (and (>= Y_0 Y_1))
+              (and (>= Y_1 Y_0))
+              ))
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; X_5
+  (declare-const X_5 Real)
+  
+  ;; Requires
+  (assert (>= X_0 0.0))
+  (assert (<= X_0 60760.0))
+  (assert (>= X_1 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_1 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_2 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_2 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_3 100.0))
+  (assert (<= X_3 1200.0))
+  (assert (>= X_4 0.0))
+  (assert (<= X_4 1200.0))
+  
+  ;; H
+  (assert (>= X_5 0.0))
+  
+  ;; H
+  (assert (<= X_5 0.375))
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Goal robust_output'vc
+  (assert (or (and (<= Y_0 0.0))
+              (and (>= Y_0 0.0))
+              ))
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
   ;; H
   (assert (>= X_0 -0.328421367053318091766556108268559910356998443603515625))
   
@@ -299,10 +1218,200 @@ Test verify on acasxu
   (assert (<= Y_0 Y_3))
   (assert (<= Y_0 Y_4))
   
+  Goal runP1'vc: Unknown ()
+  Goal runP1v2'vc: Unknown ()
+  Goal run2'vc: Unknown ()
+  Goal run2v2'vc: Unknown ()
+  Goal run3'vc: Unknown ()
+  Goal robust_output'vc: Unknown ()
   Goal P1: Unknown ()
   Goal P3: Unknown ()
 
   $ caisar verify --prover Marabou --ltag=ProverSpec file.mlw
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  y0 >= 1500.0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  y0 >= 1500.0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  y1 >= 1500.0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  +y0 -y1 >= 0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  +y1 -y0 >= 0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  x5 >= 55947.6909999999988940544426441192626953125
+  x8 >= 1145.0
+  x9 <= 60.0
+  +y0 -y1 >= 0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  x5 >= 55947.6909999999988940544426441192626953125
+  x8 >= 1145.0
+  x9 <= 60.0
+  +y1 -y0 >= 0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  x5 >= -1.0
+  x5 <= 1.0
+  +y0 -y1 >= 0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  x5 >= -1.0
+  x5 <= 1.0
+  +y1 -y0 >= 0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x5 >= 0.0
+  x5 <= 0.375
+  y0 <= 0.0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x5 >= 0.0
+  x5 <= 0.375
+  y0 >= 0.0
+  
   [DEBUG]{ProverSpec} Prover-tailored specification:
   x0 >= -0.328421367053318091766556108268559910356998443603515625
   x0 <= 0.67985927880386987087746319957659579813480377197265625
@@ -342,5 +1451,11 @@ Test verify on acasxu
   +y0 -y3 <= 0
   +y0 -y4 <= 0
   
+  Goal runP1'vc: Unknown ()
+  Goal runP1v2'vc: Unknown ()
+  Goal run2'vc: Unknown ()
+  Goal run2v2'vc: Unknown ()
+  Goal run3'vc: Unknown ()
+  Goal robust_output'vc: Unknown ()
   Goal P1: Unknown ()
   Goal P3: Unknown ()
diff --git a/tests/acasxu_ci.t b/tests/acasxu_ci.t
new file mode 100644
index 0000000000000000000000000000000000000000..c55a2f7781913e9a97a0e8cc1562040333181606
--- /dev/null
+++ b/tests/acasxu_ci.t
@@ -0,0 +1,944 @@
+Test verify on acasxu
+  $ . ./setup_env.sh
+
+  $ bin/pyrat.py --version
+  PyRAT 1.1
+
+  $ mkdir out
+  $ export CAISAR_ONNX_OUTPUT_DIR=out
+
+  $ cat > file.mlw <<EOF
+  > theory T
+  >   use ieee_float.Float64
+  >   use int.Int
+  >   use caisar.types.Vector
+  >   use caisar.model.Model
+  >   use caisar.model.NN
+  > 
+  >   let constant nn: model nn = read_model "../examples/acasxu/nets/onnx/ACASXU_1_1.onnx"
+  >   let constant nn_: model nn = read_model "../examples/acasxu/nets/onnx/ACASXU_1_9.onnx"
+  > 
+  >   type input = vector t
+  > 
+  >   let constant distance_to_intruder: int = 0
+  >   let constant angle_to_intruder: int = 1
+  >   let constant intruder_heading: int = 2
+  >   let constant speed: int = 3
+  >   let constant intruder_speed: int = 4
+  > 
+  >   type action = int
+  > 
+  >   let constant clear_of_conflict: action = 0
+  >   let constant weak_left: action = 1
+  >   let constant weak_right: action = 2
+  >   let constant strong_left: action = 3
+  >   let constant strong_right: action = 4
+  > 
+  >   let constant pi: t = 3.141592653589793115997963468544185161590576171875000
+  > 
+  >   predicate valid_input (i: input) =
+  >     (0.0:t) .<= i[distance_to_intruder] .<= (60760.0:t)
+  >     /\ .- pi .<= i[angle_to_intruder] .<= pi
+  >     /\ .- pi .<= i[intruder_heading] .<= pi
+  >     /\ (100.0:t) .<= i[speed] .<= (1200.0:t)
+  >     /\ (0.0:t) .<= i[intruder_speed] .<= (1200.0:t)
+  > 
+  >   predicate valid_action (a: action) = clear_of_conflict <= a <= strong_right
+  > 
+  >   predicate advises (o: vector t) (a: action) =
+  >     valid_action a ->
+  >     forall b: action. valid_action b ->  a <> b -> o[a] .< o[b]
+  > 
+  >   predicate intruder_distant_and_slow (i: input) =
+  >     i[distance_to_intruder] .>= (55947.6909999999988940544426441192626953125:t)
+  >     /\ i[speed] .>= (1145.0:t)
+  >     /\ i[intruder_speed] .<= (60.0:t)
+  > 
+  >   let function normalize_t (i: t) (mean: t) (range: t) : t = ((i .- mean) ./ range)
+  > 
+  >   let function normalize_by_index (idx: int) (t: t) : t =
+  >     if idx = distance_to_intruder then normalize_t t (19791.0:t) (60261.0:t)
+  >     else if idx = angle_to_intruder then normalize_t t (0.0:t) (6.2831853071800001231395071954466402530670166015625:t)
+  >     else if idx = intruder_heading then normalize_t t (0.0:t) (6.2831853071800001231395071954466402530670166015625:t)
+  >     else if idx = speed then normalize_t t (650.0:t) (1100.0:t)
+  >     else if idx = intruder_speed then normalize_t t (600.0:t) (1200.0:t)
+  >     else t
+  > 
+  >   let function normalize_input (i:input) : input =
+  >     Vector.mapi i normalize_by_index
+  > 
+  >   let function denormalize_t (i: t) (mean: t) (range: t) : t = (i .* range) .+ mean
+  > 
+  >   let function denormalize_output (o: t) : t =
+  >     denormalize_t o (7.51888402010059753166615337249822914600372314453125:t) (373.9499200000000200816430151462554931640625:t)
+  > 
+  >   let run (j:input) : t
+  >      requires { has_length j 5 }
+  >      requires { valid_input j }
+  >      requires { intruder_distant_and_slow j }
+  >      ensures { result .<= (1500.0:t) }  =
+  >       let i = normalize_input j in
+  >       let o = (nn@@i)[clear_of_conflict] in
+  >       (denormalize_output o)
+  > 
+  >   let run1 (j: input)
+  >     requires { has_length j 5 }
+  >     requires { valid_input j }
+  >     requires { intruder_distant_and_slow j }
+  >     ensures { let o1, o2 = result in o1 .<= (1500.0:t) /\ o2 .<= (1500.0:t) }  =
+  >       let i = normalize_input j in
+  >       let o1 = (nn @@ i)[clear_of_conflict] in
+  >       let o2 = (nn_ @@ i)[clear_of_conflict] in
+  >       (denormalize_output o1), (denormalize_output o2)
+  > 
+  >   let run2 (j:input)
+  >      requires { has_length j 5 }
+  >      requires { valid_input j }
+  >      requires { intruder_distant_and_slow j }
+  >      ensures { let o1, o2 = result in o1 .>= o2 /\ o2 .>= o1 }  =
+  >       let i = normalize_input j in
+  >       let o1 = (nn@@i)[clear_of_conflict] in
+  >       let o2 = (nn@@i)[weak_left] in
+  >       o1, o2
+  > 
+  >     let run3 (j:input) (eps:t)
+  >      requires { has_length j 5 }
+  >      requires { valid_input j }
+  >      requires { intruder_distant_and_slow j }
+  >      requires { (-1.:t) .<= eps .<= (1.:t) }
+  >      ensures { let o1, o2 = result in o1 .>= o2 /\ o2 .>= o1 }  =
+  >       let i = normalize_input j in
+  >       let j2 = Vector.mapi j (fun i x -> if i = distance_to_intruder then x .+ eps else x) in
+  >       let i2 = normalize_input j2 in
+  >       let o1 = (nn@@i)[clear_of_conflict] in
+  >       let o2 = (nn@@i2)[clear_of_conflict] in
+  >       o1, o2
+  > 
+  >   predicate directly_ahead (i: input) =
+  >     (1500.0:t) .<= i[distance_to_intruder] .<= (1800.0:t)
+  >     /\ .- (0.059999999999999997779553950749686919152736663818359375:t) .<= i[angle_to_intruder] .<= (0.059999999999999997779553950749686919152736663818359375:t)
+  > 
+  >   predicate moving_towards (i: input) =
+  >     i[intruder_heading] .>= (3.100000000000000088817841970012523233890533447265625:t)
+  >     /\ i[speed] .>= (980.0:t)
+  >     /\ i[intruder_speed] .>= (960.0:t)
+  > 
+  > 
+  >   let runP3 (j: input) : vector t
+  >      requires { has_length j 5 }
+  >      requires { valid_input j }
+  >      requires { directly_ahead j /\ moving_towards j }
+  >      ensures { not (advises result clear_of_conflict) }  =
+  >       let i = normalize_input j in
+  >       nn @@ i
+  > 
+  > end
+  > EOF
+
+  $ caisar verify --prover PyRAT --ltag=ProverSpec --ltag=InterpretGoal file.mlw
+  [DEBUG]{InterpretGoal} Interpreted formula for goal 'run'vc':
+  forall x:t, x1:t, x2:t, x3:t, x4:t.
+   ((le (0.0:t) x /\ le x (60760.0:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x1 /\
+     le x1 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x2 /\
+     le x2 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (100.0:t) x3 /\ le x3 (1200.0:t)) /\ le (0.0:t) x4 /\ le x4 (1200.0:t)) /\
+   le (55947.6909999999988940544426441192626953125:t) x /\
+   le (1145.0:t) x3 /\ le x4 (60.0:t) ->
+   le
+   (add RNE
+    (mul RNE
+     (nn_onnx
+      @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+         (div RNE (sub RNE x1 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x2 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+         (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+     [0] (373.9499200000000200816430151462554931640625:t))
+    (7.51888402010059753166615337249822914600372314453125:t))
+   (1500.0:t)
+  nn_onnx,
+  (Interpreter_types.Model
+     (Interpreter_types.ONNX, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename =
+                                "./../examples/acasxu/nets/onnx/ACASXU_1_1.onnx";
+                                nn_format = <nier> }))
+  vector,
+  5
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  0.0 <= x0
+  x0 <= 60760.0
+  -3.141592653589793115997963468544185161590576171875 <= x1
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  -3.141592653589793115997963468544185161590576171875 <= x2
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  100.0 <= x3
+  x3 <= 1200.0
+  0.0 <= x4
+  x4 <= 1200.0
+  55947.6909999999988940544426441192626953125 <= x0
+  1145.0 <= x3
+  x4 <= 60.0
+  y0 <= 1500.0
+  
+  [DEBUG]{InterpretGoal} Interpreted formula for goal 'run1'vc':
+  forall x:t, x1:t, x2:t, x3:t, x4:t.
+   ((le (0.0:t) x /\ le x (60760.0:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x1 /\
+     le x1 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x2 /\
+     le x2 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (100.0:t) x3 /\ le x3 (1200.0:t)) /\ le (0.0:t) x4 /\ le x4 (1200.0:t)) /\
+   le (55947.6909999999988940544426441192626953125:t) x /\
+   le (1145.0:t) x3 /\ le x4 (60.0:t) ->
+   le
+   (add RNE
+    (mul RNE
+     (nn_onnx
+      @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+         (div RNE (sub RNE x1 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x2 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+         (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+     [0] (373.9499200000000200816430151462554931640625:t))
+    (7.51888402010059753166615337249822914600372314453125:t))
+   (1500.0:t) /\
+   le
+   (add RNE
+    (mul RNE
+     (nn_onnx1
+      @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+         (div RNE (sub RNE x1 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x2 (0.0:t))
+          (6.2831853071800001231395071954466402530670166015625:t))
+         (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+         (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+     [0] (373.9499200000000200816430151462554931640625:t))
+    (7.51888402010059753166615337249822914600372314453125:t))
+   (1500.0:t)
+  nn_onnx,
+  (Interpreter_types.Model
+     (Interpreter_types.ONNX, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename =
+                                "./../examples/acasxu/nets/onnx/ACASXU_1_1.onnx";
+                                nn_format = <nier> }))
+  vector, 5
+  nn_onnx1,
+  (Interpreter_types.Model
+     (Interpreter_types.ONNX, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename =
+                                "./../examples/acasxu/nets/onnx/ACASXU_1_9.onnx";
+                                nn_format = <nier> }))
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  0.0 <= x0
+  x0 <= 60760.0
+  -3.141592653589793115997963468544185161590576171875 <= x1
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  -3.141592653589793115997963468544185161590576171875 <= x2
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  100.0 <= x3
+  x3 <= 1200.0
+  0.0 <= x4
+  x4 <= 1200.0
+  55947.6909999999988940544426441192626953125 <= x0
+  1145.0 <= x3
+  x4 <= 60.0
+  y0 <= 1500.0 and y1 <= 1500.0
+  
+  [DEBUG]{InterpretGoal} Interpreted formula for goal 'run2'vc':
+  forall x:t, x1:t, x2:t, x3:t, x4:t.
+   ((le (0.0:t) x /\ le x (60760.0:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x1 /\
+     le x1 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x2 /\
+     le x2 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (100.0:t) x3 /\ le x3 (1200.0:t)) /\ le (0.0:t) x4 /\ le x4 (1200.0:t)) /\
+   le (55947.6909999999988940544426441192626953125:t) x /\
+   le (1145.0:t) x3 /\ le x4 (60.0:t) ->
+   le
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [1]
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [0] /\
+   le
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [0]
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [1]
+  nn_onnx,
+  (Interpreter_types.Model
+     (Interpreter_types.ONNX, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename =
+                                "./../examples/acasxu/nets/onnx/ACASXU_1_1.onnx";
+                                nn_format = <nier> }))
+  vector,
+  5
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  0.0 <= x0
+  x0 <= 60760.0
+  -3.141592653589793115997963468544185161590576171875 <= x1
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  -3.141592653589793115997963468544185161590576171875 <= x2
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  100.0 <= x3
+  x3 <= 1200.0
+  0.0 <= x4
+  x4 <= 1200.0
+  55947.6909999999988940544426441192626953125 <= x0
+  1145.0 <= x3
+  x4 <= 60.0
+  y0 <= y1 and y1 <= y0
+  
+  [DEBUG]{InterpretGoal} Interpreted formula for goal 'run3'vc':
+  forall x:t, x1:t, x2:t, x3:t, x4:t, eps:t.
+   ((le (0.0:t) x /\ le x (60760.0:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x1 /\
+     le x1 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x2 /\
+     le x2 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (100.0:t) x3 /\ le x3 (1200.0:t)) /\ le (0.0:t) x4 /\ le x4 (1200.0:t)) /\
+   (le (55947.6909999999988940544426441192626953125:t) x /\
+    le (1145.0:t) x3 /\ le x4 (60.0:t)) /\
+   le ((- 1.0):t) eps /\ le eps (1.0:t) ->
+   le
+   (nn_onnx
+    @@ vector (div RNE (sub RNE (add RNE x eps) (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [0]
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [0] /\
+   le
+   (nn_onnx
+    @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [0]
+   (nn_onnx
+    @@ vector (div RNE (sub RNE (add RNE x eps) (19791.0:t)) (60261.0:t))
+       (div RNE (sub RNE x1 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x2 (0.0:t))
+        (6.2831853071800001231395071954466402530670166015625:t))
+       (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+       (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+   [0]
+  nn_onnx,
+  (Interpreter_types.Model
+     (Interpreter_types.ONNX, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename =
+                                "./../examples/acasxu/nets/onnx/ACASXU_1_1.onnx";
+                                nn_format = <nier> }))
+  vector,
+  5
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  0.0 <= x0
+  x0 <= 60760.0
+  -3.141592653589793115997963468544185161590576171875 <= x1
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  -3.141592653589793115997963468544185161590576171875 <= x2
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  100.0 <= x3
+  x3 <= 1200.0
+  0.0 <= x4
+  x4 <= 1200.0
+  55947.6909999999988940544426441192626953125 <= x0
+  1145.0 <= x3
+  x4 <= 60.0
+  -1.0 <= x5
+  x5 <= 1.0
+  y0 <= y1 and y1 <= y0
+  
+  [DEBUG]{InterpretGoal} Interpreted formula for goal 'runP3'vc':
+  forall x:t, x1:t, x2:t, x3:t, x4:t.
+   ((le (0.0:t) x /\ le x (60760.0:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x1 /\
+     le x1 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (neg (3.141592653589793115997963468544185161590576171875:t)) x2 /\
+     le x2 (3.141592653589793115997963468544185161590576171875:t)) /\
+    (le (100.0:t) x3 /\ le x3 (1200.0:t)) /\ le (0.0:t) x4 /\ le x4 (1200.0:t)) /\
+   ((le (1500.0:t) x /\ le x (1800.0:t)) /\
+    le (neg (0.059999999999999997779553950749686919152736663818359375:t)) x1 /\
+    le x1 (0.059999999999999997779553950749686919152736663818359375:t)) /\
+   le (3.100000000000000088817841970012523233890533447265625:t) x2 /\
+   le (980.0:t) x3 /\ le (960.0:t) x4 ->
+   not (((lt
+          (nn_onnx
+           @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+              (div RNE (sub RNE x1 (0.0:t))
+               (6.2831853071800001231395071954466402530670166015625:t))
+              (div RNE (sub RNE x2 (0.0:t))
+               (6.2831853071800001231395071954466402530670166015625:t))
+              (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+              (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+          [0]
+          (nn_onnx
+           @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+              (div RNE (sub RNE x1 (0.0:t))
+               (6.2831853071800001231395071954466402530670166015625:t))
+              (div RNE (sub RNE x2 (0.0:t))
+               (6.2831853071800001231395071954466402530670166015625:t))
+              (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+              (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+          [1] /\
+          lt
+          (nn_onnx
+           @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+              (div RNE (sub RNE x1 (0.0:t))
+               (6.2831853071800001231395071954466402530670166015625:t))
+              (div RNE (sub RNE x2 (0.0:t))
+               (6.2831853071800001231395071954466402530670166015625:t))
+              (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+              (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+          [0]
+          (nn_onnx
+           @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+              (div RNE (sub RNE x1 (0.0:t))
+               (6.2831853071800001231395071954466402530670166015625:t))
+              (div RNE (sub RNE x2 (0.0:t))
+               (6.2831853071800001231395071954466402530670166015625:t))
+              (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+              (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+          [2]) /\
+         lt
+         (nn_onnx
+          @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+             (div RNE (sub RNE x1 (0.0:t))
+              (6.2831853071800001231395071954466402530670166015625:t))
+             (div RNE (sub RNE x2 (0.0:t))
+              (6.2831853071800001231395071954466402530670166015625:t))
+             (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+             (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+         [0]
+         (nn_onnx
+          @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+             (div RNE (sub RNE x1 (0.0:t))
+              (6.2831853071800001231395071954466402530670166015625:t))
+             (div RNE (sub RNE x2 (0.0:t))
+              (6.2831853071800001231395071954466402530670166015625:t))
+             (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+             (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+         [3]) /\
+        lt
+        (nn_onnx
+         @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+            (div RNE (sub RNE x1 (0.0:t))
+             (6.2831853071800001231395071954466402530670166015625:t))
+            (div RNE (sub RNE x2 (0.0:t))
+             (6.2831853071800001231395071954466402530670166015625:t))
+            (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+            (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+        [0]
+        (nn_onnx
+         @@ vector (div RNE (sub RNE x (19791.0:t)) (60261.0:t))
+            (div RNE (sub RNE x1 (0.0:t))
+             (6.2831853071800001231395071954466402530670166015625:t))
+            (div RNE (sub RNE x2 (0.0:t))
+             (6.2831853071800001231395071954466402530670166015625:t))
+            (div RNE (sub RNE x3 (650.0:t)) (1100.0:t))
+            (div RNE (sub RNE x4 (600.0:t)) (1200.0:t)))
+        [4])
+  nn_onnx,
+  (Interpreter_types.Model
+     (Interpreter_types.ONNX, { Language.nn_nb_inputs = 5; nn_nb_outputs = 5;
+                                nn_ty_elt = t;
+                                nn_filename =
+                                "./../examples/acasxu/nets/onnx/ACASXU_1_1.onnx";
+                                nn_format = <nier> }))
+  vector,
+  5
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  0.0 <= x0
+  x0 <= 60760.0
+  -3.141592653589793115997963468544185161590576171875 <= x1
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  -3.141592653589793115997963468544185161590576171875 <= x2
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  100.0 <= x3
+  x3 <= 1200.0
+  0.0 <= x4
+  x4 <= 1200.0
+  1500.0 <= x0
+  x0 <= 1800.0
+  -0.059999999999999997779553950749686919152736663818359375 <= x1
+  x1 <= 0.059999999999999997779553950749686919152736663818359375
+  3.100000000000000088817841970012523233890533447265625 <= x2
+  980.0 <= x3
+  960.0 <= x4
+  (((y0 >= y1 or y0 >= y2) or y0 >= y3) or y0 >= y4)
+  
+  Goal run'vc: Unknown ()
+  Goal run1'vc: Unknown ()
+  Goal run2'vc: Unknown ()
+  Goal run3'vc: Unknown ()
+  Goal runP3'vc: Unknown ()
+
+  $ caisar verify --prover PyRAT --prover-altern VNNLIB --ltag=ProverSpec file.mlw
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; Requires
+  (assert (>= X_0 0.0))
+  (assert (<= X_0 60760.0))
+  (assert (>= X_1 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_1 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_2 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_2 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_3 100.0))
+  (assert (<= X_3 1200.0))
+  (assert (>= X_4 0.0))
+  (assert (<= X_4 1200.0))
+  
+  ;; Requires
+  (assert (>= X_0 55947.6909999999988940544426441192626953125))
+  (assert (>= X_3 1145.0))
+  (assert (<= X_4 60.0))
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Goal run'vc
+  (assert (>= Y_0 1500.0))
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; Requires
+  (assert (>= X_0 0.0))
+  (assert (<= X_0 60760.0))
+  (assert (>= X_1 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_1 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_2 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_2 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_3 100.0))
+  (assert (<= X_3 1200.0))
+  (assert (>= X_4 0.0))
+  (assert (<= X_4 1200.0))
+  
+  ;; Requires
+  (assert (>= X_0 55947.6909999999988940544426441192626953125))
+  (assert (>= X_3 1145.0))
+  (assert (<= X_4 60.0))
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Y_1
+  (declare-const Y_1 Real)
+  
+  ;; Goal run1'vc
+  (assert (or (and (>= Y_0 1500.0))
+              (and (>= Y_1 1500.0))
+              ))
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; Requires
+  (assert (>= X_0 0.0))
+  (assert (<= X_0 60760.0))
+  (assert (>= X_1 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_1 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_2 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_2 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_3 100.0))
+  (assert (<= X_3 1200.0))
+  (assert (>= X_4 0.0))
+  (assert (<= X_4 1200.0))
+  
+  ;; Requires
+  (assert (>= X_0 55947.6909999999988940544426441192626953125))
+  (assert (>= X_3 1145.0))
+  (assert (<= X_4 60.0))
+  
+  ;; Y_1
+  (declare-const Y_1 Real)
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Goal run2'vc
+  (assert (or (and (>= Y_0 Y_1))
+              (and (>= Y_1 Y_0))
+              ))
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; X_5
+  (declare-const X_5 Real)
+  
+  ;; Requires
+  (assert (>= X_0 0.0))
+  (assert (<= X_0 60760.0))
+  (assert (>= X_1 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_1 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_2 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_2 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_3 100.0))
+  (assert (<= X_3 1200.0))
+  (assert (>= X_4 0.0))
+  (assert (<= X_4 1200.0))
+  
+  ;; Requires
+  (assert (>= X_0 55947.6909999999988940544426441192626953125))
+  (assert (>= X_3 1145.0))
+  (assert (<= X_4 60.0))
+  
+  ;; H
+  (assert (>= X_5 -1.0))
+  
+  ;; H
+  (assert (<= X_5 1.0))
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Y_1
+  (declare-const Y_1 Real)
+  
+  ;; Goal run3'vc
+  (assert (or (and (>= Y_0 Y_1))
+              (and (>= Y_1 Y_0))
+              ))
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  ;;; produced by PyRAT/VNN-LIB driver
+  ;;; produced by VNN-LIB driver
+  ;; X_0
+  (declare-const X_0 Real)
+  
+  ;; X_1
+  (declare-const X_1 Real)
+  
+  ;; X_2
+  (declare-const X_2 Real)
+  
+  ;; X_3
+  (declare-const X_3 Real)
+  
+  ;; X_4
+  (declare-const X_4 Real)
+  
+  ;; Requires
+  (assert (>= X_0 0.0))
+  (assert (<= X_0 60760.0))
+  (assert (>= X_1 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_1 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_2 -3.141592653589793115997963468544185161590576171875))
+  (assert (<= X_2 3.141592653589793115997963468544185161590576171875))
+  (assert (>= X_3 100.0))
+  (assert (<= X_3 1200.0))
+  (assert (>= X_4 0.0))
+  (assert (<= X_4 1200.0))
+  
+  ;; H
+  (assert (>= X_0 1500.0))
+  (assert (<= X_0 1800.0))
+  (assert (>= X_1 -0.059999999999999997779553950749686919152736663818359375))
+  (assert (<= X_1 0.059999999999999997779553950749686919152736663818359375))
+  
+  ;; H
+  (assert (>= X_2 3.100000000000000088817841970012523233890533447265625))
+  (assert (>= X_3 980.0))
+  (assert (>= X_4 960.0))
+  
+  ;; Y_0
+  (declare-const Y_0 Real)
+  
+  ;; Y_1
+  (declare-const Y_1 Real)
+  
+  ;; Y_2
+  (declare-const Y_2 Real)
+  
+  ;; Y_3
+  (declare-const Y_3 Real)
+  
+  ;; Y_4
+  (declare-const Y_4 Real)
+  
+  ;; Goal runP3'vc
+  (assert (<= Y_0 Y_1))
+  (assert (<= Y_0 Y_2))
+  (assert (<= Y_0 Y_3))
+  (assert (<= Y_0 Y_4))
+  
+  Goal run'vc: Unknown ()
+  Goal run1'vc: Unknown ()
+  Goal run2'vc: Unknown ()
+  Goal run3'vc: Unknown ()
+  Goal runP3'vc: Unknown ()
+
+  $ caisar verify --prover Marabou --ltag=ProverSpec file.mlw
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  y0 >= 1500.0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  y0 >= 1500.0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  y1 >= 1500.0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  +y0 -y1 >= 0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  +y1 -y0 >= 0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  x5 >= -1.0
+  x5 <= 1.0
+  +y0 -y1 >= 0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 55947.6909999999988940544426441192626953125
+  x3 >= 1145.0
+  x4 <= 60.0
+  x5 >= -1.0
+  x5 <= 1.0
+  +y1 -y0 >= 0
+  
+  [DEBUG]{ProverSpec} Prover-tailored specification:
+  x0 >= 0.0
+  x0 <= 60760.0
+  x1 >= -3.141592653589793115997963468544185161590576171875
+  x1 <= 3.141592653589793115997963468544185161590576171875
+  x2 >= -3.141592653589793115997963468544185161590576171875
+  x2 <= 3.141592653589793115997963468544185161590576171875
+  x3 >= 100.0
+  x3 <= 1200.0
+  x4 >= 0.0
+  x4 <= 1200.0
+  x0 >= 1500.0
+  x0 <= 1800.0
+  x1 >= -0.059999999999999997779553950749686919152736663818359375
+  x1 <= 0.059999999999999997779553950749686919152736663818359375
+  x2 >= 3.100000000000000088817841970012523233890533447265625
+  x3 >= 980.0
+  x4 >= 960.0
+  +y0 -y1 <= 0
+  +y0 -y2 <= 0
+  +y0 -y3 <= 0
+  +y0 -y4 <= 0
+  
+  Goal run'vc: Unknown ()
+  Goal run1'vc: Unknown ()
+  Goal run2'vc: Unknown ()
+  Goal run3'vc: Unknown ()
+  Goal runP3'vc: Unknown ()
+
+  $ python3 bin/inspect_onnx.py
+  out/caisar_0.onnx has 1 input nodes
+  {'name': '38', 'type': {'tensorType': {'elemType': 1, 'shape': {'dim': [{'dimValue': '5'}]}}}}
+  out/caisar_1.onnx has 1 input nodes
+  {'name': '221', 'type': {'tensorType': {'elemType': 1, 'shape': {'dim': [{'dimValue': '5'}]}}}}
+  out/caisar_2.onnx has 1 input nodes
+  {'name': '440', 'type': {'tensorType': {'elemType': 1, 'shape': {'dim': [{'dimValue': '5'}]}}}}
+  out/caisar_3.onnx has 1 input nodes
+  {'name': '588', 'type': {'tensorType': {'elemType': 1, 'shape': {'dim': [{'dimValue': '6'}]}}}}
+  out/caisar_4.onnx has 1 input nodes
+  {'name': '815', 'type': {'tensorType': {'elemType': 1, 'shape': {'dim': [{'dimValue': '5'}]}}}}
+  5 files checked
diff --git a/tests/autodetect.t b/tests/autodetect.t
index f5b80c12f5e82e3a0c9d8855b58d089e7df3a8e8..ba5051ce2d5e6b681575f0879917d4cf1aaaeb0b 100644
--- a/tests/autodetect.t
+++ b/tests/autodetect.t
@@ -37,6 +37,7 @@ Test autodetect
   Marabou 1.0.+
   PyRAT 1.1
   PyRAT 1.1 (ACAS)
+  PyRAT 1.1 (ACASd)
   PyRAT 1.1 (VNNLIB)
   SAVer v1.0
   alpha-beta-CROWN dummy-version
diff --git a/tests/bin/inspect_onnx.py b/tests/bin/inspect_onnx.py
index a109a495a11f9aef38d80f41c26d4cefb9e5559f..1708b6b3aa3acfcf6704a5b22c712429be85ef6a 100644
--- a/tests/bin/inspect_onnx.py
+++ b/tests/bin/inspect_onnx.py
@@ -1,17 +1,21 @@
 import onnx
-import os
+from google.protobuf.json_format import MessageToDict
+import os, sys
 
-for file in os.listdir("out"):
-    m = onnx.load(os.path.join("out", file))
-    if len(m.graph.initializer) >= 1:
-        initi = m.graph.initializer[0]
-        print(
-            "Initializer name {name} has data {val:3.3f}".format(
-                name=initi.name, val=initi.float_data[2]
-            )
-        )
-    if len(m.graph.input) >= 1:
-        print(
-            f"Input name {m.graph.input[0].name}"
-        )
+l = []
+if len(sys.argv) > 1:
+    l = sys.argv[1:]
+else:
+    l = os.listdir("out")
+    l.sort()
+    l = [os.path.join("out", file) for file in l]
+
+for file in l:
+    m = onnx.load(file)
+    print(f"{file} has {len(m.graph.input)} input nodes")
     onnx.checker.check_model(m)
+    onnx.shape_inference.infer_shapes(m, check_type=True, strict_mode=True, data_prop=True)
+    for _input in m.graph.input:
+        print(MessageToDict(_input))
+
+print(len(l), "files checked")
diff --git a/tests/dune b/tests/dune
index e65becccab35297919fb528089fd968cfa4318ec..aedd9f6e3bd4b2cefda847f122bd81c7e0f8680d 100644
--- a/tests/dune
+++ b/tests/dune
@@ -1,6 +1,6 @@
 (cram
  (alias local)
- (applies_to * \ nier_to_onnx)
+ (applies_to * \ nier_to_onnx acasxu_ci)
  (deps
   (package caisar)
   setup_env.sh
@@ -20,6 +20,8 @@
   setup_env.sh
   TestNetwork.nnet
   TestNetworkONNX.onnx
+  ../examples/acasxu/nets/onnx/ACASXU_1_1.onnx
+  ../examples/acasxu/nets/onnx/ACASXU_1_9.onnx
   TestSVM.ovo
   (glob_files bin/*)
   filter_tmpdir.sh
diff --git a/tests/nier_to_onnx.t b/tests/nier_to_onnx.t
index e062cca4fce43f3ba877669b068b67bc7b521a47..d77897c5db91be6e61a94e3b10273908ed802116 100644
--- a/tests/nier_to_onnx.t
+++ b/tests/nier_to_onnx.t
@@ -26,4 +26,6 @@ Test verify
 Data should be 0.135
 
   $ python3 bin/inspect_onnx.py
-  Input name 0
+  out/nn_onnx.nier.onnx has 1 input nodes
+  {'name': '0', 'type': {'tensorType': {'elemType': 1, 'shape': {'dim': [{'dimValue': '1'}, {'dimValue': '1'}, {'dimValue': '1'}, {'dimValue': '3'}]}}}}
+  1 files checked
diff --git a/utils/logging.ml b/utils/logging.ml
index e76e20b90e5f454a9b73f8af0f32868a31d07fac..afe6c3d5132e8bebfc474208afec04f9ed67bbc6 100644
--- a/utils/logging.ml
+++ b/utils/logging.ml
@@ -32,16 +32,7 @@ let src_interpret_goal =
 
 let src_nier = Logs.Src.create "NIER" ~doc:"Neural Intermediate Representation"
 let src_stack_trace = Logs.Src.create "StackTrace" ~doc:"Print stack trace"
-
-let all_srcs () =
-  [
-    src_autodetect;
-    src_prover_spec;
-    src_prover_call;
-    src_interpret_goal;
-    src_nier;
-    src_stack_trace;
-  ]
+let all_srcs () = Logs.Src.list ()
 
 let is_debug_level src =
   match Logs.Src.level src with Some Debug -> true | _ -> false
@@ -126,4 +117,5 @@ let () =
     | Invalid_argument msg -> Fmt.pf fmt "Invalid argument: %s" msg
     | Failure msg -> Fmt.pf fmt "Failure: %s" msg
     | Sys_error msg -> Fmt.pf fmt "%s" msg
+    | Sexplib0.Sexp.Not_found_s sexp -> Sexplib0.Sexp.pp_hum fmt sexp
     | _ -> raise exn)