diff --git a/config/caisar-detection-data.conf b/config/caisar-detection-data.conf
index 2f8003cb7a20b00c171f1be8a9288ef8c5f9333b..e3c57bcf181524d465dac2465ebbc600aaa1ca52 100644
--- a/config/caisar-detection-data.conf
+++ b/config/caisar-detection-data.conf
@@ -125,7 +125,7 @@ exec = "saver"
 version_switch = "--version 2>&1 | head -n1 && (which saver > /dev/null 2>&1)"
 version_regexp = "\\(v[0-9.]+\\)"
 version_ok = "v1.0"
-command = "%e %{svm} %{dataset} %{abstraction} %{distance} %{epsilon}"
+command = "%e %{svm} %{dataset} %{abstraction} %{perturbation} %{perturbation_param}"
 driver = "%{config}/drivers/saver.drv"
 use_at_auto_level = 1
 
diff --git a/config/drivers/marabou.drv b/config/drivers/marabou.drv
index 2b23a906ee2e127043cc25f0f8c7d0bfcc24031b..205174e450ab6490804abf914bcf2ae9e15ce58d 100644
--- a/config/drivers/marabou.drv
+++ b/config/drivers/marabou.drv
@@ -34,8 +34,8 @@ transformation "inline_trivial"
 transformation "introduce_premises"
 transformation "eliminate_builtin"
 transformation "simplify_formula"
-transformation "simplify_rel"
-transformation "vars_on_lhs"
+transformation "simplify_relop"
+transformation "vars_on_lhs_of_relop"
 
 theory BuiltIn
   syntax type int   "int"
diff --git a/config/drivers/pyrat.drv b/config/drivers/pyrat.drv
index ec19b024707e2756840eed529c3a8a7b9310a0db..bbb6cfdf61ae816c8523193734d07320a801c1a9 100644
--- a/config/drivers/pyrat.drv
+++ b/config/drivers/pyrat.drv
@@ -34,7 +34,7 @@ transformation "inline_trivial"
 transformation "introduce_premises"
 transformation "eliminate_builtin"
 transformation "simplify_formula"
-transformation "simplify_rel"
+transformation "simplify_relop"
 
 theory BuiltIn
   syntax type int   "int"
diff --git a/config/drivers/vnnlib.gen b/config/drivers/vnnlib.gen
index 59c1c5d92e7c8dba640d4af0e1cefe31036a29f6..b7d6d0258a56947de13993c3e9f3c35ec8ac8a59 100644
--- a/config/drivers/vnnlib.gen
+++ b/config/drivers/vnnlib.gen
@@ -32,8 +32,8 @@ transformation "inline_trivial"
 transformation "introduce_premises"
 transformation "eliminate_builtin"
 transformation "simplify_formula"
-transformation "simplify_rel"
-transformation "vars_on_lhs"
+transformation "simplify_relop"
+transformation "vars_on_lhs_of_relop"
 
 theory BuiltIn
   syntax type int   "Int"
diff --git a/src/dataset.ml b/src/dataset.ml
index 1c498b899998c1f855cc12a2568e91206b4fb22c..ac2d3f3b5a3964d13c1717ba285c21b0c8e3d097 100644
--- a/src/dataset.ml
+++ b/src/dataset.ml
@@ -45,9 +45,8 @@ type eps = float [@@deriving yojson, show]
 let string_of_eps eps = Float.to_string eps
 
 let term_of_eps env eps =
-  let th = Env.read_theory env [ "ieee_float" ] "Float64" in
-  let ty = Ty.ty_app (Theory.ns_find_ts th.th_export [ "t" ]) [] in
-  Term.t_const (real_constant_of_float eps) ty
+  let th = Symbols.Float64.create env in
+  Term.t_const (real_constant_of_float eps) th.ty
 
 type threshold = float [@@deriving yojson, show]
 
@@ -270,8 +269,8 @@ let add_output_decl ~id predicate_kind ~outputs ~record th task =
 let add_decls ~kind task =
   let n, fid, meta =
     match kind with
-    | `In n -> (n, Fmt.str "x%d", Utils.meta_input)
-    | `Out n -> (n, Fmt.str "y%d", Utils.meta_output)
+    | `In n -> (n, Fmt.str "x%d", Meta.meta_input)
+    | `Out n -> (n, Fmt.str "y%d", Meta.meta_output)
   in
   let id_lls =
     List.init n ~f:(fun id ->
diff --git a/src/interpretation.ml b/src/interpretation.ml
deleted file mode 100644
index 2357111c18e3ca253ec0bc09cd384e5b7d5d024d..0000000000000000000000000000000000000000
--- a/src/interpretation.ml
+++ /dev/null
@@ -1,579 +0,0 @@
-(**************************************************************************)
-(*                                                                        *)
-(*  This file is part of CAISAR.                                          *)
-(*                                                                        *)
-(*  Copyright (C) 2023                                                    *)
-(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
-(*         alternatives)                                                  *)
-(*                                                                        *)
-(*  You can redistribute it and/or modify it under the terms of the GNU   *)
-(*  Lesser General Public License as published by the Free Software       *)
-(*  Foundation, version 2.1.                                              *)
-(*                                                                        *)
-(*  It is distributed in the hope that it will be useful,                 *)
-(*  but WITHOUT ANY WARRANTY; without even the implied warranty of        *)
-(*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the          *)
-(*  GNU Lesser General Public License for more details.                   *)
-(*                                                                        *)
-(*  See the GNU Lesser General Public License version 2.1                 *)
-(*  for more details (enclosed in the file licenses/LGPLv2.1).            *)
-(*                                                                        *)
-(**************************************************************************)
-
-module CRE = Reduction_engine (* CAISAR Reduction Engine *)
-open Why3
-open Base
-
-type nn =
-  | NNet of Term.lsymbol
-      [@printer
-        fun fmt nn ->
-          Fmt.pf fmt "NNet: %a"
-            Fmt.(option Language.pp_nn)
-            (Language.lookup_nn nn)]
-  | ONNX of Term.lsymbol
-      [@printer
-        fun fmt nn ->
-          Fmt.pf fmt "ONNX: %a"
-            Fmt.(option Language.pp_nn)
-            (Language.lookup_nn nn)]
-[@@deriving show]
-
-type dataset = DS_csv of Csv.t [@printer fun fmt _ -> Fmt.pf fmt "<csv>"]
-[@@deriving show]
-
-type data = D_csv of string list [@@deriving show]
-
-type caisar_op =
-  | NeuralNetwork of nn
-  | Dataset of Term.lsymbol * dataset
-      [@printer
-        fun fmt (ls, ds) ->
-          Fmt.pf fmt "%a %a"
-            Fmt.(option Language.pp_dataset)
-            (Language.lookup_dataset_csv ls)
-            pp_dataset ds]
-  | Data of data
-  | Vector of Term.lsymbol
-      [@printer
-        fun fmt v -> Fmt.pf fmt "%a" Fmt.(option int) (Language.lookup_vector v)]
-[@@deriving show]
-
-type caisar_env = {
-  caisar_op_of_ls : caisar_op Term.Hls.t;
-  ls_of_caisar_op : (caisar_op, Term.lsymbol) Hashtbl.t;
-  env : Env.env;
-  cwd : string;
-}
-
-let ls_of_caisar_op engine caisar_op ty_args ty =
-  let caisar_env = CRE.user_env engine in
-  Hashtbl.find_or_add caisar_env.ls_of_caisar_op caisar_op ~default:(fun () ->
-    let id = Ident.id_fresh "caisar_op" in
-    let ls =
-      match caisar_op with
-      | NeuralNetwork (NNet n | ONNX n) -> n
-      | Vector v -> v
-      | Dataset (d, _) -> d
-      | _ -> Term.create_lsymbol id ty_args ty
-    in
-    Hashtbl.Poly.add_exn caisar_env.ls_of_caisar_op ~key:caisar_op ~data:ls;
-    Term.Hls.add caisar_env.caisar_op_of_ls ls caisar_op;
-    ls)
-
-let caisar_op_of_ls engine ls =
-  let caisar_env = CRE.user_env engine in
-  Term.Hls.find caisar_env.caisar_op_of_ls ls
-
-let term_of_caisar_op ?(args = []) engine caisar_op ty =
-  let t_args, ty_args = List.unzip args in
-  Term.t_app_infer (ls_of_caisar_op engine caisar_op ty_args ty) t_args
-
-let caisar_env env cwd =
-  {
-    ls_of_caisar_op = Hashtbl.Poly.create ();
-    caisar_op_of_ls = Term.Hls.create 10;
-    env;
-    cwd;
-  }
-
-let print_caisar_op_of_ls fmt caisar_env =
-  Pp.print_iter2 Term.Hls.iter Pp.newline Pp.comma Pretty.print_ls pp_caisar_op
-    fmt caisar_env.caisar_op_of_ls
-  [@@warning "-32"]
-
-let const_real_of_float value =
-  let neg = Float.is_negative value in
-  let value = Fmt.str "%.64f" (Float.abs value) in
-  (* Split into integer and fractional parts. *)
-  let int_frac = String.split value ~on:'.' in
-  let int = List.hd_exn int_frac in
-  let frac =
-    match List.tl_exn int_frac with
-    | [] -> ""
-    | [ s ] -> s
-    | _ -> assert false (* Since every float has one '.' at most. *)
-  in
-  Constant.ConstReal (Number.real_literal ~radix:10 ~neg ~int ~frac ~exp:None)
-
-let value_term t = CRE.Value (Term t)
-let value_int i = CRE.Value (Int i)
-
-let caisar_builtins : caisar_env CRE.built_in_theories list =
-  let reconstruct () =
-    (* Force the engine to reconstruct the original term. *)
-    raise Stdlib.Not_found
-  in
-  let error_message ls =
-    Fmt.str "Invalid arguments for '%a'" Pretty.print_ls ls
-  in
-
-  (* Vector *)
-  let vget : _ CRE.builtin =
-   fun engine ls vl ty ->
-    match vl with
-    | [
-     Term ({ t_node = Tapp (ls1, tl1); _ } as t1);
-     Term ({ t_node = Tconst (ConstInt i); _ } as _t2);
-    ] -> (
-      let i = Number.to_small_integer i in
-      match caisar_op_of_ls engine ls1 with
-      | Dataset (_, DS_csv csv) ->
-        let row = List.nth_exn csv i in
-        let label, features =
-          match row with
-          | [] | [ _ ] -> assert false
-          | label :: features -> (label, features)
-        in
-        let ty_features =
-          match ty with
-          | Some { ty_node = Tyapp (_, [ _; ty ]); _ } -> Some ty
-          | _ -> assert false
-        in
-        let t_features, t_label =
-          ( term_of_caisar_op engine (Data (D_csv features)) ty_features,
-            Term.t_int_const (BigInt.of_int (Int.of_string label)) )
-        in
-        value_term (Term.t_tuple [ t_label; t_features ])
-      | Vector v ->
-        let n = Option.value_exn (Language.lookup_vector v) in
-        if List.length tl1 <> n
-        then
-          Logging.code_error ~src:Logging.src_interpret_goal (fun m ->
-            m
-              "Mismatch between (container) vector length and number of \
-               (contained) input variables.");
-        if i < n
-        then value_term (List.nth_exn tl1 i)
-        else
-          Logging.user_error ?loc:t1.t_loc (fun m ->
-            m "Index constant %d is out-of-bounds [0,%d]" i (n - 1))
-      | Data _ | NeuralNetwork _ -> assert false)
-    | [ Term _t1; Term _t2 ] -> reconstruct ()
-    | _ -> invalid_arg (error_message ls)
-  in
-  let vlength : _ CRE.builtin =
-   fun engine ls vl _ty ->
-    match vl with
-    | [ Term { t_node = Tapp (ls, []); _ } ] -> (
-      match caisar_op_of_ls engine ls with
-      | Dataset (_, DS_csv csv) -> value_int (BigInt.of_int (Csv.lines csv))
-      | Vector v ->
-        value_int (BigInt.of_int (Option.value_exn (Language.lookup_vector v)))
-      | Data (D_csv data) -> value_int (BigInt.of_int (List.length data))
-      | NeuralNetwork _ -> assert false)
-    | [ Term { t_node = Tapp (ls, tl); _ } ] -> (
-      match caisar_op_of_ls engine ls with
-      | Vector v ->
-        let n = Option.value_exn (Language.lookup_vector v) in
-        assert (List.length tl = n);
-        value_int (BigInt.of_int n)
-      | Dataset _ | Data _ | NeuralNetwork _ -> assert false)
-    | [ Term _t ] -> reconstruct ()
-    | _ -> invalid_arg (error_message ls)
-  in
-  let vminus : _ CRE.builtin =
-   fun engine ls vl ty ->
-    match vl with
-    | [
-     Term ({ t_node = Tapp (ls1, tl1); _ } as t1);
-     Term ({ t_node = Tapp (ls2, _); _ } as _t2);
-    ] -> (
-      match (caisar_op_of_ls engine ls1, caisar_op_of_ls engine ls2) with
-      | Vector v, Data (D_csv data) ->
-        let n = Option.value_exn (Language.lookup_vector v) in
-        assert (n = List.length data);
-        let ty_cst =
-          match ty with
-          | Some { ty_node = Tyapp (_, [ ty ]); _ } -> ty
-          | _ -> assert false
-        in
-        let csts =
-          List.map data ~f:(fun d ->
-            let cst = const_real_of_float (Float.of_string d) in
-            Term.t_const cst ty_cst)
-        in
-        let { env; _ } = CRE.user_env engine in
-        let args =
-          let minus =
-            (* TODO: generalize wrt the type of constants [csts]. *)
-            let th = Env.read_theory env [ "ieee_float" ] "Float64" in
-            Theory.(ns_find_ls th.th_export [ Ident.op_infix ".-" ])
-          in
-          List.map2_exn tl1 csts ~f:(fun tl c ->
-            (Term.t_app_infer minus [ tl; c ], ty_cst))
-        in
-        let caisar_op = Vector (Language.create_vector env n) in
-        value_term (term_of_caisar_op ~args engine caisar_op ty)
-      | Vector _, Vector _ ->
-        Logging.user_error ?loc:t1.t_loc (fun m ->
-          m "Invalid substraction operation involving two abstract vectors")
-      | _ ->
-        Logging.user_error ?loc:t1.t_loc (fun m ->
-          m "Invalid subtraction operation"))
-    | [ Term _t1; Term _t2 ] -> reconstruct ()
-    | _ -> invalid_arg (error_message ls)
-  in
-  let vplus : _ CRE.builtin =
-   fun engine ls vl ty ->
-    match vl with
-    | [
-     Term ({ t_node = Tapp (ls1, tl1); _ } as t1);
-     Term ({ t_node = Tapp (ls2, _); _ } as _t2);
-    ] -> (
-      match (caisar_op_of_ls engine ls1, caisar_op_of_ls engine ls2) with
-      | Data (D_csv data), Vector v | Vector v, Data (D_csv data) ->
-        let n = Option.value_exn (Language.lookup_vector v) in
-        assert (n = List.length data);
-        let ty_cst =
-          match ty with
-          | Some { ty_node = Tyapp (_, [ ty ]); _ } -> ty
-          | _ -> assert false
-        in
-        let csts =
-          List.map data ~f:(fun d ->
-            let cst = const_real_of_float (Float.of_string d) in
-            Term.t_const cst ty_cst)
-        in
-        let { env; _ } = CRE.user_env engine in
-        let args =
-          let plus =
-            (* TODO: generalize wrt the type of constants [csts]. *)
-            let th = Env.read_theory env [ "ieee_float" ] "Float64" in
-            Theory.(ns_find_ls th.th_export [ Ident.op_infix ".+" ])
-          in
-          List.map2_exn tl1 csts ~f:(fun tl c ->
-            (Term.t_app_infer plus [ tl; c ], ty_cst))
-        in
-        let caisar_op = Vector (Language.create_vector env n) in
-        value_term (term_of_caisar_op ~args engine caisar_op ty)
-      | Vector _, Vector _ ->
-        Logging.user_error ?loc:t1.t_loc (fun m ->
-          m "%a: Invalid addition operation involving two abstract vectors"
-            (Fmt.option Why3.Loc.pp_position)
-            t1.t_loc)
-      | _ ->
-        Logging.user_error ?loc:t1.t_loc (fun m ->
-          m "Invalid addition operation involving two non-vectors"))
-    | [ Term _t1; Term _t2 ] -> reconstruct ()
-    | _ -> invalid_arg (error_message ls)
-  in
-  let vmapi : _ CRE.builtin =
-   fun engine ls vl ty ->
-    match vl with
-    | [
-     Term ({ t_node = Tapp (ls1, tl1); _ } as _t1);
-     Term ({ t_node = Teps _tb; _ } as t2);
-    ] -> (
-      assert (Term.t_is_lambda t2);
-      match caisar_op_of_ls engine ls1 with
-      | Vector v ->
-        let n = Option.value_exn (Language.lookup_vector v) in
-        assert (List.length tl1 = n);
-        let args =
-          List.mapi tl1 ~f:(fun idx t ->
-            let idx = Term.t_int_const (BigInt.of_int idx) in
-            (Term.t_func_app_beta_l t2 [ idx; t ], Option.value_exn t.t_ty))
-        in
-        let caisar_op =
-          let { env; _ } = CRE.user_env engine in
-          Vector (Language.create_vector env n)
-        in
-        Eval (term_of_caisar_op ~args engine caisar_op ty)
-      | Dataset (_, DS_csv csv) -> value_int (BigInt.of_int (Csv.lines csv))
-      | Data _ | NeuralNetwork _ -> assert false)
-    | [ Term _t1; Term _t2 ] -> reconstruct ()
-    | _ -> invalid_arg (error_message ls)
-  in
-
-  (* Neural Network *)
-  let read_model : _ CRE.builtin =
-   fun engine ls vl ty ->
-    match vl with
-    | [ Term { t_node = Tconst (ConstStr neural_network); _ } ] ->
-      let { env; cwd; _ } = CRE.user_env engine in
-      let caisar_op =
-        let nn_format = Stdlib.Filename.extension neural_network in
-        let nn_filename = Stdlib.Filename.concat cwd neural_network in
-        let nn =
-          match String.lowercase nn_format with
-          | ".nnet" -> NNet (Language.create_nn_nnet env nn_filename)
-          | ".onnx" -> ONNX (Language.create_nn_onnx env nn_filename)
-          | _ ->
-            failwith
-              (Fmt.str "Unrecognized neural network format '%s'" nn_format)
-        in
-        NeuralNetwork nn
-      in
-      value_term (term_of_caisar_op engine caisar_op ty)
-    | [ Term _t1; Term _t2 ] -> reconstruct ()
-    | _ -> invalid_arg (error_message ls)
-  in
-  let apply_neural_network : _ CRE.builtin =
-   fun _engine ls vl _ty ->
-    match vl with
-    | [ Term _t1; Term _t2 ] -> reconstruct ()
-    | _ -> invalid_arg (error_message ls)
-  in
-
-  (* Dataset *)
-  let read_dataset : _ CRE.builtin =
-   fun engine ls vl ty ->
-    match vl with
-    | [ Term { t_node = Tconst (ConstStr dataset); _ } ] ->
-      let { env; cwd; _ } = CRE.user_env engine in
-      let caisar_op =
-        let filename = Stdlib.Filename.concat cwd dataset in
-        let ds = Language.create_dataset_csv env filename in
-        let dataset = DS_csv (Csv.load filename) in
-        Dataset (ds, dataset)
-      in
-      value_term (term_of_caisar_op engine caisar_op ty)
-    | [ Term _t1 ] -> reconstruct ()
-    | _ -> invalid_arg (error_message ls)
-  in
-  let infer_min_label : _ CRE.builtin =
-   fun engine ls vl _ty ->
-    match vl with
-    | [ Term ({ t_node = Tapp (ls1, _); _ } as _t1) ] -> (
-      match caisar_op_of_ls engine ls1 with
-      | Dataset (_, DS_csv csv) ->
-        (* TODO: test for non-empty csv. *)
-        let min_label =
-          List.fold_left csv ~init:None ~f:(fun min_label elt ->
-            let label = Int.of_string (List.hd_exn elt) in
-            Some
-              (match min_label with
-              | None -> label
-              | Some min_label -> min min_label label))
-        in
-        let min_label = Option.value_exn min_label in
-        value_int (BigInt.of_int min_label)
-      | _ -> assert false)
-    | [ Term _t1 ] -> reconstruct ()
-    | _ -> invalid_arg (error_message ls)
-  in
-  let infer_max_label : _ CRE.builtin =
-   fun engine ls vl _ty ->
-    match vl with
-    | [ Term ({ t_node = Tapp (ls1, _); _ } as _t1) ] -> (
-      match caisar_op_of_ls engine ls1 with
-      | Dataset (_, DS_csv csv) ->
-        (* TODO: test for non-empty csv. *)
-        let max_label =
-          List.fold_left csv ~init:None ~f:(fun max_label elt ->
-            let label = Int.of_string (List.hd_exn elt) in
-            Some
-              (match max_label with
-              | None -> label
-              | Some max_label -> max max_label label))
-        in
-        let max_label = Option.value_exn max_label in
-        value_int (BigInt.of_int max_label)
-      | _ -> assert false)
-    | [ Term _t1 ] -> reconstruct ()
-    | _ -> invalid_arg (error_message ls)
-  in
-
-  [
-    ( [ "caisar"; "types" ],
-      "Vector",
-      [],
-      [
-        ([ Ident.op_get "" ] (* ([]) *), None, vget);
-        ([ Ident.op_infix "-" ], None, vminus);
-        ([ Ident.op_infix "+" ], None, vplus);
-        ([ "length" ], None, vlength);
-        ([ "mapi" ], None, vmapi);
-      ] );
-    ( [ "caisar"; "model" ],
-      "NN",
-      [],
-      [
-        ([ "read_model" ], None, read_model);
-        ([ Ident.op_infix "@@" ], None, apply_neural_network);
-      ] );
-    ( [ "caisar"; "dataset" ],
-      "CSV",
-      [],
-      [
-        ([ "read_dataset" ], None, read_dataset);
-        ([ "infer_min_label" ], None, infer_min_label);
-        ([ "infer_max_label" ], None, infer_max_label);
-      ] );
-  ]
-
-let bounded_quant engine vs ~cond : CRE.bounded_quant_result option =
-  match vs.Term.vs_ty with
-  | {
-   ty_node = Tyapp ({ ts_name = { id_string = "vector"; _ }; _ }, ty :: _);
-   _;
-  } -> (
-    match cond.Term.t_node with
-    | Tapp
-        ( { ls_name = { id_string = "has_length"; _ }; _ },
-          [
-            ({ t_node = Tvar vs1; _ } as _t1);
-            ({ t_node = Tconst (ConstInt n); _ } as _t2);
-          ] ) ->
-      if not (Term.vs_equal vs vs1)
-      then None
-      else
-        let n = Number.to_small_integer n in
-        let new_quant =
-          List.init n ~f:(fun _ ->
-            let preid = Ident.id_fresh "caisar_x" in
-            Term.create_vsymbol preid ty)
-        in
-        let args = List.map new_quant ~f:(fun vs -> (Term.t_var vs, ty)) in
-        let caisar_op =
-          let { env; _ } = CRE.user_env engine in
-          Vector (Language.create_vector env n)
-        in
-        let substitutions =
-          [ term_of_caisar_op ~args engine caisar_op (Some vs.vs_ty) ]
-        in
-        Some { new_quant; substitutions }
-    | Tapp ({ ls_name = { id_string = "has_length"; _ }; _ }, _) -> None
-    | _ ->
-      Logging.user_error ?loc:vs.vs_name.id_loc (fun m ->
-        m
-          "Expecting 'has_length' predicate after universal quantifier on \
-           vector '%a'"
-          Pretty.print_vs vs))
-  | _ -> None
-
-let declare_language_lsymbols caisar_env task =
-  (* Declare [Language] logic symbols only. *)
-  Term.Hls.fold
-    (fun ls _ task ->
-      if Language.mem_vector ls || Language.mem_nn ls
-         || Language.mem_dataset_csv ls
-      then
-        (* Add meta for neural network and dataset declarations. *)
-        let task =
-          match (Language.lookup_nn ls, Language.lookup_dataset_csv ls) with
-          | None, None -> task
-          | Some { nn_filename; _ }, None ->
-            Task.add_meta task Utils.meta_nn_filename [ MAstr nn_filename ]
-          | None, Some (CSV ds_filename) ->
-            Task.add_meta task Utils.meta_dataset_filename [ MAstr ds_filename ]
-          | Some _, Some _ -> assert false
-        in
-        (* Add actual logic symbol declaration. *)
-        let decl = Decl.create_param_decl ls in
-        Task.add_decl task decl
-      else task)
-    caisar_env.caisar_op_of_ls task
-
-let builtin_of_constant env known_map (name, value) =
-  let decls =
-    Ident.Mid.fold_left
-      (fun acc id ls ->
-        if String.equal id.Ident.id_string name then ls :: acc else acc)
-      [] known_map
-  in
-  match decls with
-  | [] ->
-    Logging.user_error (fun m ->
-      m "'%s' is not a declared toplevel constant" name)
-  | _ :: _ :: _ ->
-    Logging.user_error (fun m ->
-      m "'%s' corresponds to multiple declared toplevel constants" name)
-  | [ { Decl.d_node = Dparam ls; _ } ] ->
-    let cst =
-      let ty_float64_t =
-        let th = Env.read_theory env [ "ieee_float" ] "Float64" in
-        Ty.ty_app (Theory.ns_find_ts th.th_export [ "t" ]) []
-      in
-      match ls.Term.ls_value with
-      | None -> (
-        match value with
-        | "true" -> Term.t_true
-        | "false" -> Term.t_false
-        | _ ->
-          Logging.user_error (fun m ->
-            m "'%s' expects 'true' or 'false', got '%s' instead" name value))
-      | Some ty when Ty.ty_equal ty Ty.ty_bool -> (
-        match value with
-        | "true" -> Term.t_bool_true
-        | "false" -> Term.t_bool_false
-        | _ ->
-          Logging.user_error (fun m ->
-            m "'%s' expects 'true' or 'false', got '%s' instead" name value))
-      | Some ty
-        when Ty.ty_equal ty Ty.ty_int || Ty.ty_equal ty Ty.ty_real
-             || Ty.ty_equal ty ty_float64_t ->
-        let lb = Lexing.from_string value in
-        Loc.set_file (Fmt.str "constant '%s' to be bound to '%s'" value name) lb;
-        let parsed = Lexer.parse_term lb in
-        let cst =
-          match parsed.term_desc with
-          | Ptree.Tconst cst -> cst
-          | _ ->
-            Logging.user_error (fun m ->
-              m "'%s' expects a numerical constant, got '%s' instead" name value)
-        in
-        Term.t_const cst ty
-      | Some ty when Ty.ty_equal ty Ty.ty_str ->
-        let cst = Constant.ConstStr value in
-        Term.t_const cst ty
-      | Some ty ->
-        Logging.not_implemented_yet (fun m ->
-          m
-            "'%s' has type '%a' but only toplevel constants of type bool, int, \
-             real and string can be defined"
-            name Pretty.print_ty ty)
-    in
-    (ls, fun _ _ _ _ -> CRE.Eval cst)
-  | _ ->
-    Logging.user_error (fun m ->
-      m "'%s' does not appear to be a declared toplevel constant" name)
-
-let interpret_task ~cwd ?(def_constants = []) env task =
-  let known_map = Task.task_known task in
-  let caisar_env = caisar_env env cwd in
-  let params =
-    {
-      CRE.compute_defs = true;
-      compute_builtin = true;
-      compute_def_set = Term.Sls.empty;
-      compute_max_quantifier_domain = Int.max_value;
-    }
-  in
-  let builtins =
-    List.map ~f:(builtin_of_constant env known_map) def_constants
-  in
-  let engine =
-    CRE.create ~bounded_quant ~builtins params env known_map caisar_env
-      caisar_builtins
-  in
-  let g, f = (Task.task_goal task, Task.task_goal_fmla task) in
-  let f = CRE.normalize ~limit:Int.max_value engine Term.Mvs.empty f in
-  Logs.debug ~src:Logging.src_interpret_goal (fun m ->
-    m "Interpreted formula for goal '%a':@.%a@.%a" Pretty.print_pr g
-      Pretty.print_term f print_caisar_op_of_ls caisar_env);
-  let _, task = Task.task_separate_goal task in
-  let task = declare_language_lsymbols caisar_env task in
-  let task = Task.(add_prop_decl task Pgoal g f) in
-  task
diff --git a/src/interpretation/interpreter.ml b/src/interpretation/interpreter.ml
new file mode 100644
index 0000000000000000000000000000000000000000..128ec43ae21132f9523436a8d8a006c9fa5408a7
--- /dev/null
+++ b/src/interpretation/interpreter.ml
@@ -0,0 +1,168 @@
+(**************************************************************************)
+(*                                                                        *)
+(*  This file is part of CAISAR.                                          *)
+(*                                                                        *)
+(*  Copyright (C) 2023                                                    *)
+(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
+(*         alternatives)                                                  *)
+(*                                                                        *)
+(*  You can redistribute it and/or modify it under the terms of the GNU   *)
+(*  Lesser General Public License as published by the Free Software       *)
+(*  Foundation, version 2.1.                                              *)
+(*                                                                        *)
+(*  It is distributed in the hope that it will be useful,                 *)
+(*  but WITHOUT ANY WARRANTY; without even the implied warranty of        *)
+(*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the          *)
+(*  GNU Lesser General Public License for more details.                   *)
+(*                                                                        *)
+(*  See the GNU Lesser General Public License version 2.1                 *)
+(*  for more details (enclosed in the file licenses/LGPLv2.1).            *)
+(*                                                                        *)
+(**************************************************************************)
+
+module IRE = Interpreter_reduction_engine
+module ITypes = Interpreter_types
+open Base
+
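+(* Builds a builtin evaluating the toplevel constant [name] to the parsed
+   [value], after checking that the constant is declared exactly once and that
+   its type is supported (prop, bool, int, real, Float64 or string). *)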
+let builtin_of_constant env known_map (name, value) =
+  let decls =
+    Why3.Ident.Mid.fold_left
+      (fun acc id ls ->
+        if String.equal id.Why3.Ident.id_string name then ls :: acc else acc)
+      [] known_map
+  in
+  match decls with
+  | [] ->
+    Logging.user_error (fun m ->
+      m "'%s' is not a declared toplevel constant" name)
+  | _ :: _ :: _ ->
+    Logging.user_error (fun m ->
+      m "'%s' corresponds to multiple declared toplevel constants" name)
+  | [ { Why3.Decl.d_node = Dparam ls; _ } ] ->
+    let cst =
+      match ls.Why3.Term.ls_value with
+      | None -> (
+        match value with
+        | "true" -> Why3.Term.t_true
+        | "false" -> Why3.Term.t_false
+        | _ ->
+          Logging.user_error (fun m ->
+            m "'%s' expects 'true' or 'false', got '%s' instead" name value))
+      | Some ty when Why3.Ty.ty_equal ty Why3.Ty.ty_bool -> (
+        match value with
+        | "true" -> Why3.Term.t_bool_true
+        | "false" -> Why3.Term.t_bool_false
+        | _ ->
+          Logging.user_error (fun m ->
+            m "'%s' expects 'true' or 'false', got '%s' instead" name value))
+      | Some ty
+        when Why3.Ty.ty_equal ty Why3.Ty.ty_int
+             || Why3.Ty.ty_equal ty Why3.Ty.ty_real
+             || Why3.Ty.ty_equal ty (Symbols.Float64.create env).ty ->
+        let lb = Lexing.from_string value in
+        Why3.Loc.set_file
+          (Fmt.str "constant '%s' to be bound to '%s'" value name)
+          lb;
+        let parsed = Why3.Lexer.parse_term lb in
+        let cst =
+          match parsed.term_desc with
+          | Why3.Ptree.Tconst cst -> cst
+          | _ ->
+            Logging.user_error (fun m ->
+              m "'%s' expects a numerical constant, got '%s' instead" name value)
+        in
+        Why3.Term.t_const cst ty
+      | Some ty when Why3.Ty.ty_equal ty Why3.Ty.ty_str ->
+        let cst = Why3.Constant.ConstStr value in
+        Why3.Term.t_const cst ty
+      | Some ty ->
+        Logging.not_implemented_yet (fun m ->
+          m
+            "'%s' has type '%a' but only toplevel constants of type bool, int, \
+             real and string can be defined"
+            name Why3.Pretty.print_ty ty)
+    in
+    (ls, fun _ _ _ _ -> IRE.eval_term cst)
+  | _ ->
+    Logging.user_error (fun m ->
+      m "'%s' does not appear to be a declared toplevel constant" name)
+
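+(* Interprets a bounded quantification over a vector: when the guard is
+   [has_length vs n], the quantified vector is replaced by an abstract vector
+   of [n] fresh variables. *)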
+let bounded_quant engine vs ~cond : IRE.bounded_quant_result option =
+  match vs.Why3.Term.vs_ty with
+  | {
+   ty_node = Tyapp ({ ts_name = { id_string = "vector"; _ }; _ }, ty :: _);
+   _;
+  } -> (
+    match cond.Why3.Term.t_node with
+    | Tapp
+        ( { ls_name = { id_string = "has_length"; _ }; _ },
+          [
+            ({ t_node = Tvar vs1; _ } as _t1);
+            ({ t_node = Tconst (ConstInt n); _ } as _t2);
+          ] ) ->
+      if not (Why3.Term.vs_equal vs vs1)
+      then None
+      else
+        let n = Why3.Number.to_small_integer n in
+        let new_quant =
+          List.init n ~f:(fun _ ->
+            let preid = Why3.Ident.id_fresh "x" in
+            Why3.Term.create_vsymbol preid ty)
+        in
+        let args = List.map new_quant ~f:(fun vs -> (Why3.Term.t_var vs, ty)) in
+        let op =
+          let { ITypes.env; _ } = IRE.user_env engine in
+          ITypes.Vector (Language.create_vector env n)
+        in
+        let substitutions =
+          [ ITypes.term_of_op ~args engine op (Some vs.vs_ty) ]
+        in
+        Some { new_quant; substitutions }
+    | Tapp ({ ls_name = { id_string = "has_length"; _ }; _ }, _) -> None
+    | _ ->
+      Logging.user_error ?loc:vs.vs_name.id_loc (fun m ->
+        m
+          "Expecting 'has_length' predicate after universal quantifier on \
+           vector '%a'"
+          Why3.Pretty.print_vs vs))
+  | _ -> None
+
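+(* Declares in [task] the logic symbols introduced by [Language] during
+   interpretation, together with the metas recording the associated neural
+   network, SVM or dataset files. *)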
+let declare_language_lsymbols interpreter_env task =
+  (* Declare [Language] logic symbols only. *)
+  Why3.Term.Hls.fold
+    (fun ls _ task ->
+      (* Add meta corresponding to logic symbol. *)
+      let task = Language.add_meta_nn task ls in
+      let task = Language.add_meta_svm task ls in
+      let task = Language.add_meta_dataset_csv task ls in
+      (* Add actual logic symbol declaration. *)
+      let decl = Why3.Decl.create_param_decl ls in
+      Why3.Task.add_decl task decl)
+    interpreter_env.ITypes.op_of_ls task
+
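+(* Normalizes the goal of [task] with the reduction engine, then rebuilds the
+   task with the declared language symbols and the interpreted goal. *)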
+let interpret_task ~cwd ?(definitions = []) env task =
+  let known_map = Why3.Task.task_known task in
+  let interpreter_env = ITypes.interpreter_env ~cwd env in
+  let params =
+    {
+      IRE.compute_defs = true;
+      compute_builtin = true;
+      compute_def_set = Why3.Term.Sls.empty;
+      compute_max_quantifier_domain = Int.max_value;
+    }
+  in
+  let builtins = List.map ~f:(builtin_of_constant env known_map) definitions in
+  let engine =
+    IRE.create ~bounded_quant ~builtins params env known_map interpreter_env
+      Interpreter_theory.builtins
+  in
+  let g, f = (Why3.Task.task_goal task, Why3.Task.task_goal_fmla task) in
+  let f = IRE.normalize ~limit:Int.max_value engine Why3.Term.Mvs.empty f in
+  Logs.debug ~src:Logging.src_interpret_goal (fun m ->
+    m "Interpreted formula for goal '%a':@.%a@.%a" Why3.Pretty.print_pr g
+      Why3.Pretty.print_term f ITypes.pp_interpreter_op_hls
+      interpreter_env.op_of_ls);
+  let _, task = Why3.Task.task_separate_goal task in
+  let task = declare_language_lsymbols interpreter_env task in
+  let task = Why3.Task.(add_prop_decl task Pgoal g f) in
+  task
diff --git a/src/interpretation.mli b/src/interpretation/interpreter.mli
similarity index 94%
rename from src/interpretation.mli
rename to src/interpretation/interpreter.mli
index bb5c94f5c94a6838b5eb95709e9fb871cd62c651..5375df3cb68d1a1e1dc04668fe2d2c9305f0387b 100644
--- a/src/interpretation.mli
+++ b/src/interpretation/interpreter.mli
@@ -20,11 +20,9 @@
 (*                                                                        *)
 (**************************************************************************)
 
-open Why3
-
 val interpret_task :
   cwd:string ->
-  ?def_constants:(string * string) list ->
-  Env.env ->
-  Task.task ->
-  Task.task
+  ?definitions:(string * string) list ->
+  Why3.Env.env ->
+  Why3.Task.task ->
+  Why3.Task.task
diff --git a/src/reduction_engine.ml b/src/interpretation/interpreter_reduction_engine.ml
similarity index 99%
rename from src/reduction_engine.ml
rename to src/interpretation/interpreter_reduction_engine.ml
index 23965c77d9428b874a2e33a7ec4139c19c0e7019..6b9042886a8e502377b46d64c47f4b03a7b45a36 100644
--- a/src/reduction_engine.ml
+++ b/src/interpretation/interpreter_reduction_engine.ml
@@ -70,6 +70,11 @@ let user_env x = x.user_env
 let v_attr_copy orig v =
   match v with Int _ -> v | Real _ -> v | Term t -> Term (t_attr_copy orig t)
 
+let value_term t = Value (Term t)
+let value_int i = Value (Int i)
+let value_real r = Value (Real r)
+let eval_term t = Eval t
+
 let term_of_value v =
   let open Number in
   match v with
@@ -106,6 +111,10 @@ let real_of_value v =
   | Term { t_node = Tconst c } -> real_of_const c
   | Term _ -> raise NotNum
 
+let reconstruct_term () =
+  (* Force the engine to reconstruct the original term. *)
+  raise Stdlib.Not_found
+
 (* {2 Builtin symbols} *)
 
 (* all builtin functions *)
diff --git a/src/reduction_engine.mli b/src/interpretation/interpreter_reduction_engine.mli
similarity index 67%
rename from src/reduction_engine.mli
rename to src/interpretation/interpreter_reduction_engine.mli
index 68bbefaef429f50c78270c1fe3c2dc871082bbe1..644f50e51309bc37d19290f32297827e317d2287 100644
--- a/src/reduction_engine.mli
+++ b/src/interpretation/interpreter_reduction_engine.mli
@@ -71,19 +71,16 @@ open Why3
 type 'a engine
 (** abstract type for reduction engines *)
 
-val user_env : 'a engine -> 'a
-
 type params = {
   compute_defs : bool;
-  compute_builtin : bool;
+    (** When set to true, automatically compute symbols using known definitions.
+        Otherwise, only symbols in [compute_def_set] will be computed. *)
+  compute_builtin : bool;  (** When set to true, compute builtin functions. *)
   compute_def_set : Term.Sls.t;
   compute_max_quantifier_domain : int;
+    (** Maximum domain size for the reduction of bounded quantifications. *)
 }
-(** Configuration of the engine. . [compute_defs]: if set to true, automatically
-    compute symbols using known definitions. Otherwise, only symbols in
-    [compute_def_set] will be computed. . [compute_builtin]: if set to true,
-    compute builtin functions. . [compute_max_quantifier_domain]: maximum domain
-    size for the reduction of bounded quantifications *)
+(** Configuration of the engine. *)
 
 type value =
   | Term of Why3.Term.term (* invariant: is in normal form *)
@@ -113,6 +110,11 @@ type bounded_quant_result = {
 type 'a bounded_quant =
   'a engine -> Term.vsymbol -> cond:Term.term -> bounded_quant_result option
 
+val value_term : Term.term -> builtin_value
+val value_int : BigInt.t -> builtin_value
+val value_real : Number.real_value -> builtin_value
+val eval_term : Term.term -> builtin_value
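+(** Helpers for writing builtins: [value_term], [value_int] and [value_real]
+    wrap already-computed values, while [eval_term] hands a term back to the
+    engine for further evaluation. *)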
+
 val create :
   ?bounded_quant:'a bounded_quant ->
   ?builtins:(Why3.Term.lsymbol * 'a builtin) list ->
@@ -122,17 +124,23 @@ val create :
   'a ->
   'a built_in_theories list ->
   'a engine
-(** [create env known_map] creates a reduction engine with . builtins theories
-    (int.Int, etc.) extracted from [env] . known declarations from [known_map] .
-    empty set of rewrite rules *)
+(** [create env known_map] creates a reduction engine with: builtin theories
+    (int.Int, etc.) extracted from [env], known declarations from [known_map],
+    and an empty set of rewrite rules. *)
+
+val user_env : 'a engine -> 'a
+
+val reconstruct_term : unit -> 'a
+(** Force the engine to reconstruct the original term. *)
 
 exception NotARewriteRule of string
 
 val add_rule : Term.term -> 'a engine -> 'a engine
 (** [add_rule t e] turns [t] into a new rewrite rule and returns the new engine.
 
-    raise NotARewriteRule if [t] cannot be seen as a rewrite rule according to
-    the general rules given above. *)
+    @raise NotARewriteRule
+      if [t] cannot be seen as a rewrite rule according to the general rules
+      given above. *)
 
 val normalize :
   ?step_limit:int ->
@@ -144,20 +152,25 @@ val normalize :
 (** [normalize e sigma t] normalizes the term [t] with respect to the engine [e]
     with an initial variable environment [sigma].
 
-    parameter [limit] provides a maximum number of steps for execution. When
-    limit is reached, the partially reduced term is returned. parameter
-    [step_limit] provides a maximum number of steps on reductions that would
-    change the term even after reconstruction. *)
+    @param limit
+      provides a maximum number of steps for execution. When the limit is
+      reached, the partially reduced term is returned.
+    @param step_limit
+      provides a maximum number of steps on reductions that would change the
+      term even after reconstruction. *)
 
-open Term
+exception NoMatch of (Term.term * Term.term * Term.term option) option
+(** [NoMatch (t1, t2, t3)]
 
-exception NoMatch of (term * term * term option) option
-(** [NoMatch (t1, t2, t3)] Cannot match [t1] with [t2]. If [t3] exists then [t1]
-    is already matched with [t3]. *)
+    Cannot match [t1] with [t2]. If [t3] exists then [t1] is already matched
+    with [t3]. *)
 
-exception NoMatchpat of (pattern * pattern) option
+exception NoMatchpat of (Term.pattern * Term.pattern) option
 
-type substitution = term Mvs.t
+type substitution = Term.term Term.Mvs.t
 
 val first_order_matching :
-  Svs.t -> term list -> term list -> Ty.ty Ty.Mtv.t * substitution
+  Term.Svs.t ->
+  Term.term list ->
+  Term.term list ->
+  Ty.ty Ty.Mtv.t * substitution
diff --git a/src/interpretation/interpreter_theory.ml b/src/interpretation/interpreter_theory.ml
new file mode 100644
index 0000000000000000000000000000000000000000..e94c88ff3e1c682fe5b8355610ae1e154790829f
--- /dev/null
+++ b/src/interpretation/interpreter_theory.ml
@@ -0,0 +1,435 @@
+(**************************************************************************)
+(*                                                                        *)
+(*  This file is part of CAISAR.                                          *)
+(*                                                                        *)
+(*  Copyright (C) 2023                                                    *)
+(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
+(*         alternatives)                                                  *)
+(*                                                                        *)
+(*  You can redistribute it and/or modify it under the terms of the GNU   *)
+(*  Lesser General Public License as published by the Free Software       *)
+(*  Foundation, version 2.1.                                              *)
+(*                                                                        *)
+(*  It is distributed in the hope that it will be useful,                 *)
+(*  but WITHOUT ANY WARRANTY; without even the implied warranty of        *)
+(*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the          *)
+(*  GNU Lesser General Public License for more details.                   *)
+(*                                                                        *)
+(*  See the GNU Lesser General Public License version 2.1                 *)
+(*  for more details (enclosed in the file licenses/LGPLv2.1).            *)
+(*                                                                        *)
+(**************************************************************************)
+
+module IRE = Interpreter_reduction_engine
+module ITypes = Interpreter_types
+open Base
+
+(* TODO: Remove this and use the common one. *)
+let const_real_of_float value =
+  let neg = Float.is_negative value in
+  let value = Fmt.str "%.64f" (Float.abs value) in
+  (* Split into integer and fractional parts. *)
+  let int_frac = String.split value ~on:'.' in
+  let int = List.hd_exn int_frac in
+  let frac =
+    match List.tl_exn int_frac with
+    | [] -> ""
+    | [ s ] -> s
+    | _ -> assert false (* Since every float has one '.' at most. *)
+  in
+  Why3.Constant.ConstReal
+    (Why3.Number.real_literal ~radix:10 ~neg ~int ~frac ~exp:None)
+
+let fail_on_unexpected_argument ls =
+  Logging.code_error ~src:Logging.src_interpret_goal (fun m ->
+    m "Unexpected argument(s) for '%a'" Why3.Pretty.print_ls ls)
+
+(* -------------------------------------------------------------------------- *)
+(* --- Vector Builtins                                                        *)
+(* -------------------------------------------------------------------------- *)
+
+module Vector = struct
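+  (* [v[i]]: the [i]-th element of an abstract vector, or the [i]-th row of a
+     CSV dataset turned into a (label, features) pair. *)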
+  let (get : _ IRE.builtin) =
+   fun engine ls vl ty ->
+    match vl with
+    | [
+     Term ({ t_node = Tapp (ls1, tl1); _ } as t1);
+     Term ({ t_node = Tconst (ConstInt i); _ } as _t2);
+    ] -> (
+      let i = Why3.Number.to_small_integer i in
+      match ITypes.op_of_ls engine ls1 with
+      | Dataset (_, DS_csv csv) ->
+        let row = List.nth_exn csv i in
+        let label, features =
+          match row with
+          | [] | [ _ ] -> assert false
+          | label :: features -> (label, features)
+        in
+        let ty_features =
+          match ty with
+          | Some { ty_node = Tyapp (_, [ _; ty ]); _ } -> Some ty
+          | _ -> assert false
+        in
+        let t_features, t_label =
+          ( ITypes.term_of_op engine (Data (D_csv features)) ty_features,
+            Why3.(Term.t_int_const (BigInt.of_int (Int.of_string label))) )
+        in
+        IRE.value_term (Why3.Term.t_tuple [ t_label; t_features ])
+      | Vector v ->
+        let n = Option.value_exn (Language.lookup_vector v) in
+        if List.length tl1 <> n
+        then
+          Logging.code_error ~src:Logging.src_interpret_goal (fun m ->
+            m
+              "Mismatch between (container) vector length and number of \
+               (contained) input variables.");
+        if i < n
+        then IRE.value_term (List.nth_exn tl1 i)
+        else
+          Logging.user_error ?loc:t1.t_loc (fun m ->
+            m "Index constant %d is out-of-bounds [0,%d]" i (n - 1))
+      | Data _ | Model _ -> assert false)
+    | [ Term _t1; Term _t2 ] -> IRE.reconstruct_term ()
+    | _ -> fail_on_unexpected_argument ls
+
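+  (* [length v]: number of rows of a CSV dataset, of elements of a data row,
+     or of elements of an abstract vector. *)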
+  let length : _ IRE.builtin =
+   fun engine ls vl _ty ->
+    match vl with
+    | [ Term { t_node = Tapp (ls, []); _ } ] -> (
+      match ITypes.op_of_ls engine ls with
+      | Dataset (_, DS_csv csv) ->
+        IRE.value_int (Why3.BigInt.of_int (Csv.lines csv))
+      | Data (D_csv data) ->
+        IRE.value_int (Why3.BigInt.of_int (List.length data))
+      | Vector _ | Model _ -> assert false)
+    | [ Term { t_node = Tapp (ls, tl); _ } ] -> (
+      match ITypes.op_of_ls engine ls with
+      | Vector v ->
+        let n = Option.value_exn (Language.lookup_vector v) in
+        assert (List.length tl = n);
+        IRE.value_int (Why3.BigInt.of_int n)
+      | Dataset _ | Data _ | Model _ -> assert false)
+    | [ Term _t ] -> IRE.reconstruct_term ()
+    | _ -> fail_on_unexpected_argument ls
+
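+  (* [v - d]: element-wise subtraction of a data row [d] from an abstract
+     vector [v], using Float64 subtraction on each component. *)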
+  let sub : _ IRE.builtin =
+   fun engine ls vl ty ->
+    match vl with
+    | [
+     Term ({ t_node = Tapp (ls1, tl1); _ } as t1);
+     Term ({ t_node = Tapp (ls2, _); _ } as _t2);
+    ] -> (
+      match ITypes.(op_of_ls engine ls1, op_of_ls engine ls2) with
+      | Vector v, Data (D_csv data) ->
+        let n = Option.value_exn (Language.lookup_vector v) in
+        assert (n = List.length data);
+        let ty_cst =
+          match ty with
+          | Some { ty_node = Tyapp (_, [ ty ]); _ } -> ty
+          | _ -> assert false
+        in
+        let csts =
+          List.map data ~f:(fun d ->
+            let cst = const_real_of_float (Float.of_string d) in
+            Why3.Term.t_const cst ty_cst)
+        in
+        let { ITypes.env; _ } = IRE.user_env engine in
+        let args =
+          let minus =
+            (* TODO: generalize wrt the type of constants [csts]. *)
+            let th = Symbols.Float64.create env in
+            th.infix.minus
+          in
+          List.map2_exn tl1 csts ~f:(fun tl c ->
+            (Why3.Term.t_app_infer minus [ tl; c ], ty_cst))
+        in
+        let op = ITypes.Vector (Language.create_vector env n) in
+        IRE.value_term (ITypes.term_of_op ~args engine op ty)
+      | Vector _, Vector _ ->
+        Logging.user_error ?loc:t1.t_loc (fun m ->
+          m "Invalid substraction operation involving two abstract vectors")
+      | _ ->
+        Logging.user_error ?loc:t1.t_loc (fun m ->
+          m "Invalid subtraction operation"))
+    | [ Term _t1; Term _t2 ] -> IRE.reconstruct_term ()
+    | _ -> fail_on_unexpected_argument ls
+
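+  (* [v + d] or [d + v]: element-wise addition of a data row and an abstract
+     vector, using Float64 addition on each component. *)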
+  let add : _ IRE.builtin =
+   fun engine ls vl ty ->
+    match vl with
+    | [
+     Term ({ t_node = Tapp (ls1, tl1); _ } as t1);
+     Term ({ t_node = Tapp (ls2, _); _ } as _t2);
+    ] -> (
+      match ITypes.(op_of_ls engine ls1, op_of_ls engine ls2) with
+      | Data (D_csv data), Vector v | Vector v, Data (D_csv data) ->
+        let n = Option.value_exn (Language.lookup_vector v) in
+        assert (n = List.length data);
+        let ty_cst =
+          match ty with
+          | Some { ty_node = Tyapp (_, [ ty ]); _ } -> ty
+          | _ -> assert false
+        in
+        let csts =
+          List.map data ~f:(fun d ->
+            let cst = const_real_of_float (Float.of_string d) in
+            Why3.Term.t_const cst ty_cst)
+        in
+        let { ITypes.env; _ } = IRE.user_env engine in
+        let args =
+          let plus =
+            (* TODO: generalize wrt the type of constants [csts]. *)
+            let th = Symbols.Float64.create env in
+            th.infix.plus
+          in
+          List.map2_exn tl1 csts ~f:(fun tl c ->
+            (Why3.Term.t_app_infer plus [ tl; c ], ty_cst))
+        in
+        let op = ITypes.Vector (Language.create_vector env n) in
+        IRE.value_term (ITypes.term_of_op ~args engine op ty)
+      | Vector _, Vector _ ->
+        Logging.user_error ?loc:t1.t_loc (fun m ->
+          m "%a: Invalid addition operation involving two abstract vectors"
+            (Fmt.option Why3.Loc.pp_position)
+            t1.t_loc)
+      | _ ->
+        Logging.user_error ?loc:t1.t_loc (fun m ->
+          m "Invalid addition operation involving two non-vectors"))
+    | [ Term _t1; Term _t2 ] -> IRE.reconstruct_term ()
+    | _ -> fail_on_unexpected_argument ls
+
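+  (* [mapi v f]: beta-reduces the lambda [f] on each (index, element) pair of
+     an abstract vector, yielding a new abstract vector of the same length. *)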
+  let mapi : _ IRE.builtin =
+   fun engine ls vl ty ->
+    match vl with
+    | [
+     Term ({ t_node = Tapp (ls1, tl1); _ } as _t1);
+     Term ({ t_node = Teps _tb; _ } as t2);
+    ] -> (
+      assert (Why3.Term.t_is_lambda t2);
+      match ITypes.op_of_ls engine ls1 with
+      | Vector v ->
+        let n = Option.value_exn (Language.lookup_vector v) in
+        assert (List.length tl1 = n);
+        let args =
+          List.mapi tl1 ~f:(fun idx t ->
+            let idx = Why3.Term.t_int_const (Why3.BigInt.of_int idx) in
+            (Why3.Term.t_func_app_beta_l t2 [ idx; t ], Option.value_exn t.t_ty))
+        in
+        let op =
+          let { ITypes.env; _ } = IRE.user_env engine in
+          ITypes.Vector (Language.create_vector env n)
+        in
+        IRE.eval_term (ITypes.term_of_op ~args engine op ty)
+      | Dataset (_, DS_csv csv) ->
+        IRE.value_int (Why3.BigInt.of_int (Csv.lines csv))
+      | Data _ | Model _ -> assert false)
+    | [ Term _t1; Term _t2 ] -> IRE.reconstruct_term ()
+    | _ -> fail_on_unexpected_argument ls
+
+  let builtins : _ IRE.built_in_theories =
+    ( [ "caisar"; "types" ],
+      "Vector",
+      [],
+      [
+        ([ Why3.Ident.op_get "" ] (* ([]) *), None, get);
+        ([ Why3.Ident.op_infix "-" ], None, sub);
+        ([ Why3.Ident.op_infix "+" ], None, add);
+        ([ "length" ], None, length);
+        ([ "mapi" ], None, mapi);
+      ] )
+end
+
+(* -------------------------------------------------------------------------- *)
+(* --- Model NN Builtins                                                      *)
+(* -------------------------------------------------------------------------- *)
+
+module NN = struct
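+  (* [read_model filename]: registers a neural network, dispatching on the
+     file extension (.nnet or .onnx). *)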
+  let read_model : _ IRE.builtin =
+   fun engine ls vl ty ->
+    match vl with
+    | [ Term { t_node = Tconst (ConstStr neural_network); t_loc; _ } ] ->
+      let { ITypes.env; cwd; _ } = IRE.user_env engine in
+      let op =
+        let nn_ext = Stdlib.Filename.extension neural_network in
+        let nn_format, nn_type =
+          match String.lowercase nn_ext with
+          | ".nnet" -> (`NNet, ITypes.NNet)
+          | ".onnx" -> (`ONNX, ITypes.ONNX)
+          | _ ->
+            Logging.user_error ?loc:t_loc (fun m ->
+              m "Unrecognized neural network format '%s'" nn_ext)
+        in
+        let nn_filename = Stdlib.Filename.concat cwd neural_network in
+        let nn = Language.create_nn env nn_format nn_filename in
+        ITypes.Model (NN (nn, nn_type))
+      in
+      IRE.value_term (ITypes.term_of_op engine op ty)
+    | [ Term _t1; Term _t2 ] -> IRE.reconstruct_term ()
+    | _ -> fail_on_unexpected_argument ls
+
+  let apply : _ IRE.builtin =
+   fun _engine ls vl _ty ->
+    match vl with
+    | [ Term _t1; Term _t2 ] -> IRE.reconstruct_term ()
+    | _ -> fail_on_unexpected_argument ls
+
+  let builtins : _ IRE.built_in_theories =
+    ( [ "caisar"; "model" ],
+      "NN",
+      [],
+      [
+        ([ "read_model" ], None, read_model);
+        ([ Why3.Ident.op_infix "@@" ], None, apply);
+      ] )
+end
+
+(* -------------------------------------------------------------------------- *)
+(* --- Model SVM Builtins                                                     *)
+(* -------------------------------------------------------------------------- *)
+
+module SVM = struct
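+  (* [read_model filename]: registers an SVM in OVO (.ovo) format;
+     [read_model_with_abstraction] additionally selects the Interval, Raf or
+     Hybrid abstraction. *)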
+  let read_model : _ IRE.builtin =
+   fun engine ls vl ty ->
+    match vl with
+    | [ Term { t_node = Tconst (ConstStr svm); t_loc; _ } ] ->
+      let { ITypes.env; cwd; _ } = IRE.user_env engine in
+      let op =
+        let svm_ext = Stdlib.Filename.extension svm in
+        let svm_filename = Stdlib.Filename.concat cwd svm in
+        let svm =
+          match String.lowercase svm_ext with
+          | ".ovo" -> Language.create_svm env svm_filename
+          | _ ->
+            Logging.user_error ?loc:t_loc (fun m ->
+              m "Unrecognized SVM format '%s'" svm_ext)
+        in
+        ITypes.Model (SVM svm)
+      in
+      IRE.value_term (ITypes.term_of_op engine op ty)
+    | [ Term _t1; Term _t2 ] -> IRE.reconstruct_term ()
+    | _ -> fail_on_unexpected_argument ls
+
+  let read_model_with_abstraction : _ IRE.builtin =
+   fun engine ls vl ty ->
+    match vl with
+    | [
+     Term ({ t_node = Tconst (ConstStr svm); _ } as t1);
+     Term ({ t_node = Tapp ({ ls_name = { id_string; _ }; _ }, []); _ } as t2);
+    ] ->
+      let { ITypes.env; cwd; _ } = IRE.user_env engine in
+      let op =
+        let svm_ext = Stdlib.Filename.extension svm in
+        let svm_filename = Stdlib.Filename.concat cwd svm in
+        let abstraction =
+          match id_string with
+          | "Interval" -> Language.Interval
+          | "Raf" -> Language.Raf
+          | "Hybrid" -> Language.Hybrid
+          | _ ->
+            Logging.user_error ?loc:t2.t_loc (fun m ->
+              m "Unrecognized SVM abstraction '%s'" id_string)
+        in
+        let svm =
+          match String.lowercase svm_ext with
+          | ".ovo" -> Language.create_svm env ~abstraction svm_filename
+          | _ ->
+            Logging.user_error ?loc:t1.t_loc (fun m ->
+              m "Unrecognized SVM format '%s'" svm_ext)
+        in
+        ITypes.Model (SVM svm)
+      in
+      IRE.value_term (ITypes.term_of_op engine op ty)
+    | [ Term _t1; Term _t2 ] -> IRE.reconstruct_term ()
+    | _ -> fail_on_unexpected_argument ls
+
+  let apply : _ IRE.builtin =
+   fun _engine ls vl _ty ->
+    match vl with
+    | [ Term _t1; Term _t2 ] -> IRE.reconstruct_term ()
+    | _ -> fail_on_unexpected_argument ls
+
+  let builtins : _ IRE.built_in_theories =
+    ( [ "caisar"; "model" ],
+      "SVM",
+      [],
+      [
+        ([ "read_model" ], None, read_model);
+        ([ "read_model_with_abstraction" ], None, read_model_with_abstraction);
+        ([ Why3.Ident.op_infix "@@" ], None, apply);
+      ] )
+end
+
+(* -------------------------------------------------------------------------- *)
+(* --- CSV Dataset Builtins                                                   *)
+(* -------------------------------------------------------------------------- *)
+
+module CSV = struct
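+  (* [read_dataset filename]: loads a CSV dataset; [infer_min_label] and
+     [infer_max_label] scan its first column for the minimum and maximum class
+     labels. *)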
+  let read_dataset : _ IRE.builtin =
+   fun engine ls vl ty ->
+    match vl with
+    | [ Term { t_node = Tconst (ConstStr dataset); _ } ] ->
+      let { ITypes.env; cwd; _ } = IRE.user_env engine in
+      let op =
+        let filename = Stdlib.Filename.concat cwd dataset in
+        let ds = Language.create_dataset_csv env filename in
+        let dataset = ITypes.DS_csv (Csv.load filename) in
+        ITypes.Dataset (ds, dataset)
+      in
+      IRE.value_term (ITypes.term_of_op engine op ty)
+    | [ Term _t1 ] -> IRE.reconstruct_term ()
+    | _ -> fail_on_unexpected_argument ls
+
+  let infer_min_label : _ IRE.builtin =
+   fun engine ls vl _ty ->
+    match vl with
+    | [ Term ({ t_node = Tapp (ls1, _); _ } as _t1) ] -> (
+      match ITypes.op_of_ls engine ls1 with
+      | Dataset (_, DS_csv csv) ->
+        (* TODO: test for non-empty csv. *)
+        let min_label =
+          List.fold_left csv ~init:None ~f:(fun min_label elt ->
+            let label = Int.of_string (List.hd_exn elt) in
+            Some
+              (match min_label with
+              | None -> label
+              | Some min_label -> min min_label label))
+        in
+        let min_label = Option.value_exn min_label in
+        IRE.value_int (Why3.BigInt.of_int min_label)
+      | _ -> assert false)
+    | [ Term _t1 ] -> IRE.reconstruct_term ()
+    | _ -> fail_on_unexpected_argument ls
+
+  let infer_max_label : _ IRE.builtin =
+   fun engine ls vl _ty ->
+    match vl with
+    | [ Term ({ t_node = Tapp (ls1, _); _ } as _t1) ] -> (
+      match ITypes.op_of_ls engine ls1 with
+      | Dataset (_, DS_csv csv) ->
+        (* TODO: test for non-empty csv. *)
+        let max_label =
+          List.fold_left csv ~init:None ~f:(fun max_label elt ->
+            let label = Int.of_string (List.hd_exn elt) in
+            Some
+              (match max_label with
+              | None -> label
+              | Some max_label -> max max_label label))
+        in
+        let max_label = Option.value_exn max_label in
+        IRE.value_int (Why3.BigInt.of_int max_label)
+      | _ -> assert false)
+    | [ Term _t1 ] -> IRE.reconstruct_term ()
+    | _ -> fail_on_unexpected_argument ls
+
+  let builtins : _ IRE.built_in_theories =
+    ( [ "caisar"; "dataset" ],
+      "CSV",
+      [],
+      [
+        ([ "read_dataset" ], None, read_dataset);
+        ([ "infer_min_label" ], None, infer_min_label);
+        ([ "infer_max_label" ], None, infer_max_label);
+      ] )
+end
+
+let builtins = [ Vector.builtins; NN.builtins; SVM.builtins; CSV.builtins ]
diff --git a/src/transformations/simplify_rel.mli b/src/interpretation/interpreter_theory.mli
similarity index 91%
rename from src/transformations/simplify_rel.mli
rename to src/interpretation/interpreter_theory.mli
index 6521123f56ad5e99247176780076181183ce3f7d..a5fd1ab4884837f1b14fe6188e6d9ae92845085a 100644
--- a/src/transformations/simplify_rel.mli
+++ b/src/interpretation/interpreter_theory.mli
@@ -20,5 +20,7 @@
 (*                                                                        *)
 (**************************************************************************)
 
-val init : unit -> unit
-(** Register the transformation. *)
+module IRE = Interpreter_reduction_engine
+module ITypes = Interpreter_types
+
+val builtins : ITypes.interpreter_env IRE.built_in_theories list
diff --git a/src/interpretation/interpreter_types.ml b/src/interpretation/interpreter_types.ml
new file mode 100644
index 0000000000000000000000000000000000000000..d029899b365c3f351383316d4a43569d73ce2e41
--- /dev/null
+++ b/src/interpretation/interpreter_types.ml
@@ -0,0 +1,112 @@
+(**************************************************************************)
+(*                                                                        *)
+(*  This file is part of CAISAR.                                          *)
+(*                                                                        *)
+(*  Copyright (C) 2023                                                    *)
+(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
+(*         alternatives)                                                  *)
+(*                                                                        *)
+(*  You can redistribute it and/or modify it under the terms of the GNU   *)
+(*  Lesser General Public License as published by the Free Software       *)
+(*  Foundation, version 2.1.                                              *)
+(*                                                                        *)
+(*  It is distributed in the hope that it will be useful,                 *)
+(*  but WITHOUT ANY WARRANTY; without even the implied warranty of        *)
+(*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the          *)
+(*  GNU Lesser General Public License for more details.                   *)
+(*                                                                        *)
+(*  See the GNU Lesser General Public License version 2.1                 *)
+(*  for more details (enclosed in the file licenses/LGPLv2.1).            *)
+(*                                                                        *)
+(**************************************************************************)
+
+module IRE = Interpreter_reduction_engine
+open Base
+
+type nn =
+  | NNet
+  | ONNX
+[@@deriving show]
+
+type model =
+  | NN of Why3.Term.lsymbol * nn
+      [@printer
+        fun fmt (ls, nn) ->
+          Fmt.pf fmt "(%a, %a)" pp_nn nn
+            Fmt.(option Language.pp_nn)
+            (Language.lookup_nn ls)]
+  | SVM of Why3.Term.lsymbol
+      [@printer
+        fun fmt ls ->
+          Fmt.pf fmt "%a" Fmt.(option Language.pp_svm) (Language.lookup_svm ls)]
+[@@deriving show]
+
+type dataset = DS_csv of Csv.t [@printer fun fmt _ -> Fmt.pf fmt "<csv>"]
+[@@deriving show]
+
+type data = D_csv of string list [@@deriving show]
+
+type interpreter_op =
+  | Model of model
+  | Dataset of Why3.Term.lsymbol * dataset
+      [@printer
+        fun fmt (ls, ds) ->
+          Fmt.pf fmt "(%a, %a)"
+            Fmt.(option Language.pp_dataset)
+            (Language.lookup_dataset_csv ls)
+            pp_dataset ds]
+  | Data of data
+  | Vector of Why3.Term.lsymbol
+      [@printer
+        fun fmt v -> Fmt.pf fmt "%a" Fmt.(option int) (Language.lookup_vector v)]
+[@@deriving show]
+
+type interpreter_op_hls =
+  (interpreter_op Why3.Term.Hls.t
+  [@printer
+    fun fmt hls ->
+      Why3.(
+        Pp.print_iter2 Term.Hls.iter Pp.newline Pp.comma Pretty.print_ls
+          pp_interpreter_op fmt hls)])
+[@@deriving show]
+
+type ls_htbl_interpreter_op = (interpreter_op, Why3.Term.lsymbol) Base.Hashtbl.t
+
+type interpreter_env = {
+  op_of_ls : interpreter_op_hls;
+  ls_of_op : ls_htbl_interpreter_op;
+  env : Why3.Env.env;
+  cwd : string;
+}
+
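+(* Retrieves the lsymbol registered for [interpreter_op], or creates and
+   registers one, recording the binding in both tables ([ls_of_op] and
+   [op_of_ls]). Model, vector and dataset operators reuse their own lsymbol. *)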
+let ls_of_op engine interpreter_op ty_args ty =
+  let interpreter_env = IRE.user_env engine in
+  Hashtbl.find_or_add interpreter_env.ls_of_op interpreter_op
+    ~default:(fun () ->
+    let id = Why3.Ident.id_fresh "interpreter_op" in
+    let ls =
+      match interpreter_op with
+      | Model (NN (m, _) | SVM m) -> m
+      | Vector v -> v
+      | Dataset (d, _) -> d
+      | _ -> Why3.Term.create_lsymbol id ty_args ty
+    in
+    Hashtbl.Poly.add_exn interpreter_env.ls_of_op ~key:interpreter_op ~data:ls;
+    Why3.Term.Hls.add interpreter_env.op_of_ls ls interpreter_op;
+    ls)
+
+let op_of_ls engine ls =
+  let interpreter_env = IRE.user_env engine in
+  Why3.Term.Hls.find interpreter_env.op_of_ls ls
+
+let term_of_op ?(args = []) engine interpreter_op ty =
+  let t_args, ty_args = List.unzip args in
+  Why3.Term.t_app_infer (ls_of_op engine interpreter_op ty_args ty) t_args
+
+let interpreter_env ~cwd env =
+  {
+    ls_of_op = Hashtbl.Poly.create ();
+    op_of_ls = Why3.Term.Hls.create 10;
+    env;
+    cwd;
+  }
diff --git a/src/interpretation/interpreter_types.mli b/src/interpretation/interpreter_types.mli
new file mode 100644
index 0000000000000000000000000000000000000000..073cb45b964f31e2efad64b07caff8f277d6e414
--- /dev/null
+++ b/src/interpretation/interpreter_types.mli
@@ -0,0 +1,64 @@
+(**************************************************************************)
+(*                                                                        *)
+(*  This file is part of CAISAR.                                          *)
+(*                                                                        *)
+(*  Copyright (C) 2023                                                    *)
+(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
+(*         alternatives)                                                  *)
+(*                                                                        *)
+(*  You can redistribute it and/or modify it under the terms of the GNU   *)
+(*  Lesser General Public License as published by the Free Software       *)
+(*  Foundation, version 2.1.                                              *)
+(*                                                                        *)
+(*  It is distributed in the hope that it will be useful,                 *)
+(*  but WITHOUT ANY WARRANTY; without even the implied warranty of        *)
+(*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the          *)
+(*  GNU Lesser General Public License for more details.                   *)
+(*                                                                        *)
+(*  See the GNU Lesser General Public License version 2.1                 *)
+(*  for more details (enclosed in the file licenses/LGPLv2.1).            *)
+(*                                                                        *)
+(**************************************************************************)
+
+module IRE = Interpreter_reduction_engine
+
+type nn =
+  | NNet
+  | ONNX
+[@@deriving show]
+
+type model =
+  | NN of Why3.Term.lsymbol * nn
+  | SVM of Why3.Term.lsymbol
+[@@deriving show]
+
+type dataset = DS_csv of Csv.t [@@deriving show]
+type data = D_csv of string list [@@deriving show]
+
+type interpreter_op =
+  | Model of model
+  | Dataset of Why3.Term.lsymbol * dataset
+  | Data of data
+  | Vector of Why3.Term.lsymbol
+[@@deriving show]
+
+type interpreter_op_hls = interpreter_op Why3.Term.Hls.t [@@deriving show]
+type ls_htbl_interpreter_op = (interpreter_op, Why3.Term.lsymbol) Base.Hashtbl.t
+
+type interpreter_env = private {
+  op_of_ls : interpreter_op_hls;
+  ls_of_op : ls_htbl_interpreter_op;
+  env : Why3.Env.env;
+  cwd : string;
+}
+
+val op_of_ls : interpreter_env IRE.engine -> Why3.Term.lsymbol -> interpreter_op
+
+val term_of_op :
+  ?args:(Why3.Term.term * Why3.Ty.ty) list ->
+  interpreter_env IRE.engine ->
+  interpreter_op ->
+  Why3.Ty.ty option ->
+  Why3.Term.term
+
+val interpreter_env : cwd:string -> Why3.Env.env -> interpreter_env
diff --git a/src/language.ml b/src/language.ml
index fc9d97b080eafce7800372684f0d0d104fe83f51..802d2696233512e78ce8861aee2adc83a734db65 100644
--- a/src/language.ml
+++ b/src/language.ml
@@ -160,13 +160,15 @@ let register_ovo_support () =
   Env.register_format ~desc:"OVO format" Pmodule.mlw_language "OVO" [ "ovo" ]
     (fun env _ filename _ -> ovo_parser env filename)
 
-(* -- Vectors *)
+(* -------------------------------------------------------------------------- *)
+(* --- Vectors                                                                *)
+(* -------------------------------------------------------------------------- *)
 
 let vectors = Term.Hls.create 10
 
 let ty_float64_t env =
-  let th = Env.read_theory env [ "ieee_float" ] "Float64" in
-  Ty.ty_app (Theory.ns_find_ts th.th_export [ "t" ]) []
+  let th = Symbols.Float64.create env in
+  th.ty
 
 let ty_vector env ty_elt =
   let th = Env.read_theory env [ "caisar"; "types" ] "Vector" in
@@ -188,17 +190,24 @@ let create_vector =
 let lookup_vector = Term.Hls.find_opt vectors
 let mem_vector = Term.Hls.mem vectors
 
-(* -- Neural Networks *)
+(* -------------------------------------------------------------------------- *)
+(* --- Neural Networks                                                        *)
+(* -------------------------------------------------------------------------- *)
 
 type nn = {
   nn_nb_inputs : int;
   nn_nb_outputs : int;
   nn_ty_elt : Ty.ty; [@printer fun fmt ty -> Fmt.pf fmt "%a" Pretty.print_ty ty]
   nn_filename : string;
-  nn_nier : Onnx.G.t option; [@opaque]
+  nn_format : nn_format;
 }
 [@@deriving show]
 
+and nn_format =
+  | NNet
+  | ONNX of Onnx.G.t option [@printer fun fmt _ -> Fmt.pf fmt "<nier>"]
+[@@deriving show]
+
 let nets = Term.Hls.create 10
 
 let fresh_nn_ls env name =
@@ -213,63 +222,146 @@ let fresh_nn_ls env name =
   let id = Ident.id_fresh name in
   Term.create_fsymbol id [] ty_model
 
-let create_nn_nnet =
+let create_nn_nnet env filename =
+  let model = Nnet.parse ~permissive:true filename in
+  match model with
+  | Error s -> Loc.errorm "%s" s
+  | Ok { n_inputs; n_outputs; _ } ->
+    {
+      nn_nb_inputs = n_inputs;
+      nn_nb_outputs = n_outputs;
+      nn_ty_elt = ty_float64_t env;
+      nn_filename = filename;
+      nn_format = NNet;
+    }
+
+let create_nn_onnx env filename =
+  let model = Onnx.parse filename in
+  match model with
+  | Error s -> Loc.errorm "%s" s
+  | Ok { n_inputs; n_outputs; nier } ->
+    let nier =
+      match nier with
+      | Error msg ->
+        Logs.warn (fun m ->
+          m "Cannot build network intermediate representation:@ %s" msg);
+        None
+      | Ok nier -> Some nier
+    in
+    {
+      nn_nb_inputs = n_inputs;
+      nn_nb_outputs = n_outputs;
+      nn_ty_elt = ty_float64_t env;
+      nn_filename = filename;
+      nn_format = ONNX nier;
+    }
+
+let create_nn =
   Env.Wenv.memoize 13 (fun env ->
-    let h = Hashtbl.create (module String) in
-    let ty_elt = ty_float64_t env in
-    Hashtbl.findi_or_add h ~default:(fun filename ->
-      let ls = fresh_nn_ls env "nnet_nn" in
-      let nn =
-        let model = Nnet.parse ~permissive:true filename in
-        match model with
-        | Error s -> Loc.errorm "%s" s
-        | Ok { n_inputs; n_outputs; _ } ->
-          {
-            nn_nb_inputs = n_inputs;
-            nn_nb_outputs = n_outputs;
-            nn_ty_elt = ty_elt;
-            nn_filename = filename;
-            nn_nier = None;
-          }
+    let h = Hashtbl.Poly.create () in
+    Hashtbl.Poly.findi_or_add h ~default:(fun format ->
+      let format_nn, create_nn =
+        match format with
+        | `NNet -> ("nnet", create_nn_nnet)
+        | `ONNX -> ("onnx", create_nn_onnx)
       in
-      Term.Hls.add nets ls nn;
-      ls))
+      let name_nn = "nn_" ^ format_nn in
+      let h = Hashtbl.create (module String) in
+      Hashtbl.findi_or_add h ~default:(fun filename ->
+        let ls = fresh_nn_ls env name_nn in
+        let nn = create_nn env filename in
+        Term.Hls.add nets ls nn;
+        ls)))
 
-let create_nn_onnx =
-  Env.Wenv.memoize 13 (fun env ->
+let lookup_nn = Term.Hls.find_opt nets
+let mem_nn = Term.Hls.mem nets
+let iter_nn f = Term.Hls.iter f nets
+
+let add_meta_nn task ls =
+  match lookup_nn ls with
+  | None -> task
+  | Some { nn_filename; _ } ->
+    Task.add_meta task Meta.meta_nn_filename [ MAstr nn_filename ]
+
+(* -------------------------------------------------------------------------- *)
+(* --- Support Vector Machines (SVM)                                          *)
+(* -------------------------------------------------------------------------- *)
+
+type svm = {
+  svm_nb_inputs : int;
+  svm_nb_outputs : int;
+  svm_abstraction : svm_abstraction;
+  svm_filename : string;
+}
+[@@deriving show]
+
+and svm_abstraction =
+  | Interval
+  | Raf
+  | Hybrid
+[@@deriving show]
+
+let string_of_svm_abstraction = function
+  | Interval -> "interval"
+  | Raf -> "raf"
+  | Hybrid -> "hybrid"
+
+let svm_abstraction_of_string s =
+  match String.lowercase s with
+  | "interval" -> Some Interval
+  | "raf" -> Some Raf
+  | "hybrid" -> Some Hybrid
+  | _ -> None
+
+let svms = Term.Hls.create 10
+
+let fresh_svm_ls env name =
+  let ty_kind =
+    let th = Env.read_theory env [ "caisar"; "model" ] "SVM" in
+    Ty.ty_app (Theory.ns_find_ts th.th_export [ "svm" ]) []
+  in
+  let ty_model =
+    let th = Env.read_theory env [ "caisar"; "model" ] "Model" in
+    Ty.ty_app (Theory.ns_find_ts th.th_export [ "model" ]) [ ty_kind ]
+  in
+  let id = Ident.id_fresh name in
+  Term.create_fsymbol id [] ty_model
+
+let create_svm =
+  Env.Wenv.memoize 13 (fun env ?(abstraction = Hybrid) ->
     let h = Hashtbl.create (module String) in
-    let ty_elt = ty_float64_t env in
     Hashtbl.findi_or_add h ~default:(fun filename ->
-      let ls = fresh_nn_ls env "onnx_nn" in
-      let onnx =
-        let model = Onnx.parse filename in
+      let name = "svm_ovo_" ^ string_of_svm_abstraction abstraction in
+      let ls = fresh_svm_ls env name in
+      let svm =
+        let model = Ovo.parse filename in
         match model with
         | Error s -> Loc.errorm "%s" s
-        | Ok { n_inputs; n_outputs; nier } ->
-          let nier =
-            match nier with
-            | Error msg ->
-              Logs.warn (fun m ->
-                m "Cannot build network intermediate representation:@ %s" msg);
-              None
-            | Ok nier -> Some nier
-          in
+        | Ok { n_inputs; n_outputs } ->
           {
-            nn_nb_inputs = n_inputs;
-            nn_nb_outputs = n_outputs;
-            nn_ty_elt = ty_elt;
-            nn_filename = filename;
-            nn_nier = nier;
+            svm_nb_inputs = n_inputs;
+            svm_nb_outputs = n_outputs;
+            svm_abstraction = abstraction;
+            svm_filename = filename;
           }
       in
-      Term.Hls.add nets ls onnx;
+      Term.Hls.add svms ls svm;
       ls))
 
-let lookup_nn = Term.Hls.find_opt nets
-let mem_nn = Term.Hls.mem nets
-let iter_nn f = Term.Hls.iter f nets
+let lookup_svm = Term.Hls.find_opt svms
+let mem_svm = Term.Hls.mem svms
+let iter_svm f = Term.Hls.iter f svms
 
-(* -- Datasets *)
+let add_meta_svm task ls =
+  match lookup_svm ls with
+  | None -> task
+  | Some { svm_filename; svm_abstraction; _ } ->
+    Task.add_meta task Meta.meta_svm_filename
+      [ MAstr svm_filename; MAstr (string_of_svm_abstraction svm_abstraction) ]
+
+(* -------------------------------------------------------------------------- *)
+(* --- Datasets                                                               *)
+(* -------------------------------------------------------------------------- *)
 
 type dataset = CSV of string [@@deriving show]
 
@@ -294,3 +386,9 @@ let create_dataset_csv =
 
 let lookup_dataset_csv = Term.Hls.find_opt datasets
 let mem_dataset_csv = Term.Hls.mem datasets
+
+let add_meta_dataset_csv task ls =
+  match lookup_dataset_csv ls with
+  | None -> task
+  | Some (CSV filename) ->
+    Task.add_meta task Meta.meta_dataset_filename [ MAstr filename ]
diff --git a/src/language.mli b/src/language.mli
index fbd165cf3e8fb1c293bf0f22131ceebe5a61e07b..999d338a768d2c5901890bf9d17713757abe68e3 100644
--- a/src/language.mli
+++ b/src/language.mli
@@ -76,15 +76,47 @@ type nn = private {
   nn_nb_outputs : int;
   nn_ty_elt : Ty.ty;
   nn_filename : string;
-  nn_nier : Onnx.G.t option;
+  nn_format : nn_format;
 }
 [@@deriving show]
 
-val create_nn_nnet : Env.env -> string -> Term.lsymbol
-val create_nn_onnx : Env.env -> string -> Term.lsymbol
+and nn_format =
+  | NNet
+  | ONNX of Onnx.G.t option
+[@@deriving show]
+
+val create_nn : Env.env -> [ `NNet | `ONNX ] -> string -> Term.lsymbol
 val lookup_nn : Term.lsymbol -> nn option
 val mem_nn : Term.lsymbol -> bool
 val iter_nn : (Term.lsymbol -> nn -> unit) -> unit
+val add_meta_nn : Task.task -> Term.lsymbol -> Task.task
+
+(** {2 Support Vector Machines (SVM)} *)
+
+type svm = private {
+  svm_nb_inputs : int;
+  svm_nb_outputs : int;
+  svm_abstraction : svm_abstraction;
+  svm_filename : string;
+}
+[@@deriving show]
+
+and svm_abstraction =
+  | Interval
+  | Raf
+  | Hybrid
+[@@deriving show]
+
+val string_of_svm_abstraction : svm_abstraction -> string
+val svm_abstraction_of_string : string -> svm_abstraction option
+
+val create_svm :
+  Env.env -> ?abstraction:svm_abstraction -> string -> Term.lsymbol
+
+val lookup_svm : Term.lsymbol -> svm option
+val mem_svm : Term.lsymbol -> bool
+val iter_svm : (Term.lsymbol -> svm -> unit) -> unit
+val add_meta_svm : Task.task -> Term.lsymbol -> Task.task
 
 (** {2 Datasets} *)
 
@@ -93,3 +125,4 @@ type dataset = private CSV of string [@@deriving show]
 val create_dataset_csv : Env.env -> string -> Term.lsymbol
 val lookup_dataset_csv : Term.lsymbol -> dataset option
 val mem_dataset_csv : Term.lsymbol -> bool
+val add_meta_dataset_csv : Task.task -> Term.lsymbol -> Task.task
diff --git a/src/main.ml b/src/main.ml
index a48d20e752292dfcb163c308e7a3ed3f192b8207..d9b804129873fa285e5bd4ff8bc9ce7626a274b5 100644
--- a/src/main.ml
+++ b/src/main.ml
@@ -26,15 +26,17 @@ open Cmdliner
 let caisar = "caisar"
 
 let () =
-  Simplify_rel.init ();
-  Vars_on_lhs.init ()
+  Relop.register_simplify_relop ();
+  Relop.register_vars_on_lhs_of_relop ()
 
 let () =
   Pyrat.init ();
   Marabou.init ();
   Vnnlib.init ()
 
-(* -- Logs *)
+(* -------------------------------------------------------------------------- *)
+(* --- Logs                                                                   *)
+(* -------------------------------------------------------------------------- *)
 
 let log_tags =
   let all_tags = Logging.all_srcs () in
@@ -58,7 +60,9 @@ let setup_logs =
     const Logging.setup $ Fmt_cli.style_renderer () $ Logs_cli.level ()
     $ log_tags)
 
-(* -- Commands *)
+(* -------------------------------------------------------------------------- *)
+(* --- Commands                                                               *)
+(* -------------------------------------------------------------------------- *)
 
 let config detect () =
   if detect
@@ -89,7 +93,7 @@ let memlimit_of_string s =
   | [ v ], ([] | [ "M" ] | [ "MB" ]) -> Int.of_string v
   | [ v ], ([ "G" ] | [ "GB" ]) -> Int.of_string v * 1000
   | [ v ], ([ "T" ] | [ "TB" ]) -> Int.of_string v * 1000000
-  | _ -> invalid_arg "Unrecognized memory limit"
+  | _ -> Logging.user_error (fun m -> m "Unrecognized memory limit")
 
 let timelimit_of_string s =
   let s = String.strip s in
@@ -105,7 +109,7 @@ let timelimit_of_string s =
   | [ v ], ([] | [ "s" ]) -> Int.of_string v
   | [ v ], [ "m" ] -> Int.of_string v * 60
   | [ v ], [ "h" ] -> Int.of_string v * 3600
-  | _ -> invalid_arg "Unrecognized time limit"
+  | _ -> Logging.user_error (fun m -> m "Unrecognized time limit")
 
 let log_theory_answer =
   Why3.Wstdlib.Mstr.iter (fun theory_name task_answers ->
@@ -123,14 +127,14 @@ let log_theory_answer =
           additional_info)))
 
 let verify ?format ~loadpath ?memlimit ?timelimit ?dataset prover ?prover_altern
-  ?def_constants ?theories ?goals ?onnx_out_dir files =
+  ?definitions ?theories ?goals ?onnx_out_dir files =
   let memlimit = Option.map memlimit ~f:memlimit_of_string in
   let timelimit = Option.map timelimit ~f:timelimit_of_string in
   let theory_answers =
     List.map files
       ~f:
         (Verification.verify ?format ~loadpath ?memlimit ?timelimit ?dataset
-           prover ?prover_altern ?def_constants ?theories ?goals ?onnx_out_dir)
+           prover ?prover_altern ?definitions ?theories ?goals ?onnx_out_dir)
   in
   List.iter theory_answers ~f:log_theory_answer;
   theory_answers
@@ -181,14 +185,16 @@ let verify_json ?memlimit ?timelimit ?outfile json =
   let infile = Result.ok_or_failwith (Verification.File.of_json_input jin) in
   let verification_results =
     verify ~loadpath:[] ?memlimit ?timelimit ~dataset:jin.property.dataset
-      jin.prover ~def_constants:[] [ infile ]
+      jin.prover ~definitions:[] [ infile ]
   in
   match verification_results with
   | [] -> assert false (* We always build one theory from the provided JSON. *)
   | [ verification_result ] ->
     Option.iter outfile
       ~f:(record_verification_result jin.id verification_result)
-  | _ -> failwith "Unexpected more than one theory from a JSON file"
+  | _ ->
+    Logging.user_error (fun m ->
+      m "Unexpected more than one theory from a JSON file")
 
 let verify_xgboost ?memlimit ?timelimit xgboost dataset prover =
   let memlimit = Option.map memlimit ~f:memlimit_of_string in
@@ -199,7 +205,9 @@ let exec_cmd cmdname cmd =
   Logs.debug (fun m -> m "Execution of command '%s'" cmdname);
   cmd ()
 
-(* -- Command line. *)
+(* -------------------------------------------------------------------------- *)
+(* --- Command line                                                           *)
+(* -------------------------------------------------------------------------- *)
 
 let memlimit =
   let doc =
@@ -263,8 +271,8 @@ let verify_cmd =
       & opt (some string) None
       & info [ "onnx-out-dir" ] ~doc ~docv:"DIRECTORY")
   in
-  let define_constants =
-    let doc = "Define a declared constant with the given value." in
+  let definitions =
+    let doc = "Define a toplevel constant declaration with the given value." in
     Arg.(
       value
       & opt_all (pair ~sep:':' string string) []
@@ -302,17 +310,17 @@ let verify_cmd =
   in
   let verify_term =
     let verify format loadpath memlimit timelimit prover prover_altern dataset
-      def_constants theories goals onnx_out_dir files () =
+      definitions theories goals onnx_out_dir files () =
       ignore
         (verify ?format ~loadpath ?memlimit ?timelimit ?dataset prover
-           ?prover_altern ~def_constants ~theories ~goals ?onnx_out_dir files)
+           ?prover_altern ~definitions ~theories ~goals ?onnx_out_dir files)
     in
     Term.(
       const (fun _ -> exec_cmd cmdname)
       $ setup_logs
       $ (const verify $ format $ loadpath $ memlimit $ timelimit $ prover
-       $ prover_altern $ dataset $ define_constants $ theories $ goals
-       $ onnx_out_dir $ files))
+       $ prover_altern $ dataset $ definitions $ theories $ goals $ onnx_out_dir
+       $ files))
   in
   Cmd.v info verify_term
 
diff --git a/src/meta.ml b/src/meta.ml
new file mode 100644
index 0000000000000000000000000000000000000000..ae6eb300890d16b13794cc8bbed98975f69b8ad9
--- /dev/null
+++ b/src/meta.ml
@@ -0,0 +1,51 @@
+(**************************************************************************)
+(*                                                                        *)
+(*  This file is part of CAISAR.                                          *)
+(*                                                                        *)
+(*  Copyright (C) 2023                                                    *)
+(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
+(*         alternatives)                                                  *)
+(*                                                                        *)
+(*  You can redistribute it and/or modify it under the terms of the GNU   *)
+(*  Lesser General Public License as published by the Free Software       *)
+(*  Foundation, version 2.1.                                              *)
+(*                                                                        *)
+(*  It is distributed in the hope that it will be useful,                 *)
+(*  but WITHOUT ANY WARRANTY; without even the implied warranty of        *)
+(*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the          *)
+(*  GNU Lesser General Public License for more details.                   *)
+(*                                                                        *)
+(*  See the GNU Lesser General Public License version 2.1                 *)
+(*  for more details (enclosed in the file licenses/LGPLv2.1).            *)
+(*                                                                        *)
+(**************************************************************************)
+
+let meta_input =
+  Why3.Theory.(
+    register_meta "caisar_input"
+      ~desc:"Indicates an input position among the inputs of the neural network"
+      [ MTlsymbol; MTint ])
+
+let meta_output =
+  Why3.Theory.(
+    register_meta "caisar_output"
+      ~desc:
+        "Indicates an output position among the outputs of the neural network"
+      [ MTlsymbol; MTint ])
+
+let meta_nn_filename =
+  Why3.Theory.(
+    register_meta_excl "caisar_nnet_or_onnx"
+      ~desc:"Indicates the filename of the neural network" [ MTstring ])
+
+let meta_svm_filename =
+  Why3.Theory.(
+    register_meta_excl "caisar_svm"
+      ~desc:
+        "Indicates the filename and abstraction of the support vector machine"
+      [ MTstring; MTstring ])
+
+let meta_dataset_filename =
+  Why3.Theory.(
+    register_meta_excl "caisar_dataset"
+      ~desc:"Indicates the filename of the dataset" [ MTstring ])
diff --git a/src/transformations/vars_on_lhs.mli b/src/meta.mli
similarity index 76%
rename from src/transformations/vars_on_lhs.mli
rename to src/meta.mli
index 6521123f56ad5e99247176780076181183ce3f7d..0ed5f6a0b7ae2f7441905952ccbbcb38c808a70b 100644
--- a/src/transformations/vars_on_lhs.mli
+++ b/src/meta.mli
@@ -20,5 +20,17 @@
 (*                                                                        *)
 (**************************************************************************)
 
-val init : unit -> unit
-(** Register the transformation. *)
+val meta_input : Why3.Theory.meta
+(** Indicates an input position among the inputs of the neural network. *)
+
+val meta_output : Why3.Theory.meta
+(** Indicates an output position among the outputs of the neural network. *)
+
+val meta_nn_filename : Why3.Theory.meta
+(** The filename of the neural network. *)
+
+val meta_svm_filename : Why3.Theory.meta
+(** The filename and abstraction of the support vector machine. *)
+
+val meta_dataset_filename : Why3.Theory.meta
+(** The filename of the dataset. *)
diff --git a/src/printers/marabou.ml b/src/printers/marabou.ml
index fe887fb263cfefae6fbd1eefaf5b148006e02d97..052964adea5be903654075737b0470631b68f326 100644
--- a/src/printers/marabou.ml
+++ b/src/printers/marabou.ml
@@ -169,11 +169,11 @@ let rec print_tdecl info fmt task =
     print_tdecl info fmt task_prev;
     match task_decl.Theory.td_node with
     | Use _ | Clone _ -> ()
-    | Meta (meta, l) when Theory.meta_equal meta Utils.meta_input -> (
+    | Meta (meta, l) when Theory.meta_equal meta Meta.meta_input -> (
       match l with
       | [ MAls ls; MAint i ] -> Term.Hls.add info.variables ls (Fmt.str "x%i" i)
       | _ -> assert false)
-    | Meta (meta, l) when Theory.meta_equal meta Utils.meta_output -> (
+    | Meta (meta, l) when Theory.meta_equal meta Meta.meta_output -> (
       match l with
       | [ MAls ls; MAint i ] -> Term.Hls.add info.variables ls (Fmt.str "y%i" i)
       | _ -> assert false)
@@ -182,20 +182,12 @@ let rec print_tdecl info fmt task =
 
 let print_task args ?old:_ fmt task =
   let ls_rel_real =
-    let th = Env.read_theory args.Printer.env [ "real" ] "Real" in
-    let le = Theory.ns_find_ls th.th_export [ Ident.op_infix "<=" ] in
-    let lt = Theory.ns_find_ls th.th_export [ Ident.op_infix "<" ] in
-    let ge = Theory.ns_find_ls th.th_export [ Ident.op_infix ">=" ] in
-    let gt = Theory.ns_find_ls th.th_export [ Ident.op_infix ">" ] in
-    { le; ge; lt; gt }
+    let th = Symbols.Real.create args.Printer.env in
+    { le = th.le; ge = th.ge; lt = th.lt; gt = th.gt }
   in
   let ls_rel_float =
-    let th = Env.read_theory args.Printer.env [ "ieee_float" ] "Float64" in
-    let le = Theory.ns_find_ls th.th_export [ "le" ] in
-    let lt = Theory.ns_find_ls th.th_export [ "lt" ] in
-    let ge = Theory.ns_find_ls th.th_export [ "ge" ] in
-    let gt = Theory.ns_find_ls th.th_export [ "gt" ] in
-    { le; ge; lt; gt }
+    let th = Symbols.Float64.create args.Printer.env in
+    { le = th.le; ge = th.ge; lt = th.lt; gt = th.gt }
   in
   let info =
     {
diff --git a/src/printers/pyrat.ml b/src/printers/pyrat.ml
index 51573a768f143068e1f180943935d800543a6cc1..162d91e3f0b3a64af9ccc54171f61a07d3fc5273 100644
--- a/src/printers/pyrat.ml
+++ b/src/printers/pyrat.ml
@@ -155,11 +155,11 @@ let rec print_tdecl info fmt task =
     print_tdecl info fmt task_prev;
     match task_decl.Theory.td_node with
     | Use _ | Clone _ -> ()
-    | Meta (meta, l) when Theory.meta_equal meta Utils.meta_input -> (
+    | Meta (meta, l) when Theory.meta_equal meta Meta.meta_input -> (
       match l with
       | [ MAls ls; MAint i ] -> Term.Hls.add info.variables ls (Fmt.str "x%i" i)
       | _ -> assert false)
-    | Meta (meta, l) when Theory.meta_equal meta Utils.meta_output -> (
+    | Meta (meta, l) when Theory.meta_equal meta Meta.meta_output -> (
       match l with
       | [ MAls ls; MAint i ] -> Term.Hls.add info.variables ls (Fmt.str "y%i" i)
       | _ -> assert false)
@@ -168,20 +168,12 @@ let rec print_tdecl info fmt task =
 
 let print_task args ?old:_ fmt task =
   let ls_rel_real =
-    let th = Env.read_theory args.Printer.env [ "real" ] "Real" in
-    let le = Theory.ns_find_ls th.th_export [ Ident.op_infix "<=" ] in
-    let lt = Theory.ns_find_ls th.th_export [ Ident.op_infix "<" ] in
-    let ge = Theory.ns_find_ls th.th_export [ Ident.op_infix ">=" ] in
-    let gt = Theory.ns_find_ls th.th_export [ Ident.op_infix ">" ] in
-    { le; ge; lt; gt }
+    let th = Symbols.Real.create args.Printer.env in
+    { le = th.le; ge = th.ge; lt = th.lt; gt = th.gt }
   in
   let ls_rel_float =
-    let th = Env.read_theory args.Printer.env [ "ieee_float" ] "Float64" in
-    let le = Theory.ns_find_ls th.th_export [ "le" ] in
-    let lt = Theory.ns_find_ls th.th_export [ "lt" ] in
-    let ge = Theory.ns_find_ls th.th_export [ "ge" ] in
-    let gt = Theory.ns_find_ls th.th_export [ "gt" ] in
-    { le; ge; lt; gt }
+    let th = Symbols.Float64.create args.Printer.env in
+    { le = th.le; ge = th.ge; lt = th.lt; gt = th.gt }
   in
   let info =
     {
diff --git a/src/printers/vnnlib.ml b/src/printers/vnnlib.ml
index 90888b432613d15d0ffcd05cdc5b2f7a00c10cee..2bad9dec08f133c0b75b7905b1c58752258b8345 100644
--- a/src/printers/vnnlib.ml
+++ b/src/printers/vnnlib.ml
@@ -502,12 +502,12 @@ let rec print_tdecl info fmt task =
     print_tdecl info fmt task_prev;
     match task_decl.Theory.td_node with
     | Use _ | Clone _ -> ()
-    | Meta (meta, l) when Theory.meta_equal meta Utils.meta_input -> (
+    | Meta (meta, l) when Theory.meta_equal meta Meta.meta_input -> (
       match l with
       | [ MAls ls; MAint i ] ->
         Term.Hls.add info.variables ls (Fmt.str "X_%i" i)
       | _ -> assert false)
-    | Meta (meta, l) when Theory.meta_equal meta Utils.meta_output -> (
+    | Meta (meta, l) when Theory.meta_equal meta Meta.meta_output -> (
       match l with
       | [ MAls ls; MAint i ] ->
         Term.Hls.add info.variables ls (Fmt.str "Y_%i" i)
@@ -517,20 +517,12 @@ let rec print_tdecl info fmt task =
 
 let print_task args ?old:_ fmt task =
   let ls_rel_real =
-    let th = Env.read_theory args.Printer.env [ "real" ] "Real" in
-    let le = Theory.ns_find_ls th.th_export [ Ident.op_infix "<=" ] in
-    let lt = Theory.ns_find_ls th.th_export [ Ident.op_infix "<" ] in
-    let ge = Theory.ns_find_ls th.th_export [ Ident.op_infix ">=" ] in
-    let gt = Theory.ns_find_ls th.th_export [ Ident.op_infix ">" ] in
-    { le; ge; lt; gt }
+    let th = Symbols.Real.create args.Printer.env in
+    { le = th.le; ge = th.ge; lt = th.lt; gt = th.gt }
   in
   let ls_rel_float =
-    let th = Env.read_theory args.Printer.env [ "ieee_float" ] "Float64" in
-    let le = Theory.ns_find_ls th.th_export [ "le" ] in
-    let lt = Theory.ns_find_ls th.th_export [ "lt" ] in
-    let ge = Theory.ns_find_ls th.th_export [ "ge" ] in
-    let gt = Theory.ns_find_ls th.th_export [ "gt" ] in
-    { le; ge; lt; gt }
+    let th = Symbols.Float64.create args.Printer.env in
+    { le = th.le; ge = th.ge; lt = th.lt; gt = th.gt }
   in
   let info =
     {
diff --git a/src/proof_strategy.ml b/src/proof_strategy.ml
index 25d4c9b5f98573504ebd46ebfd0dcfd22ec2ff9e..1c89eb8a469d6e41af53d0a29c35b93eb82091c4 100644
--- a/src/proof_strategy.ml
+++ b/src/proof_strategy.ml
@@ -23,7 +23,7 @@
 open Base
 open Why3
 
-let set_of_nn_ls ~lookup sls =
+let sls_model ~lookup sls =
   let rec aux acc term =
     let acc = Term.t_fold aux acc term in
     match term.t_node with
@@ -33,35 +33,40 @@ let set_of_nn_ls ~lookup sls =
   in
   Trans.fold_decl (fun decl acc -> Decl.decl_fold aux acc decl) sls
 
-let do_apply_prover ~lookup ~trans tasks =
-  let set_nn_ls =
+let do_apply_prover_strategy ~lookup ~trans tasks =
+  let sls_model =
     List.fold tasks ~init:Term.Sls.empty ~f:(fun accum task ->
-      Trans.apply (set_of_nn_ls ~lookup accum) task)
+      Trans.apply (sls_model ~lookup accum) task)
   in
-  let count_nn_ls = Term.Sls.cardinal set_nn_ls in
-  match count_nn_ls with
+  match Term.Sls.cardinal sls_model with
   | 0 -> tasks
   | 1 -> List.map tasks ~f:(Trans.apply trans)
-  | _ ->
-    invalid_arg "Two or more neural network applications are not supported yet"
+  | _ -> Logging.user_error (fun m -> m "Multiple models are not supported yet")
 
-let apply_classic_prover env task =
+let apply_classic_prover_strategy env task =
   let lookup = Language.lookup_loaded_nets in
   let trans = Nn2smt.trans env in
-  do_apply_prover ~lookup ~trans [ task ]
+  do_apply_prover_strategy ~lookup ~trans [ task ]
 
-let apply_native_nn_prover task =
+let split_on_dataset_elts task =
+  match Task.on_meta_excl Meta.meta_dataset_filename task with
+  | None -> [ task ]
+  | Some _ ->
+    (* Property verification on a dataset typically involves goals with huge
+       toplevel conjunctions, one conjunct for each element of the dataset: the
+       first thing to do is to destruct such toplevel conjunctions. *)
+    Trans.apply Split.split_goal_full_stop_on_quant task
+
+let apply_native_nn_prover_strategy task =
+  let tasks = split_on_dataset_elts task in
   let lookup = Language.lookup_nn in
   let trans =
     Trans.seq [ Introduction.introduce_premises; Native_nn_prover.trans ]
   in
-  let tasks =
-    match Task.on_meta_excl Utils.meta_dataset_filename task with
-    | None -> [ task ]
-    | Some _ ->
-      (* Property verification on a dataset typically involves goals with huge
-         toplevel conjunctions, one conjunct for each element of the dataset:
-         first thing to do is to destruct such toplevel conjunctions. *)
-      Trans.apply Split_goal.split_goal_full task
-  in
-  do_apply_prover ~lookup ~trans tasks
+  do_apply_prover_strategy ~lookup ~trans tasks
+
+let apply_svm_prover_strategy task =
+  let tasks = split_on_dataset_elts task in
+  let lookup = Language.lookup_svm in
+  let trans = Introduction.introduce_premises in
+  do_apply_prover_strategy ~lookup ~trans tasks
diff --git a/src/proof_strategy.mli b/src/proof_strategy.mli
index 6415431d425819147a4debe62b3c423837c0ac4c..6e388ce5e3fccd7b21aff8ece1cc7ac3b8005c95 100644
--- a/src/proof_strategy.mli
+++ b/src/proof_strategy.mli
@@ -22,8 +22,12 @@
 
 open Why3
 
-val apply_classic_prover : Env.env -> Task.task -> Task.task list
+val apply_classic_prover_strategy : Env.env -> Task.task -> Task.task list
 (** Detect and translate applications of neural networks into SMT-LIB. *)
 
-val apply_native_nn_prover : Task.task -> Task.task list
-(** Detect and execute applications of neural networks. *)
+val apply_native_nn_prover_strategy : Task.task -> Task.task list
+(** First, detect and split on a dataset, if any, intended for neural networks.
+    Then, detect and execute applications of neural networks. *)
+
+val apply_svm_prover_strategy : Task.task -> Task.task list
+(** Detect and split on a dataset, if any, intended for support vector machines. *)
diff --git a/src/saver.ml b/src/saver.ml
index 767dda166fe711ccc51f372fec61cf2868a8dcd5..39e18c4761dc39fbb5c88872f26169cbefd7f949 100644
--- a/src/saver.ml
+++ b/src/saver.ml
@@ -23,21 +23,37 @@
 open Why3
 open Base
 
+let src = Logs.Src.create "SAVer" ~doc:"SAVer prover"
+
+(* SAVer command line parameters. *)
 let re_svm = Re__Core.(compile (str "%{svm}"))
 let re_dataset = Re__Core.(compile (str "%{dataset}"))
-let re_eps = Re__Core.(compile (str "%{epsilon}"))
 let re_abstraction = Re__Core.(compile (str "%{abstraction}"))
-let re_distance = Re__Core.(compile (str "%{distance}"))
+let re_perturbation = Re__Core.(compile (str "%{perturbation}"))
+let re_perturbation_param = Re__Core.(compile (str "%{perturbation_param}"))
+
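+(* A perturbation is either an l_inf ball of radius [eps] around each data
+   point, or perturbation bounds provided in a file. *)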
+type perturbation =
+  | L_inf of Dataset.eps option
+  | From_file of string
+
+let string_of_perturbation = function
+  | L_inf _ -> "l_inf"
+  | From_file _ -> "from_file"
 
-let build_command config_prover svm_filename dataset_filename eps =
+let string_of_perturbation_param = function
+  | L_inf eps -> Option.(value (map ~f:Dataset.string_of_eps eps)) ~default:"0"
+  | From_file filename -> Unix.realpath filename
+
+let build_command config_prover ?(abstraction = "hybrid") ~svm ~dataset
+  perturbation =
   let command = Whyconf.get_complete_command ~with_steps:false config_prover in
   let params =
     [
-      (re_svm, Unix.realpath svm_filename);
-      (re_dataset, Unix.realpath dataset_filename);
-      (re_eps, Option.(value (map ~f:Dataset.string_of_eps eps)) ~default:"0");
-      (re_distance, "l_inf");
-      (re_abstraction, "hybrid");
+      (re_svm, Unix.realpath svm);
+      (re_dataset, Unix.realpath dataset);
+      (re_perturbation, string_of_perturbation perturbation);
+      (re_perturbation_param, string_of_perturbation_param perturbation);
+      (re_abstraction, abstraction);
     ]
   in
   List.fold params ~init:command ~f:(fun cmd (param, by) ->
@@ -49,7 +65,7 @@ type answer = {
   nb_proved : int;
 }
 
-(* SAVer output is made of 7 columns separated by (multiple) space(s) of the
+(* SAVer's output is made of 7 columns separated by (multiple) space(s) of the
    following form:
 
    [SUMMARY] integer float float integer integer integer
@@ -74,16 +90,20 @@ let re_group_number_of_predicate = function
   | Dataset.Correct -> 2
   | Robust _ -> 3
   | CondRobust _ -> 4
-  | MetaRobust _ -> failwith "Unsupported metamorphic robustness predicate"
+  | MetaRobust _ ->
+    Logging.user_error (fun m ->
+      m "Unsupported metamorphic robustness predicate")
 
 let negative_answer = function
   | Dataset.Correct ->
     (* SAVer is complete wrt [Correct] predicate. *)
     Call_provers.Invalid
   | Robust _ | CondRobust _ -> Call_provers.Unknown ""
-  | MetaRobust _ -> failwith "Unsupported metamorphic robustness predicate"
+  | MetaRobust _ ->
+    Logging.user_error (fun m ->
+      m "Unsupported metamorphic robustness predicate")
 
-let build_answer predicate_kind prover_result =
+let build_answer_on_dataset_predicate predicate_kind prover_result =
   match prover_result.Call_provers.pr_answer with
   | Call_provers.HighFailure -> (
     let pr_output = prover_result.pr_output in
@@ -102,13 +122,17 @@ let build_answer predicate_kind prover_result =
         else negative_answer predicate_kind
       in
       { prover_answer; nb_total; nb_proved }
-    | None -> failwith "Cannot interpret the output provided by SAVer")
-  | _ ->
+    | None ->
+      Logging.code_error ~src (fun m ->
+        m "Cannot interpret the output provided by SAVer"))
+  | unexpected_pr_answer ->
     (* Any other answer than HighFailure should never happen as we do not define
        anything in SAVer's driver. *)
-    assert false
+    Logging.code_error ~src (fun m ->
+      m "Unexpected SAVer prover answer '%a'"
+        Why3.Call_provers.print_prover_answer unexpected_pr_answer)
 
-let call_prover limit config config_prover predicate =
+let call_prover_on_dataset_predicate limit config config_prover predicate =
   let command =
     let svm = predicate.Dataset.model.Language.filename in
     let dataset = predicate.dataset in
@@ -116,9 +140,79 @@ let call_prover limit config config_prover predicate =
       match predicate.property with
       | Dataset.Correct -> None
       | Robust e | CondRobust e -> Some e
-      | MetaRobust _ -> failwith "Unsupported metamorphic robustness predicate"
+      | MetaRobust _ ->
+        Logging.user_error (fun m ->
+          m "Unsupported metamorphic robustness predicate")
+    in
+    build_command config_prover ~svm ~dataset (L_inf eps)
+  in
+  let prover_call =
+    let res_parser =
+      {
+        Call_provers.prp_regexps =
+          [ ("NeverMatch", Call_provers.Failure "Should not happen in CAISAR") ];
+        prp_timeregexps = [];
+        prp_stepregexps = [];
+        prp_exitcodes = [];
+        prp_model_parser = Model_parser.lookup_model_parser "no_model";
+      }
+    in
+    Call_provers.call_on_buffer ~command ~config ~limit ~res_parser
+      ~filename:" " ~get_model:None ~gen_new_file:false (Buffer.create 10)
+  in
+  let prover_result = Call_provers.wait_on_call prover_call in
+  build_answer_on_dataset_predicate predicate.property prover_result
+
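+(* Interprets SAVer's [SUMMARY] line: the goal is deemed valid only when every
+   data point is reported as conditionally robust (correct and robust). *)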
+let build_answer prover_result =
+  match prover_result.Call_provers.pr_answer with
+  | Call_provers.HighFailure -> (
+    let pr_output = prover_result.pr_output in
+    match Re__Core.exec_opt re_saver_output pr_output with
+    | Some re_group ->
+      let nb_total =
+        Int.of_string (Re__Core.Group.get re_group re_group_number_dataset_size)
+      in
+      let nb_proved =
+        let re_group_number = 4 (* cond. robust: correct and robust *) in
+        Int.of_string (Re__Core.Group.get re_group re_group_number)
+      in
+      let prover_answer =
+        if nb_total = nb_proved
+        then Call_provers.Valid
+        else Call_provers.Unknown ""
+      in
+      { prover_answer; nb_total; nb_proved }
+    | None ->
+      Logging.code_error ~src (fun m ->
+        m "Cannot interpret the output provided by SAVer"))
+  | unexpected_pr_answer ->
+    (* Any other answer than HighFailure should never happen as we do not define
+       anything in SAVer's driver. *)
+    Logging.code_error ~src (fun m ->
+      m "Unexpected SAVer prover answer '%a'"
+        Why3.Call_provers.print_prover_answer unexpected_pr_answer)
+
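+(* Calls SAVer on an in-memory dataset: the data points and the lower/upper
+   perturbation bounds are first written to temporary CSV files, then passed
+   on the command line. *)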
+let call_prover limit config config_prover ?(abstraction = Language.Hybrid) ~svm
+  ~dataset ~perturbations () =
+  let command =
+    let abstraction = Language.string_of_svm_abstraction abstraction in
+    let dataset =
+      let filename = Stdlib.Filename.temp_file "dataset" "saver" in
+      Csv.save filename dataset;
+      filename
+    in
+    let perturbation =
+      let filename = Stdlib.Filename.temp_file "perturbations" "saver" in
+      let perturbations =
+        List.concat_map perturbations ~f:(fun (lower, upper) ->
+          [ Float.to_string lower; Float.to_string upper ])
+        |> List.return
+      in
+      Csv.save ~separator:' ' filename perturbations;
+      filename
     in
-    build_command config_prover svm dataset eps
+    build_command config_prover ~abstraction ~svm ~dataset
+      (From_file perturbation)
   in
   let prover_call =
     let res_parser =
@@ -135,4 +229,4 @@ let call_prover limit config config_prover predicate =
       ~filename:" " ~get_model:None ~gen_new_file:false (Buffer.create 10)
   in
   let prover_result = Call_provers.wait_on_call prover_call in
-  build_answer predicate.property prover_result
+  build_answer prover_result
diff --git a/src/saver.mli b/src/saver.mli
index c7f25e3c118ff630e09cf419f6eb46c48d4b4f30..1aad4ca744ea5688dad9863025dba2ef5ea572cf 100644
--- a/src/saver.mli
+++ b/src/saver.mli
@@ -20,17 +20,26 @@
 (*                                                                        *)
 (**************************************************************************)
 
-open Why3
-
 type answer = {
-  prover_answer : Call_provers.prover_answer;
+  prover_answer : Why3.Call_provers.prover_answer;
   nb_total : int;  (** Total number of data points. *)
   nb_proved : int;  (** Number of data points verifying the property. *)
 }
 
-val call_prover :
-  Call_provers.resource_limit ->
-  Whyconf.main ->
-  Whyconf.config_prover ->
+val call_prover_on_dataset_predicate :
+  Why3.Call_provers.resource_limit ->
+  Why3.Whyconf.main ->
+  Why3.Whyconf.config_prover ->
   (Language.svm_shape, string) Dataset.predicate ->
   answer
+
+val call_prover :
+  Why3.Call_provers.resource_limit ->
+  Why3.Whyconf.main ->
+  Why3.Whyconf.config_prover ->
+  ?abstraction:Language.svm_abstraction ->
+  svm:string ->
+  dataset:Csv.t ->
+  perturbations:(float * float) list ->
+  unit ->
+  answer
diff --git a/src/transformations/native_nn_prover.ml b/src/transformations/native_nn_prover.ml
index 45b9a408d3aa6be9550636ec4737f0e64a54a320..f7754ae48abff0021fe98ff4565b905337775756 100644
--- a/src/transformations/native_nn_prover.ml
+++ b/src/transformations/native_nn_prover.ml
@@ -23,35 +23,6 @@
 open Why3
 open Base
 
-(* Collects in a map the input variables, already declared in a task, and their
-   indices of appearance inside respective input vectors. Such collecting
-   process is memoized wrt lsymbols corresponding to input vectors. *)
-let collect_input_vars =
-  let hls = Term.Hls.create 13 in
-  let add index mls = function
-    | { Term.t_node = Tapp (ls, []); _ } -> Term.Mls.add ls index mls
-    | t -> failwith (Fmt.str "Not an input variable: %a" Pretty.print_term t)
-  in
-  let rec do_collect mls (term : Term.term) =
-    match term.t_node with
-    | Term.Tapp
-        ( ls1 (* @@ *),
-          [
-            { t_node = Tapp (ls2 (* nn *), _); _ };
-            { t_node = Tapp (ls3 (* input vector *), tl (* input vars *)); _ };
-          ] )
-      when String.equal ls1.ls_name.id_string (Ident.op_infix "@@") -> (
-      match (Language.lookup_nn ls2, Language.lookup_vector ls3) with
-      | Some { nn_nb_inputs; _ }, Some vector_length ->
-        assert (nn_nb_inputs = vector_length && vector_length = List.length tl);
-        if Term.Hls.mem hls ls3 then mls else List.foldi ~init:mls ~f:add tl
-      | _, _ -> mls)
-    | _ -> Term.t_fold do_collect mls term
-  in
-  Trans.fold_decl
-    (fun decl mls -> Decl.decl_fold do_collect mls decl)
-    Term.Mls.empty
-
 (* Creates a list of pairs made of output variables and respective indices in
    the list, for each neural network application to an input vector appearing in
    a task. Such a list stands for the resulting output vector of a neural
@@ -70,8 +41,7 @@ let create_output_vars =
         else
           let output_vars =
             List.init nn_nb_outputs ~f:(fun index ->
-              ( index,
-                Term.create_fsymbol (Ident.id_fresh "caisar_y") [] nn_ty_elt ))
+              (index, Term.create_fsymbol (Ident.id_fresh "y") [] nn_ty_elt))
           in
           Term.Mterm.add term output_vars mt
       | None -> mt)
@@ -84,20 +54,20 @@ let create_output_vars =
 (* Simplifies a task goal exhibiting a vector selection on a neural network
    application to an input vector (ie, (nn @@ v)[_]) by the corresponding output
-   variable. Morevoer, each input variable declaration is annotated with a meta
-   that describes the respective index in the input vector. Ouput variables are
+   variable. Moreover, each input variable declaration is annotated with a meta
+   that describes the respective index in the input vector. Output variables are
    all declared, each with a meta that describes the respective index in the
    output vector. *)
 let simplify_nn_application input_vars output_vars =
   let rec do_simplify (term : Term.term) =
     match term.t_node with
     | Term.Tapp
-        ( ls1 (* [_] *),
+        ( ls_get (* [_] *),
           [
-            ({ t_node = Tapp (ls2 (* @@ *), _); _ } as t1);
+            ({ t_node = Tapp (ls_atat (* @@ *), _); _ } as t1);
             ({ t_node = Tconst (ConstInt index); _ } as _t2);
           ] )
-      when String.equal ls1.ls_name.id_string (Ident.op_get "")
-           && String.equal ls2.ls_name.id_string (Ident.op_infix "@@") -> (
+      when String.equal ls_get.ls_name.id_string (Ident.op_get "")
+           && String.equal ls_atat.ls_name.id_string (Ident.op_infix "@@") -> (
       match Term.Mterm.find_opt t1 output_vars with
       | None -> Term.t_map do_simplify term
       | Some output_vars ->
@@ -118,7 +88,7 @@ let simplify_nn_application input_vars output_vars =
           match Term.Mls.find_opt ls input_vars with
           | None -> task
           | Some index ->
-            Task.add_meta task Utils.meta_input [ MAls ls; MAint index ]
+            Task.add_meta task Meta.meta_input [ MAls ls; MAint index ]
         in
         Task.add_tdecl task task_hd.task_decl
       | Decl ({ d_node = Dprop (Pgoal, _, _); _ } as decl) ->
@@ -134,7 +104,7 @@ let simplify_nn_application input_vars output_vars =
               List.fold output_vars ~init:task
                 ~f:(fun task (index, output_var) ->
                 let task =
-                  Task.add_meta task Utils.meta_output
+                  Task.add_meta task Meta.meta_output
                     [ MAls output_var; MAint index ]
                 in
                 let decl = Decl.create_param_decl output_var in
@@ -147,6 +117,6 @@ let simplify_nn_application input_vars output_vars =
     None
 
 let trans =
-  Trans.bind collect_input_vars (fun input_vars ->
+  Trans.bind Utils.input_vars (fun input_vars ->
     Trans.bind create_output_vars (fun output_vars ->
       simplify_nn_application input_vars output_vars))
diff --git a/src/transformations/nn2smt.ml b/src/transformations/nn2smt.ml
index e8708252646498cd387be7b7517c8ef876f7b7da..c82cefd86eda5d6c847a9a4ab8696acccca8188d 100644
--- a/src/transformations/nn2smt.ml
+++ b/src/transformations/nn2smt.ml
@@ -387,7 +387,7 @@ let actual_nn_flow env =
         in
         let task =
           List.fold !meta ~init:task ~f:(fun task s ->
-            Task.add_meta task Utils.meta_nn_filename [ MAstr s ])
+            Task.add_meta task Meta.meta_nn_filename [ MAstr s ])
         in
         Task.add_decl task decl)
     None
diff --git a/src/transformations/simplify_rel.ml b/src/transformations/relop.ml
similarity index 56%
rename from src/transformations/simplify_rel.ml
rename to src/transformations/relop.ml
index cf2dbce6d7321a9c77421f730c1089d3c2ab8bab..77b9ef72a2547ef65cef6a6b107b925f287377ff 100644
--- a/src/transformations/simplify_rel.ml
+++ b/src/transformations/relop.ml
@@ -46,60 +46,53 @@ let real_constant_of_float value =
   Constant.ConstReal (Number.real_literal ~radix:10 ~neg ~int ~frac ~exp:None)
 
 let term_of_float env f =
-  let th = Env.read_theory env [ "ieee_float" ] "Float64" in
-  let ty = Ty.ty_app (Theory.ns_find_ts th.th_export [ "t" ]) [] in
-  Term.t_const (real_constant_of_float f) ty
+  let th = Symbols.Float64.create env in
+  Term.t_const (real_constant_of_float f) th.ty
 
-let make_rt env =
-  let th_f64 = Env.read_theory env [ "ieee_float" ] "Float64" in
-  let ty_f64 = Ty.ty_app (Theory.ns_find_ts th_f64.th_export [ "t" ]) [] in
-  let relop_f64_le = Theory.ns_find_ls th_f64.th_export [ "le" ] in
-  let relop_f64_lt = Theory.ns_find_ls th_f64.th_export [ "lt" ] in
-  let relop_f64_ge = Theory.ns_find_ls th_f64.th_export [ "ge" ] in
-  let relop_f64_gt = Theory.ns_find_ls th_f64.th_export [ "gt" ] in
-  let relop_f64 = [ relop_f64_le; relop_f64_lt; relop_f64_ge; relop_f64_gt ] in
-  let aop_f64_add = Theory.ns_find_ls th_f64.th_export [ "add" ] in
-  let aop_f64_sub = Theory.ns_find_ls th_f64.th_export [ "sub" ] in
-  let aop_f64_mul = Theory.ns_find_ls th_f64.th_export [ "mul" ] in
-  let aop_f64_div = Theory.ns_find_ls th_f64.th_export [ "div" ] in
-  let aop_f64_neg = Theory.ns_find_ls th_f64.th_export [ "neg" ] in
+(* -------------------------------------------------------------------------- *)
+(* --- Simplify Relops                                                        *)
+(* -------------------------------------------------------------------------- *)
+
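+(* Builds a rewriter that constant-folds float64 arithmetic on literal
+   operands and moves a constant operand across a relational operator by
+   applying the inverse arithmetic operation. *)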
+let make_rewriter env =
+  let th_f64 = Symbols.Float64.create env in
+  let relop_f64 = [ th_f64.le; th_f64.lt; th_f64.ge; th_f64.gt ] in
   let aop_f64 =
-    [ aop_f64_add; aop_f64_sub; aop_f64_mul; aop_f64_div; aop_f64_neg ]
+    [ th_f64.add; th_f64.sub; th_f64.mul; th_f64.div; th_f64.neg ]
   in
   let aop_f64_of_ls ls_op =
-    if Term.ls_equal ls_op aop_f64_sub
+    if Term.ls_equal ls_op th_f64.sub
     then Float.( - )
-    else if Term.ls_equal ls_op aop_f64_add
+    else if Term.ls_equal ls_op th_f64.add
     then Float.( + )
-    else if Term.ls_equal ls_op aop_f64_mul
+    else if Term.ls_equal ls_op th_f64.mul
     then Float.( * )
-    else if Term.ls_equal ls_op aop_f64_div
+    else if Term.ls_equal ls_op th_f64.div
     then Float.( / )
     else assert false
   in
   let inv_aop_f64_of_ls ls_op =
-    if Term.ls_equal ls_op aop_f64_sub
+    if Term.ls_equal ls_op th_f64.sub
     then Float.( + )
-    else if Term.ls_equal ls_op aop_f64_add
+    else if Term.ls_equal ls_op th_f64.add
     then Float.( - )
-    else if Term.ls_equal ls_op aop_f64_mul
+    else if Term.ls_equal ls_op th_f64.mul
     then Float.( / )
-    else if Term.ls_equal ls_op aop_f64_div
+    else if Term.ls_equal ls_op th_f64.div
     then Float.( * )
     else assert false
   in
-  let rec rt t =
-    let t = Term.t_map rt t in
+  let rec rewriter t =
+    let t = Term.t_map rewriter t in
     match t.t_node with
     | Tapp (ls, [ { t_node = Tconst (ConstReal rc); _ } ])
-      when Term.ls_equal ls aop_f64_neg ->
+      when Term.ls_equal ls th_f64.neg ->
       (* [.- rc] => [-rc] *)
       let rc = Number.neg_real rc in
-      Term.t_const (ConstReal rc) ty_f64
+      Term.t_const (ConstReal rc) th_f64.ty
     | Tapp
         ( ls_op,
           [
-            _mode;
+            _ (* mode *);
             { t_node = Tconst (ConstReal rc1); _ };
             { t_node = Tconst (ConstReal rc2); _ };
           ] )
@@ -110,7 +103,7 @@ let make_rt env =
       let rc2_float = float_of_real_constant rc2 in
       let res_rc_float = (aop_f64_of_ls ls_op) rc1_float rc2_float in
       let res_rc = real_constant_of_float res_rc_float in
-      Term.t_const res_rc ty_f64
+      Term.t_const res_rc th_f64.ty
     | Tapp
         ( ls_rel,
           [
@@ -118,50 +111,121 @@ let make_rt env =
             {
               t_node =
                 Tapp
-                  (ls_op, [ _mode; t'; { t_node = Tconst (ConstReal rc2); _ } ]);
+                  ( ls_op,
+                    [ _ (* mode *); t'; { t_node = Tconst (ConstReal rc2); _ } ]
+                  );
               _;
             };
           ] )
       when List.exists ~f:(Term.ls_equal ls_rel) relop_f64
            && List.exists ~f:(Term.ls_equal ls_op) aop_f64 ->
-      (* [ rc1 ls_rel t' ls_op rc2] => [(rc1 inv_ls_op rc2) ls_rel t'], where
+      (* [rc1 ls_rel t' ls_op rc2] => [(rc1 inv_ls_op rc2) ls_rel t'], where
          [inv_ls_op] is the inverse operation of the one provided by [ls_op]. *)
       let rc1_float = float_of_real_constant rc1 in
       let rc2_float = float_of_real_constant rc2 in
       let res_rc_float = (inv_aop_f64_of_ls ls_op) rc1_float rc2_float in
       let res_rc_t = term_of_float env res_rc_float in
       let t = Term.t_app_infer ls_rel [ res_rc_t; t' ] in
-      rt t
+      rewriter t
     | Tapp
         ( ls_rel,
           [
             {
               t_node =
                 Tapp
-                  (ls_op, [ _mode; t'; { t_node = Tconst (ConstReal rc2); _ } ]);
+                  ( ls_op,
+                    [ _ (* mode *); t'; { t_node = Tconst (ConstReal rc2); _ } ]
+                  );
               _;
             };
             { t_node = Tconst (ConstReal rc1); _ };
           ] )
       when List.exists ~f:(Term.ls_equal ls_rel) relop_f64
            && List.exists ~f:(Term.ls_equal ls_op) aop_f64 ->
-      (* [ t' ls_op rc2 ls_rel rc1] => [t' ls_rel (rc1 inv_ls_op rc2)], where
+      (* [t' ls_op rc2 ls_rel rc1] => [t' ls_rel (rc1 inv_ls_op rc2)], where
          [inv_ls_op] is the inverse operation of the one provided by [ls_op]. *)
       let rc1_float = float_of_real_constant rc1 in
       let rc2_float = float_of_real_constant rc2 in
       let res_rc_float = (inv_aop_f64_of_ls ls_op) rc1_float rc2_float in
       let res_rc_t = term_of_float env res_rc_float in
       let t = Term.t_app_infer ls_rel [ t'; res_rc_t ] in
-      rt t
+      rewriter t
     | _ -> t
   in
-  rt
+  (rewriter, None)
+
+let simplify env =
+  let rewriter, task = make_rewriter env in
+  Trans.rewrite rewriter task
+
+let register_simplify_relop () =
+  Trans.register_env_transform
+    ~desc:"Simplify relop expressions involving IEEE float64 values."
+    "simplify_relop" simplify
+
+(* -------------------------------------------------------------------------- *)
+(* --- Variables on LHS                                                       *)
+(* -------------------------------------------------------------------------- *)
+
+let make_rewriter env =
+  let th_f64 = Symbols.Float64.create env in
+  let th_real = Symbols.Real.create env in
+  let rec rewriter t =
+    let t = Term.t_map rewriter t in
+    match t.t_node with
+    | Tapp
+        ( ls,
+          [
+            ({ t_node = Tconst _; _ } as const);
+            ({ t_node = Tapp (_, []); _ } as var);
+          ] ) ->
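+      (* [const ls_rel var] => [var ls_rel' const], where [ls_rel'] is the
+         mirrored relation of [ls_rel] (eg, [le] becomes [ge]). *)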
+      let tt = [ var; const ] in
+      let ls_rel =
+        if Term.ls_equal ls th_f64.le
+        then th_f64.ge
+        else if Term.ls_equal ls th_f64.ge
+        then th_f64.le
+        else if Term.ls_equal ls th_f64.lt
+        then th_f64.gt
+        else if Term.ls_equal ls th_f64.gt
+        then th_f64.lt
+        else ls
+      in
+      let ls_rel =
+        if Term.ls_equal ls th_real.le
+        then th_real.ge
+        else if Term.ls_equal ls th_real.ge
+        then th_real.le
+        else if Term.ls_equal ls th_real.lt
+        then th_real.gt
+        else if Term.ls_equal ls th_real.gt
+        then th_real.lt
+        else ls_rel
+      in
+      if Term.ls_equal ls_rel ls then t else Term.ps_app ls_rel tt
+    | _ -> t
+  in
+  let task =
+    List.fold
+      [ th_f64.le; th_f64.lt; th_f64.ge; th_f64.gt ]
+      ~init:(Task.add_ty_decl None th_f64.tysymbol)
+      ~f:Task.add_param_decl
+  in
+  let task =
+    List.fold
+      [ th_real.le; th_real.lt; th_real.ge; th_real.gt ]
+      ~init:(Task.add_ty_decl task th_real.tysymbol)
+      ~f:Task.add_param_decl
+  in
+  (rewriter, task)
 
-let simplify_rel env =
-  let rt = make_rt env in
-  Trans.rewrite rt None
+let vars_on_lhs env =
+  let rewriter, task = make_rewriter env in
+  Trans.rewrite rewriter task
 
-let init () =
+let register_vars_on_lhs_of_relop () =
   Trans.register_env_transform
-    ~desc:"Simplify linear inequalities with float values." "simplify_rel"
-    simplify_rel
+    ~desc:
+      "Move variables on the left-hand-side of relop symbols in expressions \
+       involving IEEE float64 values."
+    "vars_on_lhs_of_relop" vars_on_lhs
diff --git a/src/transformations/relop.mli b/src/transformations/relop.mli
new file mode 100644
index 0000000000000000000000000000000000000000..d5a662fe1c59ce676d03ff00133a71373fe2e7a8
--- /dev/null
+++ b/src/transformations/relop.mli
@@ -0,0 +1,34 @@
+(**************************************************************************)
+(*                                                                        *)
+(*  This file is part of CAISAR.                                          *)
+(*                                                                        *)
+(*  Copyright (C) 2023                                                    *)
+(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
+(*         alternatives)                                                  *)
+(*                                                                        *)
+(*  You can redistribute it and/or modify it under the terms of the GNU   *)
+(*  Lesser General Public License as published by the Free Software       *)
+(*  Foundation, version 2.1.                                              *)
+(*                                                                        *)
+(*  It is distributed in the hope that it will be useful,                 *)
+(*  but WITHOUT ANY WARRANTY; without even the implied warranty of        *)
+(*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the          *)
+(*  GNU Lesser General Public License for more details.                   *)
+(*                                                                        *)
+(*  See the GNU Lesser General Public License version 2.1                 *)
+(*  for more details (enclosed in the file licenses/LGPLv2.1).            *)
+(*                                                                        *)
+(**************************************************************************)
+
+val simplify : Why3.Env.env -> Why3.Task.task Why3.Trans.trans
+(** Simplify relop expressions involving IEEE float64 values. *)
+
+val vars_on_lhs : Why3.Env.env -> Why3.Task.task Why3.Trans.trans
+(** Move variables to the left-hand side of relop symbols in expressions
+    involving IEEE float64 or real values. *)
+
+val register_simplify_relop : unit -> unit
+(** Register the [simplify_relop] transformation. *)
+
+val register_vars_on_lhs_of_relop : unit -> unit
+(** Register the [vars_on_lhs_of_relop] transformation. *)
diff --git a/src/transformations/split.ml b/src/transformations/split.ml
index c5338da72da5d5d0d3264ded9671ec3be1c167d1..88fcbff8b7a15578dd2addafc276895e3b5c21d0 100644
--- a/src/transformations/split.ml
+++ b/src/transformations/split.ml
@@ -21,21 +21,34 @@
 (**************************************************************************)
 
 open Base
-open Why3
 
 let split_generic filter_prop_kind =
-  Trans.decl_l
+  Why3.Trans.decl_l
     (fun d ->
       match d.d_node with
       | Dprop (k, pr, t) when filter_prop_kind k ->
         let l =
           match k with
-          | Paxiom | Plemma -> Split_goal.split_neg_full t
-          | Pgoal -> Split_goal.split_pos_full t
+          | Paxiom | Plemma -> Why3.Split_goal.split_neg_full t
+          | Pgoal -> Why3.Split_goal.split_pos_full t
         in
-        List.map l ~f:(fun t -> [ Decl.create_prop_decl k pr t ])
+        List.map l ~f:(fun t -> [ Why3.Decl.create_prop_decl k pr t ])
       | _ -> [ [ d ] ])
     None
 
 let split_premises = split_generic (function Paxiom -> true | _ -> false)
 let split_all = split_generic (fun _ -> true)
+
+let split_goal_full_stop_on_quant =
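+  (* First mark quantified (sub)formulas of the goal with the [stop_split]
+     attribute, then apply [Why3.Split_goal.split_goal_full]: the split does
+     not descend below the marked (sub)formulas. *)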
+  let trans_add_stop_split =
+    let rec add_stop_split_on_quant t =
+      match t.Why3.Term.t_node with
+      | Tquant _ -> Why3.Term.t_attr_add Why3.Term.stop_split t
+      | _ -> Why3.Term.t_map add_stop_split_on_quant t
+    in
+    Why3.Trans.goal (fun pr t ->
+      let t = add_stop_split_on_quant t in
+      [ Why3.Decl.create_prop_decl Pgoal pr t ])
+  in
+  Why3.Trans.bind trans_add_stop_split (fun task ->
+    Why3.Trans.return (Why3.Trans.apply Why3.Split_goal.split_goal_full task))
diff --git a/src/transformations/split.mli b/src/transformations/split.mli
index 9007dd39730ddf98d6401ae86a737d4d5805d913..1592aea9018ae714730bab6e5059fa6732d67968 100644
--- a/src/transformations/split.mli
+++ b/src/transformations/split.mli
@@ -20,11 +20,13 @@
 (*                                                                        *)
 (**************************************************************************)
 
-open Why3
+val split_premises : Why3.Task.task Why3.Trans.tlist
+(** Split disjunctions appearing in axioms. *)
 
-val split_premises : Task.task Trans.tlist
-(** Split disjunctions appearing in axioms into different tasks. *)
+val split_all : Why3.Task.task Why3.Trans.tlist
+(** Split disjunctions appearing in premises, and conjunctions appearing in
+    goals. *)
 
-val split_all : Task.task Trans.tlist
-(** Split disjunctions appearing in premises, and conjunctions appearing in the
-    goal, into different tasks. *)
+val split_goal_full_stop_on_quant : Why3.Task.task Why3.Trans.tlist
+(** Like [Why3.Split_goal.split_goal_full], but stops splitting at quantified
+    (sub)formulas. *)
diff --git a/src/transformations/symbols.ml b/src/transformations/symbols.ml
new file mode 100644
index 0000000000000000000000000000000000000000..c07edad17db54d12f0d9a40c24a36ea26d822ce4
--- /dev/null
+++ b/src/transformations/symbols.ml
@@ -0,0 +1,116 @@
+(**************************************************************************)
+(*                                                                        *)
+(*  This file is part of CAISAR.                                          *)
+(*                                                                        *)
+(*  Copyright (C) 2023                                                    *)
+(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
+(*         alternatives)                                                  *)
+(*                                                                        *)
+(*  You can redistribute it and/or modify it under the terms of the GNU   *)
+(*  Lesser General Public License as published by the Free Software       *)
+(*  Foundation, version 2.1.                                              *)
+(*                                                                        *)
+(*  It is distributed in the hope that it will be useful,                 *)
+(*  but WITHOUT ANY WARRANTY; without even the implied warranty of        *)
+(*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the          *)
+(*  GNU Lesser General Public License for more details.                   *)
+(*                                                                        *)
+(*  See the GNU Lesser General Public License version 2.1                 *)
+(*  for more details (enclosed in the file licenses/LGPLv2.1).            *)
+(*                                                                        *)
+(**************************************************************************)
+
+module Float64 = struct
+  type t = {
+    ty : Why3.Ty.ty;
+    tysymbol : Why3.Ty.tysymbol;
+    add : Why3.Term.lsymbol;  (** Float64.add *)
+    sub : Why3.Term.lsymbol;  (** Float64.sub *)
+    mul : Why3.Term.lsymbol;  (** Float64.mul *)
+    div : Why3.Term.lsymbol;  (** Float64.div *)
+    neg : Why3.Term.lsymbol;  (** Float64.neg *)
+    le : Why3.Term.lsymbol;  (** Float64.le *)
+    ge : Why3.Term.lsymbol;  (** Float64.ge *)
+    lt : Why3.Term.lsymbol;  (** Float64.lt *)
+    gt : Why3.Term.lsymbol;  (** Float64.gt *)
+    infix : infix;
+  }
+
+  and infix = {
+    plus : Why3.Term.lsymbol;  (** Float64.( .+ ) *)
+    minus : Why3.Term.lsymbol;  (** Float64.( .- ) *)
+    times : Why3.Term.lsymbol;  (** Float64.( .* ) *)
+    divide : Why3.Term.lsymbol;  (** Float64.( ./ ) *)
+    negation : Why3.Term.lsymbol;  (** Float64.( .-_ ) *)
+  }
+
+  let create_t env =
+    let th = Why3.Env.read_theory env [ "ieee_float" ] "Float64" in
+    let tysymbol = Why3.Theory.ns_find_ts th.th_export [ "t" ] in
+    let ty = Why3.Ty.ty_app tysymbol [] in
+    let add = Why3.Theory.ns_find_ls th.th_export [ "add" ] in
+    let sub = Why3.Theory.ns_find_ls th.th_export [ "sub" ] in
+    let mul = Why3.Theory.ns_find_ls th.th_export [ "mul" ] in
+    let div = Why3.Theory.ns_find_ls th.th_export [ "div" ] in
+    let neg = Why3.Theory.ns_find_ls th.th_export [ "neg" ] in
+    let le = Why3.Theory.ns_find_ls th.th_export [ "le" ] in
+    let ge = Why3.Theory.ns_find_ls th.th_export [ "ge" ] in
+    let lt = Why3.Theory.ns_find_ls th.th_export [ "lt" ] in
+    let gt = Why3.Theory.ns_find_ls th.th_export [ "gt" ] in
+    let infix =
+      let plus =
+        Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_infix ".+" ]
+      in
+      let minus =
+        Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_infix ".-" ]
+      in
+      let times =
+        Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_infix ".*" ]
+      in
+      let divide =
+        Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_infix "./" ]
+      in
+      let negation =
+        Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_prefix ".-" ]
+      in
+      { plus; minus; times; divide; negation }
+    in
+    { ty; tysymbol; add; sub; mul; div; neg; le; ge; lt; gt; infix }
+
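+  (* Memoized on the Why3 environment: the Float64 theory symbols are looked
+     up only once per [env]. *)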
+  let create = Why3.Env.Wenv.memoize 10 (fun env -> create_t env)
+end
+
+module Real = struct
+  type t = {
+    ty : Why3.Ty.ty;
+    tysymbol : Why3.Ty.tysymbol;
+    add : Why3.Term.lsymbol;  (** Real.add *)
+    sub : Why3.Term.lsymbol;  (** Real.sub *)
+    mul : Why3.Term.lsymbol;  (** Real.mul *)
+    div : Why3.Term.lsymbol;  (** Real.div *)
+    neg : Why3.Term.lsymbol;  (** Real.neg *)
+    le : Why3.Term.lsymbol;  (** Real.le *)
+    ge : Why3.Term.lsymbol;  (** Real.ge *)
+    lt : Why3.Term.lsymbol;  (** Real.lt *)
+    gt : Why3.Term.lsymbol;  (** Real.gt *)
+  }
+
+  let create_t env =
+    let th = Why3.Env.read_theory env [ "real" ] "Real" in
+    let tysymbol = Why3.Ty.ts_real in
+    let ty = Why3.Ty.ty_app tysymbol [] in
+    let add = Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_infix "+" ] in
+    let sub = Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_infix "-" ] in
+    let mul = Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_infix "*" ] in
+    let div = Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_infix "/" ] in
+    let neg =
+      Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_prefix "-" ]
+    in
+    let le = Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_infix "<=" ] in
+    let ge = Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_infix ">=" ] in
+    let lt = Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_infix "<" ] in
+    let gt = Why3.Theory.ns_find_ls th.th_export [ Why3.Ident.op_infix ">" ] in
+    { ty; tysymbol; add; sub; mul; div; neg; le; ge; lt; gt }
+
+  let create = Why3.Env.Wenv.memoize 10 (fun env -> create_t env)
+end
diff --git a/src/transformations/symbols.mli b/src/transformations/symbols.mli
new file mode 100644
index 0000000000000000000000000000000000000000..e7c19c9e8232a57cd0761771bc0f3391d1751b1d
--- /dev/null
+++ b/src/transformations/symbols.mli
@@ -0,0 +1,66 @@
+(**************************************************************************)
+(*                                                                        *)
+(*  This file is part of CAISAR.                                          *)
+(*                                                                        *)
+(*  Copyright (C) 2023                                                    *)
+(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
+(*         alternatives)                                                  *)
+(*                                                                        *)
+(*  You can redistribute it and/or modify it under the terms of the GNU   *)
+(*  Lesser General Public License as published by the Free Software       *)
+(*  Foundation, version 2.1.                                              *)
+(*                                                                        *)
+(*  It is distributed in the hope that it will be useful,                 *)
+(*  but WITHOUT ANY WARRANTY; without even the implied warranty of        *)
+(*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the          *)
+(*  GNU Lesser General Public License for more details.                   *)
+(*                                                                        *)
+(*  See the GNU Lesser General Public License version 2.1                 *)
+(*  for more details (enclosed in the file licenses/LGPLv2.1).            *)
+(*                                                                        *)
+(**************************************************************************)
+
+module Float64 : sig
+  type t = private {
+    ty : Why3.Ty.ty;
+    tysymbol : Why3.Ty.tysymbol;
+    add : Why3.Term.lsymbol;  (** Float64.add *)
+    sub : Why3.Term.lsymbol;  (** Float64.sub *)
+    mul : Why3.Term.lsymbol;  (** Float64.mul *)
+    div : Why3.Term.lsymbol;  (** Float64.div *)
+    neg : Why3.Term.lsymbol;  (** Float64.neg *)
+    le : Why3.Term.lsymbol;  (** Float64.le *)
+    ge : Why3.Term.lsymbol;  (** Float64.ge *)
+    lt : Why3.Term.lsymbol;  (** Float64.lt *)
+    gt : Why3.Term.lsymbol;  (** Float64.gt *)
+    infix : infix;
+  }
+
+  and infix = private {
+    plus : Why3.Term.lsymbol;  (** Float64.( .+ ) *)
+    minus : Why3.Term.lsymbol;  (** Float64.( .- ) *)
+    times : Why3.Term.lsymbol;  (** Float64.( .* ) *)
+    divide : Why3.Term.lsymbol;  (** Float64.( ./ ) *)
+    negation : Why3.Term.lsymbol;  (** Float64.( .-_ ) *)
+  }
+
+  val create : Why3.Env.env -> t
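+  (** [create env] retrieves the symbols of the [ieee_float.Float64] theory
+      from the environment [env]. The result is memoized per environment. *)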
+end
+
+module Real : sig
+  type t = private {
+    ty : Why3.Ty.ty;
+    tysymbol : Why3.Ty.tysymbol;
+    add : Why3.Term.lsymbol;  (** Real.add *)
+    sub : Why3.Term.lsymbol;  (** Real.sub *)
+    mul : Why3.Term.lsymbol;  (** Real.mul *)
+    div : Why3.Term.lsymbol;  (** Real.div *)
+    neg : Why3.Term.lsymbol;  (** Real.neg *)
+    le : Why3.Term.lsymbol;  (** Real.le *)
+    ge : Why3.Term.lsymbol;  (** Real.ge *)
+    lt : Why3.Term.lsymbol;  (** Real.lt *)
+    gt : Why3.Term.lsymbol;  (** Real.gt *)
+  }
+
+  val create : Why3.Env.env -> t
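+  (** [create env] retrieves the symbols of the [real.Real] theory from the
+      environment [env]. The result is memoized per environment. *)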
+end
\ No newline at end of file
diff --git a/src/transformations/utils.ml b/src/transformations/utils.ml
index fa4e92e885701c8c8774baa0566a76b9dddf3027..8638ff6a954795b532fe0215164b883a7fae3b67 100644
--- a/src/transformations/utils.ml
+++ b/src/transformations/utils.ml
@@ -20,27 +20,287 @@
 (*                                                                        *)
 (**************************************************************************)
 
-open Why3
-
-let meta_input =
-  Theory.(
-    register_meta "caisar_input"
-      ~desc:"Indicates an input position among the inputs of the neural network"
-      [ MTlsymbol; MTint ])
-
-let meta_output =
-  Theory.(
-    register_meta "caisar_output"
-      ~desc:
-        "Indicates an output position among the outputs of the neural network"
-      [ MTlsymbol; MTint ])
-
-let meta_nn_filename =
-  Theory.(
-    register_meta_excl "caisar_nnet_or_onnx"
-      ~desc:"Indicates the filename of the neural network" [ MTstring ])
-
-let meta_dataset_filename =
-  Theory.(
-    register_meta_excl "caisar_dataset"
-      ~desc:"Indicates the filename of the dataset" [ MTstring ])
+open Base
+
+let src = Logs.Src.create "TransformationsUtils" ~doc:"Transformation utils"
+
+let float_of_real_constant rc =
+  let is_neg, rc =
+    ( Why3.(BigInt.lt rc.Number.rl_real.rv_sig BigInt.zero),
+      Why3.Number.abs_real rc )
+  in
+  let rc_str = Fmt.str "%a" Why3.Number.(print_real_constant full_support) rc in
+  let f = Float.of_string rc_str in
+  if is_neg then Float.neg f else f
+
+(* Collects the input variables and their positions inside input vectors.
+   This process is memoized w.r.t. the lsymbols corresponding to input
+   vectors. *)
+
+type position_input_vars =
+  (int Why3.Term.Mls.t
+  [@printer
+    fun fmt mls ->
+      Why3.(
+        Pp.print_iter2 Term.Mls.iter Pp.newline Pp.comma Pretty.print_ls Pp.int
+          fmt mls)])
+[@@deriving show]
+
+let input_vars : position_input_vars Why3.Trans.trans =
+  let hls = Why3.Term.Hls.create 13 in
+  let add index mls = function
+    | { Why3.Term.t_node = Tapp (ls, []); _ } -> Why3.Term.Mls.add ls index mls
+    | t ->
+      Logging.code_error ~src (fun m ->
+        m "Not an input variable: %a" Why3.Pretty.print_term t)
+  in
+  let rec do_collect mls t =
+    match t.Why3.Term.t_node with
+    | Why3.Term.Tapp
+        ( ls1 (* @@ *),
+          [
+            { t_node = Tapp (_ (* model *), _); _ };
+            { t_node = Tapp (ls2 (* input vector *), tl (* input vars *)); _ };
+          ] )
+      when String.equal ls1.ls_name.id_string (Why3.Ident.op_infix "@@") -> (
+      match Language.lookup_vector ls2 with
+      | Some vector_length ->
+        assert (vector_length = List.length tl);
+        if Why3.Term.Hls.mem hls ls2
+        then mls
+        else List.foldi ~init:mls ~f:add tl
+      | None -> mls)
+    | _ -> Why3.Term.t_fold do_collect mls t
+  in
+  Why3.Trans.fold_decl
+    (fun decl mls -> Why3.Decl.decl_fold do_collect mls decl)
+    Why3.Term.Mls.empty
+
+(* Collects input feature values (these typically come from a data point of a
+   dataset).
+
+   This is done by visiting the task and retrieving the real constants [rc]
+   in expressions of the form [_ <= x .- rc] or [x .- rc <= _]. *)
+
+type features =
+  (Float.t Why3.Term.Mls.t
+  [@printer
+    fun fmt mls ->
+      Why3.(
+        Pp.print_iter2 Term.Mls.iter Pp.newline Pp.comma Pretty.print_ls
+          Pp.float fmt mls)])
+[@@deriving show]
+
+let collect_features env vars =
+  let th_f64 = Symbols.Float64.create env in
+  let rec do_collect features t =
+    match t.Why3.Term.t_node with
+    | Why3.Term.Tapp
+        ( ls_le (* less-equal *),
+          [
+            _ (* eps *);
+            ( {
+                t_node =
+                  Why3.Term.Tapp
+                    ( _ (* typically, .- *),
+                      [
+                        { t_node = Tapp (ls_var (* input var *), []); _ };
+                        { t_node = Tconst (ConstReal rc_feature_value); _ };
+                      ] );
+                _;
+              }
+            | {
+                t_node =
+                  Why3.Term.Tapp
+                    ( _ (* typically, sub *),
+                      [
+                        _ (* mode *);
+                        { t_node = Tapp (ls_var (* input var *), []); _ };
+                        { t_node = Tconst (ConstReal rc_feature_value); _ };
+                      ] );
+                _;
+              } );
+          ] )
+      when Why3.Term.ls_equal ls_le th_f64.le
+           && List.mem vars ls_var ~equal:Why3.Term.ls_equal ->
+      let feature_value = float_of_real_constant rc_feature_value in
+      Why3.Term.Mls.add ls_var feature_value features
+    | Why3.Term.Tapp
+        ( ls_le (* less-equal *),
+          [
+            ( {
+                t_node =
+                  Why3.Term.Tapp
+                    ( _ (* typically, .- *),
+                      [
+                        { t_node = Tapp (ls_var (* input var *), []); _ };
+                        { t_node = Tconst (ConstReal rc_feature_value); _ };
+                      ] );
+                _;
+              }
+            | {
+                t_node =
+                  Why3.Term.Tapp
+                    ( _ (* typically, sub *),
+                      [
+                        _ (* mode *);
+                        { t_node = Tapp (ls_var (* input var *), []); _ };
+                        { t_node = Tconst (ConstReal rc_feature_value); _ };
+                      ] );
+                _;
+              } );
+            _ (* eps *);
+          ] )
+      when Why3.Term.ls_equal ls_le th_f64.le
+           && List.mem vars ls_var ~equal:Why3.Term.ls_equal ->
+      let feature_value = float_of_real_constant rc_feature_value in
+      Why3.Term.Mls.add ls_var feature_value features
+    | _ -> Why3.Term.t_fold do_collect features t
+  in
+  Why3.Trans.fold_decl
+    (fun decl features -> Why3.Decl.decl_fold do_collect features decl)
+    Why3.Term.Mls.empty
+
+let input_features env ~vars : features Why3.Trans.trans =
+  Why3.Trans.bind (collect_features env vars) (fun features ->
+    Why3.Trans.return features)
+
+(* Collects input feature perturbations as lower and upper bounds.
+
+   This is done by visiting the task and retrieving the real constants [rc] in
+   expressions of the form [rc <= x] and [x <= rc], respectively. If more
+   than one lower (resp. upper) bound is present, the max (resp. min) is
+   considered.
+
+   Assumption: [inline_trivial] and [simplify_formula] transformations have been
+   performed beforehand. *)
+
+type interval = float option * float option [@@deriving show]
+
+type perturbation =
+  (interval Why3.Term.Mls.t
+  [@printer
+    fun fmt mls ->
+      Why3.(
+        Pp.print_iter2 Term.Mls.iter Pp.newline Pp.comma Pretty.print_ls
+          pp_interval fmt mls)])
+[@@deriving show]
+
+let collect_perturbation env vars =
+  let th_f64 = Symbols.Float64.create env in
+  let rec do_collect perturbation t =
+    match t.Why3.Term.t_node with
+    | Why3.Term.Tapp
+        ( ls_le (* less-equal *),
+          [
+            { t_node = Tconst (ConstReal rc_lower_bound); _ } (* lower bound *);
+            { t_node = Tapp (ls_var (* input var *), []); _ };
+          ] )
+      when Why3.Term.ls_equal ls_le th_f64.le
+           && List.mem vars ls_var ~equal:Why3.Term.ls_equal ->
+      let lower_bound = float_of_real_constant rc_lower_bound in
+      let interval =
+        match Why3.Term.Mls.find_opt ls_var perturbation with
+        | None -> (Some lower_bound, None)
+        | Some (current_lower_bound, current_upper_bound) ->
+          let lower_bound =
+            Option.value_map current_lower_bound ~default:lower_bound
+              ~f:(Float.max lower_bound)
+          in
+          (Some lower_bound, current_upper_bound)
+      in
+      Why3.Term.Mls.add ls_var interval perturbation
+    | Why3.Term.Tapp
+        ( ls_le (* less-equal *),
+          [
+            { t_node = Tapp (ls_var (* input var *), []); _ };
+            { t_node = Tconst (ConstReal rc_upper_bound); _ } (* upper bound *);
+          ] )
+      when Why3.Term.ls_equal ls_le th_f64.le
+           && List.mem vars ls_var ~equal:Why3.Term.ls_equal ->
+      let upper_bound = float_of_real_constant rc_upper_bound in
+      let interval =
+        match Why3.Term.Mls.find_opt ls_var perturbation with
+        | None -> (Some upper_bound, None)
+        | Some (current_lower_bound, current_upper_bound) ->
+          let upper_bound =
+            Option.value_map current_upper_bound ~default:upper_bound
+              ~f:(Float.min upper_bound)
+          in
+          (current_lower_bound, Some upper_bound)
+      in
+      Why3.Term.Mls.add ls_var interval perturbation
+    | _ -> Why3.Term.t_fold do_collect perturbation t
+  in
+  Why3.Trans.fold_decl
+    (fun decl perturbation -> Why3.Decl.decl_fold do_collect perturbation decl)
+    Why3.Term.Mls.empty
+
+let input_perturbations env ~vars : perturbation Why3.Trans.trans =
+  Why3.Trans.bind (collect_perturbation env vars) (fun perturbation ->
+    Why3.Trans.return perturbation)
+
+(* Collects the output (classification) label (this is typically associated
+   with the feature values coming from a data point of a dataset).
+
+   This is done by visiting the task and retrieving the vector index
+   (typically, an argument of the [_] lsymbol) that appears most often in
+   expressions of the form [(model @@ vector)[i1] <= (model @@ vector)[i2]]. *)
+
+type label = int [@@deriving show]
+
+let collect_label env =
+  let th_f64 = Symbols.Float64.create env in
+  let rec do_collect label t =
+    match t.Why3.Term.t_node with
+    | Why3.Term.Tapp
+        ( ls_le (* less-equal *),
+          [
+            {
+              t_node =
+                Why3.Term.Tapp
+                  ( ls_get1 (* [_] *),
+                    [
+                      ({ t_node = Tapp (ls_atat1 (* @@ *), _); _ } as _t1);
+                      ({ t_node = Tconst (ConstInt index1); _ } as _t2);
+                    ] );
+              _;
+            };
+            {
+              t_node =
+                Why3.Term.Tapp
+                  ( ls_get2 (* [_] *),
+                    [
+                      ({ t_node = Tapp (ls_atat2 (* @@ *), _); _ } as _t3);
+                      ({ t_node = Tconst (ConstInt index2); _ } as _t4);
+                    ] );
+              _;
+            };
+          ] )
+      when Why3.Term.ls_equal ls_le th_f64.le
+           && String.equal ls_get1.ls_name.id_string (Why3.Ident.op_get "")
+           && String.equal ls_atat1.ls_name.id_string (Why3.Ident.op_infix "@@")
+           && String.equal ls_get2.ls_name.id_string (Why3.Ident.op_get "")
+           && String.equal ls_atat2.ls_name.id_string (Why3.Ident.op_infix "@@")
+      -> (
+      let index2 = Why3.Number.to_small_integer index2 in
+      match label with
+      | None -> Some index2
+      | Some label ->
+        let index1 = Why3.Number.to_small_integer index1 in
+        if label = index2 || label = index1
+        then if label = index2 then Some label else Some index1
+        else
+          Logging.user_error (fun m ->
+            m "Cannot determine (classification) label"))
+    | _ -> Why3.Term.t_fold do_collect label t
+  in
+  Why3.Trans.fold
+    (fun task_hd label ->
+      match task_hd.Why3.Task.task_decl.td_node with
+      | Why3.Theory.Decl ({ d_node = Dprop (Pgoal, _, _); _ } as decl) ->
+        Why3.Decl.decl_fold do_collect label decl
+      | _ -> label)
+    None
+
+let output_label env : label option Why3.Trans.trans =
+  Why3.Trans.bind (collect_label env) (fun label -> Why3.Trans.return label)
diff --git a/src/transformations/utils.mli b/src/transformations/utils.mli
index 1f40cfc3b7917e6f0ea2e24cf379de2cee967624..63ad5a212a63e7d65c2f9cef1d79f62425a5e8d7 100644
--- a/src/transformations/utils.mli
+++ b/src/transformations/utils.mli
@@ -20,16 +20,26 @@
 (*                                                                        *)
 (**************************************************************************)
 
-open Why3
+type position_input_vars = int Why3.Term.Mls.t [@@deriving show]
+(** Map from input variable lsymbols to their positions in input vectors. *)
 
-val meta_input : Theory.meta
-(** Indicates an input position among the inputs of the neural network. *)
+type features = Float.t Why3.Term.Mls.t [@@deriving show]
+(** Input feature values. *)
 
-val meta_output : Theory.meta
-(** Indicates an output position among the outputs of the neural network. *)
+type perturbation = interval Why3.Term.Mls.t [@@deriving show]
 
-val meta_nn_filename : Theory.meta
-(** The filename of the neural network. *)
+and interval = float option * float option [@@deriving show]
+(** Perturbation interval for each input variable: lower and upper bounds. *)
 
-val meta_dataset_filename : Theory.meta
-(** The filename of the dataset. *)
+type label = int [@@deriving show]
+(** Output label. *)
+
+val input_vars : position_input_vars Why3.Trans.trans
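+(** Collect the input variables of a task, mapped to their positions inside
+    input vectors. *)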
+
+val input_features :
+  Why3.Env.env -> vars:Why3.Term.lsymbol list -> features Why3.Trans.trans
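+(** Collect the input feature values of the given input variables [vars]
+    (typically coming from a data point of a dataset). *)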
+
+val input_perturbations :
+  Why3.Env.env -> vars:Why3.Term.lsymbol list -> perturbation Why3.Trans.trans
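+(** Collect the input perturbations of the given input variables [vars], as
+    lower and upper bound intervals. *)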
+
+val output_label : Why3.Env.env -> label option Why3.Trans.trans
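+(** Collect the output (classification) label appearing in the goal of a task,
+    if any. *)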
diff --git a/src/transformations/vars_on_lhs.ml b/src/transformations/vars_on_lhs.ml
deleted file mode 100644
index 684a6612ba6bd8f23a84f890b40077bd41dadd8a..0000000000000000000000000000000000000000
--- a/src/transformations/vars_on_lhs.ml
+++ /dev/null
@@ -1,95 +0,0 @@
-(**************************************************************************)
-(*                                                                        *)
-(*  This file is part of CAISAR.                                          *)
-(*                                                                        *)
-(*  Copyright (C) 2023                                                    *)
-(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
-(*         alternatives)                                                  *)
-(*                                                                        *)
-(*  You can redistribute it and/or modify it under the terms of the GNU   *)
-(*  Lesser General Public License as published by the Free Software       *)
-(*  Foundation, version 2.1.                                              *)
-(*                                                                        *)
-(*  It is distributed in the hope that it will be useful,                 *)
-(*  but WITHOUT ANY WARRANTY; without even the implied warranty of        *)
-(*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the          *)
-(*  GNU Lesser General Public License for more details.                   *)
-(*                                                                        *)
-(*  See the GNU Lesser General Public License version 2.1                 *)
-(*  for more details (enclosed in the file licenses/LGPLv2.1).            *)
-(*                                                                        *)
-(**************************************************************************)
-
-open Why3
-open Base
-
-let make_rt env =
-  let th = Env.read_theory env [ "ieee_float" ] "Float64" in
-  let t = Theory.ns_find_ts th.th_export [ "t" ] in
-  let le_float = Theory.ns_find_ls th.th_export [ "le" ] in
-  let lt_float = Theory.ns_find_ls th.th_export [ "lt" ] in
-  let ge_float = Theory.ns_find_ls th.th_export [ "ge" ] in
-  let gt_float = Theory.ns_find_ls th.th_export [ "gt" ] in
-  let th = Env.read_theory env [ "real" ] "Real" in
-  let le_real = Theory.ns_find_ls th.th_export [ Ident.op_infix "<=" ] in
-  let lt_real = Theory.ns_find_ls th.th_export [ Ident.op_infix "<" ] in
-  let ge_real = Theory.ns_find_ls th.th_export [ Ident.op_infix ">=" ] in
-  let gt_real = Theory.ns_find_ls th.th_export [ Ident.op_infix ">" ] in
-  let rec rt t =
-    let t = Term.t_map rt t in
-    match t.t_node with
-    | Tapp
-        ( ls,
-          [
-            ({ t_node = Tconst _; _ } as const);
-            ({ t_node = Tapp (_, []); _ } as var);
-          ] ) ->
-      let tt = [ var; const ] in
-      let ls_rel =
-        if Term.ls_equal ls le_float
-        then ge_float
-        else if Term.ls_equal ls ge_float
-        then le_float
-        else if Term.ls_equal ls lt_float
-        then gt_float
-        else if Term.ls_equal ls gt_float
-        then lt_float
-        else ls
-      in
-      let ls_rel =
-        if Term.ls_equal ls le_real
-        then ge_real
-        else if Term.ls_equal ls ge_real
-        then le_real
-        else if Term.ls_equal ls lt_real
-        then gt_real
-        else if Term.ls_equal ls gt_real
-        then lt_real
-        else ls_rel
-      in
-      if Term.ls_equal ls_rel ls then t else Term.ps_app ls_rel tt
-    | _ -> t
-  in
-  let task =
-    List.fold
-      [ le_float; lt_float; ge_float; gt_float ]
-      ~init:(Task.add_ty_decl None t) ~f:Task.add_param_decl
-  in
-  let task =
-    List.fold
-      [ le_real; lt_real; ge_real; gt_real ]
-      ~init:(Task.add_ty_decl task Ty.ts_real)
-      ~f:Task.add_param_decl
-  in
-  (rt, task)
-
-let vars_on_lhs env =
-  let rt, task = make_rt env in
-  Trans.rewrite rt task
-
-let init () =
-  Trans.register_env_transform
-    ~desc:
-      "Transformation for provers that need variables on the left-hand-side of \
-       logic symbols."
-    "vars_on_lhs" vars_on_lhs
diff --git a/src/verification.ml b/src/verification.ml
index c1041ffddf9b9f03b2f76fd8efcb3eb8c9435c15..1c29e26bb0ccf000d2b3232be6727d49ecb1ea48 100644
--- a/src/verification.ml
+++ b/src/verification.ml
@@ -92,8 +92,8 @@ let create_env loadpath =
 
 let write_nier_as_onnx onnx_out_dir =
   Language.iter_nn (fun ls nn ->
-    match nn.nn_nier with
-    | Some nn_nier -> (
+    match nn.nn_format with
+    | ONNX (Some nn_nier) -> (
       try
         if not (Stdlib.Sys.file_exists onnx_out_dir)
         then Stdlib.Sys.mkdir onnx_out_dir 0o755;
@@ -108,12 +108,12 @@ let write_nier_as_onnx onnx_out_dir =
         Logging.user_error (fun m ->
           m "@[Cannot write NIER as ONNX in folder '%s': '%s'@]" onnx_out_dir
             msg))
-    | None ->
+    | _ ->
       Logs.warn (fun m ->
         m "@[No available NIER to write as ONNX for logic symbol '%a'@]"
           Pretty.print_ls ls))
 
-let answer_saver limit config env config_prover dataset task =
+let answer_saver_on_dataset limit config env config_prover ~dataset task =
   let dataset_predicate =
     let on_model ls =
       Option.value_or_thunk
@@ -123,18 +123,17 @@ let answer_saver limit config env config_prover dataset task =
         (Language.lookup_loaded_svms ls)
     in
     let on_dataset _ls =
-      match dataset with
-      | None -> invalid_arg "No dataset provided"
-      | Some filename ->
-        if String.is_suffix filename ~suffix:".csv"
-        then filename
-        else
-          invalid_arg
-            (Fmt.str "File '%s' has an unsupported extension" filename)
+      if String.is_suffix dataset ~suffix:".csv"
+      then dataset
+      else
+        invalid_arg (Fmt.str "File '%s' has an unsupported extension" dataset)
     in
     Dataset.interpret_predicate env ~on_model ~on_dataset task
   in
-  let answer = Saver.call_prover limit config config_prover dataset_predicate in
+  let answer =
+    Saver.call_prover_on_dataset_predicate limit config config_prover
+      dataset_predicate
+  in
   match answer.prover_answer with
   | Call_provers.Unknown "" ->
     let additional_info = Fmt.str "%d/%d" answer.nb_proved answer.nb_total in
@@ -184,13 +183,10 @@ let build_command config_prover ~nn_filename =
   in
   Re__Core.replace_string re_config ~by:config_site command
 
-let log_prover_task driver task =
-  let print fmt task = ignore (Driver.print_task_prepared driver fmt task) in
-  Logs.debug ~src:Logging.src_prover_spec (fun m ->
-    m "@[Prover-tailored specification:@.%a@]" print task)
-
 let call_prover_on_task limit config command driver task =
-  log_prover_task driver task;
+  Logs.debug ~src:Logging.src_prover_spec (fun m ->
+    let print fmt task = ignore (Driver.print_task_prepared driver fmt task) in
+    m "Prover-tailored specification:@.%a" print task);
   let prover_call =
     Driver.prove_task_prepared ~command ~config ~limit driver task
   in
@@ -266,6 +262,67 @@ let answer_dataset limit config env prover config_prover driver dataset task =
   in
   (prover_answer, additional_info)
 
+let answer_saver limit config env config_prover ~proof_strategy task =
+  let tasks = proof_strategy task in
+  let answers =
+    List.map tasks ~f:(fun task ->
+      let svm, abstraction =
+        match Task.on_meta_excl Meta.meta_svm_filename task with
+        | None -> Logging.user_error (fun m -> m "No SVM model found")
+        | Some [ MAstr svm_filename; MAstr svm_abstraction ] ->
+          (svm_filename, Language.svm_abstraction_of_string svm_abstraction)
+        | Some _ -> assert false (* By construction of the meta. *)
+      in
+      let vars = Why3.Term.Mls.keys (Trans.apply Utils.input_vars task) in
+      let dataset : Csv.t =
+        let features = Trans.apply (Utils.input_features env ~vars) task in
+        let features =
+          List.map vars ~f:(fun v ->
+            match Why3.Term.Mls.find_opt v features with
+            | None ->
+              Logging.user_error (fun m ->
+                m "Cannot find feature for input variable '%a'"
+                  Why3.Pretty.print_ls v)
+            | Some feature -> Float.to_string feature)
+        in
+        let label = Trans.apply (Utils.output_label env) task in
+        let label =
+          match label with
+          | None ->
+            Logging.user_error (fun m -> m "Cannot find (classification) label")
+          | Some label -> Int.to_string label
+        in
+        let vector = label :: features in
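+        (* Header row of the single-data-point CSV; presumably interpreted by
+           SAVer as "# <nb of data points> <nb of features>". *)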
+        let header = [ Fmt.str "# 1 %d" (List.length features) ] in
+        [ header; vector ]
+      in
+      let perturbations =
+        let perturbation =
+          Trans.apply (Trans.seq [ Inlining.trivial; Relop.simplify env ]) task
+          |> Trans.apply (Utils.input_perturbations env ~vars)
+        in
+        List.map vars ~f:(fun v ->
+          match Why3.Term.Mls.find_opt v perturbation with
+          | None ->
+            Logging.user_error (fun m ->
+              m "Cannot find input perturbation for variable '%a'"
+                Why3.Pretty.print_ls v)
+          | Some (None, _ | _, None) ->
+            Logging.user_error (fun m ->
+              m "Partial input perturbation for variable '%a'"
+                Why3.Pretty.print_ls v)
+          | Some (Some lower, Some upper) -> (lower, upper))
+      in
+      let saver_answer =
+        Saver.call_prover limit config config_prover ?abstraction ~svm ~dataset
+          ~perturbations ()
+      in
+      saver_answer.prover_answer)
+  in
+  let prover_answer = combine_prover_answers answers in
+  let additional_info = Generic None in
+  (prover_answer, additional_info)
+
 let answer_generic limit config prover config_prover driver ~proof_strategy task
     =
   let tasks = proof_strategy task in
@@ -273,10 +330,11 @@ let answer_generic limit config prover config_prover driver ~proof_strategy task
     List.concat_map tasks ~f:(fun task ->
       let task = Driver.prepare_task driver task in
       let nn_filename =
-        match Task.on_meta_excl Utils.meta_nn_filename task with
+        match Task.on_meta_excl Meta.meta_nn_filename task with
+        | None ->
+          Logging.user_error (fun m -> m "No neural network model found")
         | Some [ MAstr nn_filename ] -> nn_filename
         | Some _ -> assert false (* By construction of the meta. *)
-        | None -> invalid_arg "No neural network model found in task"
       in
       let tasks =
         (* Turn [task] into a list (ie, conjunction) of disjunctions of
@@ -294,22 +352,28 @@ let answer_generic limit config prover config_prover driver ~proof_strategy task
   (prover_answer, additional_info)
 
 let call_prover ~cwd ~limit config env prover config_prover driver ?dataset
-  def_constants task =
+  definitions task =
   let prover_answer, additional_info =
     match prover with
-    | Prover.Saver -> answer_saver limit config env config_prover dataset task
+    | Prover.Saver when Option.is_some dataset ->
+      let dataset = Option.value_exn dataset in
+      answer_saver_on_dataset limit config env config_prover ~dataset task
+    | Saver ->
+      let task = Interpreter.interpret_task ~cwd env ~definitions task in
+      let proof_strategy = Proof_strategy.apply_svm_prover_strategy in
+      answer_saver limit config env config_prover ~proof_strategy task
     | Aimos -> answer_aimos limit config env config_prover dataset task
     | (Marabou | Pyrat | Nnenum | ABCrown) when Option.is_some dataset ->
       let dataset = Unix.realpath (Option.value_exn dataset) in
       answer_dataset limit config env prover config_prover driver dataset task
     | Marabou | Pyrat | Nnenum | ABCrown ->
-      let task = Interpretation.interpret_task ~cwd env ~def_constants task in
-      let proof_strategy = Proof_strategy.apply_native_nn_prover in
+      let task = Interpreter.interpret_task ~cwd env ~definitions task in
+      let proof_strategy = Proof_strategy.apply_native_nn_prover_strategy in
       answer_generic limit config prover config_prover driver ~proof_strategy
         task
     | CVC5 ->
-      let task = Interpretation.interpret_task ~cwd env ~def_constants task in
-      let proof_strategy = Proof_strategy.apply_classic_prover env in
+      let task = Interpreter.interpret_task ~cwd env ~definitions task in
+      let proof_strategy = Proof_strategy.apply_classic_prover_strategy env in
       answer_generic limit config prover config_prover driver ~proof_strategy
         task
   in
@@ -351,7 +415,7 @@ let tasks_of_theory ~theories ~goals theory =
         List.exists goals_of_theory ~f:(String.equal task_goal_id))
 
 let verify ?format ~loadpath ?memlimit ?timelimit ?dataset prover ?prover_altern
-  ?(def_constants = []) ?(theories = []) ?(goals = []) ?onnx_out_dir file =
+  ?(definitions = []) ?(theories = []) ?(goals = []) ?onnx_out_dir file =
   let debug = Logging.(is_debug_level src_prover_call) in
   (if debug then Debug.(set_flag (lookup_flag "call_prover")));
   let env, config = create_env loadpath in
@@ -383,19 +447,19 @@ let verify ?format ~loadpath ?memlimit ?timelimit ?dataset prover ?prover_altern
       Whyconf.(filter_one_prover config (mk_filter_prover ~altern prover))
     with
     | Whyconf.ProverNotFound _ ->
-      invalid_arg
-        (Fmt.str "No prover corresponds to %s%a" prover
-           (if String.equal altern ""
-            then Fmt.nop
-            else Fmt.(any " " ++ parens string))
-           altern)
+      Logging.user_error (fun m ->
+        m "No prover corresponds to %s%a" prover
+          (if String.equal altern ""
+           then Fmt.nop
+           else Fmt.(any " " ++ parens string))
+          altern)
     | Whyconf.ProverAmbiguity _ ->
-      invalid_arg
-        (Fmt.str "More than one prover corresponds to %s%a" prover
-           (if String.equal altern ""
-            then Fmt.nop
-            else Fmt.(any " " ++ parens string))
-           altern)
+      Logging.user_error (fun m ->
+        m "More than one prover corresponds to %s%a" prover
+          (if String.equal altern ""
+           then Fmt.nop
+           else Fmt.(any " " ++ parens string))
+          altern)
   in
   let driver =
     Re__Core.replace_string re_config ~by:config_site config_prover.driver
@@ -415,7 +479,7 @@ let verify ?format ~loadpath ?memlimit ?timelimit ?dataset prover ?prover_altern
         List.map
           ~f:
             (call_prover ~cwd ~limit main env prover config_prover driver
-               ?dataset def_constants)
+               ?dataset definitions)
           tasks)
       mstr_theory
   in
diff --git a/src/verification.mli b/src/verification.mli
index d65b7e1b1ec83c5b8a5155725a2cd521ac4cd225..935f619fade3bd23f0cf2704bee4150fc0b61551 100644
--- a/src/verification.mli
+++ b/src/verification.mli
@@ -52,7 +52,7 @@ val verify :
   ?dataset:string ->
   Prover.t ->
   ?prover_altern:string ->
-  ?def_constants:(string * string) list ->
+  ?definitions:(string * string) list ->
   ?theories:string list ->
   ?goals:(string * string list) list ->
   ?onnx_out_dir:string ->
@@ -69,7 +69,7 @@ val verify :
       is the filepath of a dataset (eg, in CSV format) possibly appearing in
       [file].
     @param prover_altern is the alternative [prover] configuration.
-    @param def_constants is a key:value list defining toplevel constants.
+    @param definitions is a key:value list defining toplevel constants.
     @param theories
       identifies the theories whose goals are the only ones to verify.
     @param goals
diff --git a/stdlib/caisar/model.mlw b/stdlib/caisar/model.mlw
index 2fff26737dc3969e364ecac9bcfe7413ffc0edad..c9643ca058dd261c49937706ce053515a6f98231 100644
--- a/stdlib/caisar/model.mlw
+++ b/stdlib/caisar/model.mlw
@@ -45,4 +45,18 @@ theory NN
   use Model
   clone export Model with type model 'kind = model nn
 
-end
\ No newline at end of file
+end
+
+(** {2 Support Vector Machines} *)
+
+theory SVM
+
+  type svm
+  type abstraction = Interval | Raf | Hybrid
+
+  use Model
+  clone export Model with type kind = svm, type model 'kind = model svm
+
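+  (** [read_model_with_abstraction n a] reads the SVM model in file [n], to be
+      analyzed under abstraction [a]. *)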
+  function read_model_with_abstraction (n: string) (a: abstraction) : model svm
+
+end
diff --git a/tests/bin/saver b/tests/bin/saver
index 012a62a7bb831fc4a90e2eb683195dacb35e1901..d666762230e7bbc1d3427f5c70364f12d4b01cc8 100644
--- a/tests/bin/saver
+++ b/tests/bin/saver
@@ -9,5 +9,13 @@ case $1 in
         echo "SVM: $1"
         echo "Goal:"
         cat $2
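+        # $3: abstraction, $4: perturbation kind,
+        # $5: epsilon value or hyperrectangle file (when $4 is "from_file")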
+        echo "abstraction: $3"
+        echo "perturbation: $4"
+        if [ "from_file" = "$4" ]; then
+          echo "hyperrectangle:"
+          cat $5
+        else
+          echo "eps: $5"
+        fi;
         echo "[SUMMARY] 2 8 0.017000 1 2 0"
 esac
diff --git a/tests/dataset.t b/tests/dataset.t
index 5a67f956a11709fe7c35f0a790d62427da0fb915..19eb49462e2ffa02b0a1d90e1d0b3f0aa5ca8f25 100644
--- a/tests/dataset.t
+++ b/tests/dataset.t
@@ -130,583 +130,13 @@ Test verify on dataset
   (declare-const Y_4 Real)
   
   ;; Goal H
-  (assert (>= Y_0 Y_1))
-  
-  [DEBUG]{ProverSpec} Prover-tailored specification:
-  ;;; produced by PyRAT/VNN-LIB driver
-  ;;; produced by VNN-LIB driver
-  ;; X_0
-  (declare-const X_0 Real)
-  
-  ;; X_1
-  (declare-const X_1 Real)
-  
-  ;; X_2
-  (declare-const X_2 Real)
-  
-  ;; X_3
-  (declare-const X_3 Real)
-  
-  ;; X_4
-  (declare-const X_4 Real)
-  
-  ;; H
-  (assert (>= X_0 0.0))
-  
-  ;; H
-  (assert (<= X_0 1.0))
-  
-  ;; H
-  (assert (>= X_1 0.0))
-  
-  ;; H
-  (assert (<= X_1 1.0))
-  
-  ;; H
-  (assert (>= X_2 0.0))
-  
-  ;; H
-  (assert (<= X_2 1.0))
-  
-  ;; H
-  (assert (>= X_3 0.0))
-  
-  ;; H
-  (assert (<= X_3 1.0))
-  
-  ;; H
-  (assert (>= X_4 0.0))
-  
-  ;; H
-  (assert (<= X_4 1.0))
-  
-  ;; H
-  (assert (>= X_0 -0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (<= X_0 0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (>= X_1 0.9899999999999999911182158029987476766109466552734375))
-  
-  ;; H
-  (assert (<= X_1 1.0100000000000000088817841970012523233890533447265625))
-  
-  ;; H
-  (assert (>= X_2 0.77431372499999995273611830270965583622455596923828125))
-  
-  ;; H
-  (assert (<= X_2 0.79431372499999997049968669671216048300266265869140625))
-  
-  ;; H
-  (assert (>= X_3 0.00960784299999999959196461674082456738688051700592041015625))
-  
-  ;; H
-  (assert (<= X_3 0.029607843000000001743021726952065364457666873931884765625))
-  
-  ;; H
-  (assert (>= X_4 0.7664705880000000082219457908649928867816925048828125))
-  
-  ;; H
-  (assert (<= X_4 0.7864705880000000259855141848674975335597991943359375))
-  
-  ;; Y_0
-  (declare-const Y_0 Real)
-  
-  ;; Y_1
-  (declare-const Y_1 Real)
-  
-  ;; Y_2
-  (declare-const Y_2 Real)
-  
-  ;; Y_3
-  (declare-const Y_3 Real)
-  
-  ;; Y_4
-  (declare-const Y_4 Real)
-  
-  ;; Goal H
-  (assert (>= Y_2 Y_1))
-  
-  [DEBUG]{ProverSpec} Prover-tailored specification:
-  ;;; produced by PyRAT/VNN-LIB driver
-  ;;; produced by VNN-LIB driver
-  ;; X_0
-  (declare-const X_0 Real)
-  
-  ;; X_1
-  (declare-const X_1 Real)
-  
-  ;; X_2
-  (declare-const X_2 Real)
-  
-  ;; X_3
-  (declare-const X_3 Real)
-  
-  ;; X_4
-  (declare-const X_4 Real)
-  
-  ;; H
-  (assert (>= X_0 0.0))
-  
-  ;; H
-  (assert (<= X_0 1.0))
-  
-  ;; H
-  (assert (>= X_1 0.0))
-  
-  ;; H
-  (assert (<= X_1 1.0))
-  
-  ;; H
-  (assert (>= X_2 0.0))
-  
-  ;; H
-  (assert (<= X_2 1.0))
-  
-  ;; H
-  (assert (>= X_3 0.0))
-  
-  ;; H
-  (assert (<= X_3 1.0))
-  
-  ;; H
-  (assert (>= X_4 0.0))
-  
-  ;; H
-  (assert (<= X_4 1.0))
-  
-  ;; H
-  (assert (>= X_0 -0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (<= X_0 0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (>= X_1 0.9899999999999999911182158029987476766109466552734375))
-  
-  ;; H
-  (assert (<= X_1 1.0100000000000000088817841970012523233890533447265625))
-  
-  ;; H
-  (assert (>= X_2 0.77431372499999995273611830270965583622455596923828125))
-  
-  ;; H
-  (assert (<= X_2 0.79431372499999997049968669671216048300266265869140625))
-  
-  ;; H
-  (assert (>= X_3 0.00960784299999999959196461674082456738688051700592041015625))
-  
-  ;; H
-  (assert (<= X_3 0.029607843000000001743021726952065364457666873931884765625))
-  
-  ;; H
-  (assert (>= X_4 0.7664705880000000082219457908649928867816925048828125))
-  
-  ;; H
-  (assert (<= X_4 0.7864705880000000259855141848674975335597991943359375))
-  
-  ;; Y_0
-  (declare-const Y_0 Real)
-  
-  ;; Y_1
-  (declare-const Y_1 Real)
-  
-  ;; Y_2
-  (declare-const Y_2 Real)
-  
-  ;; Y_3
-  (declare-const Y_3 Real)
-  
-  ;; Y_4
-  (declare-const Y_4 Real)
-  
-  ;; Goal H
-  (assert (>= Y_3 Y_1))
-  
-  [DEBUG]{ProverSpec} Prover-tailored specification:
-  ;;; produced by PyRAT/VNN-LIB driver
-  ;;; produced by VNN-LIB driver
-  ;; X_0
-  (declare-const X_0 Real)
-  
-  ;; X_1
-  (declare-const X_1 Real)
-  
-  ;; X_2
-  (declare-const X_2 Real)
-  
-  ;; X_3
-  (declare-const X_3 Real)
-  
-  ;; X_4
-  (declare-const X_4 Real)
-  
-  ;; H
-  (assert (>= X_0 0.0))
-  
-  ;; H
-  (assert (<= X_0 1.0))
-  
-  ;; H
-  (assert (>= X_1 0.0))
-  
-  ;; H
-  (assert (<= X_1 1.0))
-  
-  ;; H
-  (assert (>= X_2 0.0))
-  
-  ;; H
-  (assert (<= X_2 1.0))
-  
-  ;; H
-  (assert (>= X_3 0.0))
-  
-  ;; H
-  (assert (<= X_3 1.0))
-  
-  ;; H
-  (assert (>= X_4 0.0))
-  
-  ;; H
-  (assert (<= X_4 1.0))
-  
-  ;; H
-  (assert (>= X_0 -0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (<= X_0 0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (>= X_1 0.9899999999999999911182158029987476766109466552734375))
-  
-  ;; H
-  (assert (<= X_1 1.0100000000000000088817841970012523233890533447265625))
-  
-  ;; H
-  (assert (>= X_2 0.77431372499999995273611830270965583622455596923828125))
-  
-  ;; H
-  (assert (<= X_2 0.79431372499999997049968669671216048300266265869140625))
-  
-  ;; H
-  (assert (>= X_3 0.00960784299999999959196461674082456738688051700592041015625))
-  
-  ;; H
-  (assert (<= X_3 0.029607843000000001743021726952065364457666873931884765625))
-  
-  ;; H
-  (assert (>= X_4 0.7664705880000000082219457908649928867816925048828125))
-  
-  ;; H
-  (assert (<= X_4 0.7864705880000000259855141848674975335597991943359375))
-  
-  ;; Y_0
-  (declare-const Y_0 Real)
-  
-  ;; Y_1
-  (declare-const Y_1 Real)
-  
-  ;; Y_2
-  (declare-const Y_2 Real)
-  
-  ;; Y_3
-  (declare-const Y_3 Real)
-  
-  ;; Y_4
-  (declare-const Y_4 Real)
-  
-  ;; Goal H
-  (assert (>= Y_4 Y_1))
-  
-  [DEBUG]{ProverSpec} Prover-tailored specification:
-  ;;; produced by PyRAT/VNN-LIB driver
-  ;;; produced by VNN-LIB driver
-  ;; X_0
-  (declare-const X_0 Real)
-  
-  ;; X_1
-  (declare-const X_1 Real)
-  
-  ;; X_2
-  (declare-const X_2 Real)
-  
-  ;; X_3
-  (declare-const X_3 Real)
-  
-  ;; X_4
-  (declare-const X_4 Real)
-  
-  ;; H
-  (assert (>= X_0 0.0))
-  
-  ;; H
-  (assert (<= X_0 1.0))
-  
-  ;; H
-  (assert (>= X_1 0.0))
-  
-  ;; H
-  (assert (<= X_1 1.0))
-  
-  ;; H
-  (assert (>= X_2 0.0))
-  
-  ;; H
-  (assert (<= X_2 1.0))
-  
-  ;; H
-  (assert (>= X_3 0.0))
-  
-  ;; H
-  (assert (<= X_3 1.0))
-  
-  ;; H
-  (assert (>= X_4 0.0))
-  
-  ;; H
-  (assert (<= X_4 1.0))
-  
-  ;; H
-  (assert (>= X_0 0.93117647100000000559560930923908017575740814208984375))
-  
-  ;; H
-  (assert (<= X_0 0.95117647100000002335917770324158482253551483154296875))
-  
-  ;; H
-  (assert (>= X_1 -0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (<= X_1 0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (>= X_2 -0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (<= X_2 0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (>= X_3 0.00176470599999999956664087363833459676243364810943603515625))
-  
-  ;; H
-  (assert (<= X_3 0.021764706000000001717697983849575393833220005035400390625))
-  
-  ;; H
-  (assert (>= X_4 0.029215685999999997657372574622058891691267490386962890625))
-  
-  ;; H
-  (assert (<= X_4 0.049215686000000001543153160810106783173978328704833984375))
-  
-  ;; Y_0
-  (declare-const Y_0 Real)
-  
-  ;; Y_1
-  (declare-const Y_1 Real)
-  
-  ;; Y_2
-  (declare-const Y_2 Real)
-  
-  ;; Y_3
-  (declare-const Y_3 Real)
-  
-  ;; Y_4
-  (declare-const Y_4 Real)
-  
-  ;; Goal H
-  (assert (>= Y_1 Y_0))
-  
-  [DEBUG]{ProverSpec} Prover-tailored specification:
-  ;;; produced by PyRAT/VNN-LIB driver
-  ;;; produced by VNN-LIB driver
-  ;; X_0
-  (declare-const X_0 Real)
-  
-  ;; X_1
-  (declare-const X_1 Real)
-  
-  ;; X_2
-  (declare-const X_2 Real)
-  
-  ;; X_3
-  (declare-const X_3 Real)
-  
-  ;; X_4
-  (declare-const X_4 Real)
-  
-  ;; H
-  (assert (>= X_0 0.0))
-  
-  ;; H
-  (assert (<= X_0 1.0))
-  
-  ;; H
-  (assert (>= X_1 0.0))
-  
-  ;; H
-  (assert (<= X_1 1.0))
-  
-  ;; H
-  (assert (>= X_2 0.0))
-  
-  ;; H
-  (assert (<= X_2 1.0))
-  
-  ;; H
-  (assert (>= X_3 0.0))
-  
-  ;; H
-  (assert (<= X_3 1.0))
-  
-  ;; H
-  (assert (>= X_4 0.0))
-  
-  ;; H
-  (assert (<= X_4 1.0))
-  
-  ;; H
-  (assert (>= X_0 0.93117647100000000559560930923908017575740814208984375))
-  
-  ;; H
-  (assert (<= X_0 0.95117647100000002335917770324158482253551483154296875))
-  
-  ;; H
-  (assert (>= X_1 -0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (<= X_1 0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (>= X_2 -0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (<= X_2 0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (>= X_3 0.00176470599999999956664087363833459676243364810943603515625))
-  
-  ;; H
-  (assert (<= X_3 0.021764706000000001717697983849575393833220005035400390625))
-  
-  ;; H
-  (assert (>= X_4 0.029215685999999997657372574622058891691267490386962890625))
-  
-  ;; H
-  (assert (<= X_4 0.049215686000000001543153160810106783173978328704833984375))
-  
-  ;; Y_0
-  (declare-const Y_0 Real)
-  
-  ;; Y_1
-  (declare-const Y_1 Real)
-  
-  ;; Y_2
-  (declare-const Y_2 Real)
-  
-  ;; Y_3
-  (declare-const Y_3 Real)
-  
-  ;; Y_4
-  (declare-const Y_4 Real)
-  
-  ;; Goal H
-  (assert (>= Y_2 Y_0))
-  
-  [DEBUG]{ProverSpec} Prover-tailored specification:
-  ;;; produced by PyRAT/VNN-LIB driver
-  ;;; produced by VNN-LIB driver
-  ;; X_0
-  (declare-const X_0 Real)
-  
-  ;; X_1
-  (declare-const X_1 Real)
-  
-  ;; X_2
-  (declare-const X_2 Real)
-  
-  ;; X_3
-  (declare-const X_3 Real)
-  
-  ;; X_4
-  (declare-const X_4 Real)
-  
-  ;; H
-  (assert (>= X_0 0.0))
-  
-  ;; H
-  (assert (<= X_0 1.0))
-  
-  ;; H
-  (assert (>= X_1 0.0))
-  
-  ;; H
-  (assert (<= X_1 1.0))
-  
-  ;; H
-  (assert (>= X_2 0.0))
-  
-  ;; H
-  (assert (<= X_2 1.0))
-  
-  ;; H
-  (assert (>= X_3 0.0))
-  
-  ;; H
-  (assert (<= X_3 1.0))
-  
-  ;; H
-  (assert (>= X_4 0.0))
-  
-  ;; H
-  (assert (<= X_4 1.0))
-  
-  ;; H
-  (assert (>= X_0 0.93117647100000000559560930923908017575740814208984375))
-  
-  ;; H
-  (assert (<= X_0 0.95117647100000002335917770324158482253551483154296875))
-  
-  ;; H
-  (assert (>= X_1 -0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (<= X_1 0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (>= X_2 -0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (<= X_2 0.01000000000000000020816681711721685132943093776702880859375))
-  
-  ;; H
-  (assert (>= X_3 0.00176470599999999956664087363833459676243364810943603515625))
-  
-  ;; H
-  (assert (<= X_3 0.021764706000000001717697983849575393833220005035400390625))
-  
-  ;; H
-  (assert (>= X_4 0.029215685999999997657372574622058891691267490386962890625))
-  
-  ;; H
-  (assert (<= X_4 0.049215686000000001543153160810106783173978328704833984375))
-  
-  ;; Y_0
-  (declare-const Y_0 Real)
-  
-  ;; Y_1
-  (declare-const Y_1 Real)
-  
-  ;; Y_2
-  (declare-const Y_2 Real)
-  
-  ;; Y_3
-  (declare-const Y_3 Real)
-  
-  ;; Y_4
-  (declare-const Y_4 Real)
-  
-  ;; Goal H
-  (assert (>= Y_3 Y_0))
+  (assert
+    (or
+      (and (>= Y_0 Y_1))
+      (and (>= Y_2 Y_1))
+      (and (>= Y_3 Y_1))
+      (and (>= Y_4 Y_1))
+      ))
   
   [DEBUG]{ProverSpec} Prover-tailored specification:
   ;;; produced by PyRAT/VNN-LIB driver
@@ -802,7 +232,13 @@ Test verify on dataset
   (declare-const Y_4 Real)
   
   ;; Goal H
-  (assert (>= Y_4 Y_0))
+  (assert
+    (or
+      (and (>= Y_1 Y_0))
+      (and (>= Y_2 Y_0))
+      (and (>= Y_3 Y_0))
+      (and (>= Y_4 Y_0))
+      ))
   
   Goal H: Unknown ()
 
diff --git a/tests/define.t b/tests/define.t
index b4a88529b2df664d5d0e3f51f10fa82f1a65822f..6b27141cb85234c964fef15fa2ebb86329a9dcdd 100644
--- a/tests/define.t
+++ b/tests/define.t
@@ -18,10 +18,10 @@ Test interpretation of on-the-fly definition of toplevel constants
   [DEBUG]{InterpretGoal} Interpreted formula for goal 'G':
   true
   
-  [ERROR] Invalid argument: No neural network model found in task
+  [ERROR] No neural network model found
 
   $ caisar verify --prover PyRAT file.mlw -Dn:2
-  [ERROR] Invalid argument: No neural network model found in task
+  [ERROR] No neural network model found
 
   $ cat > file.mlw <<EOF
   > theory T
@@ -37,10 +37,10 @@ Test interpretation of on-the-fly definition of toplevel constants
   [DEBUG]{InterpretGoal} Interpreted formula for goal 'G':
   true
   
-  [ERROR] Invalid argument: No neural network model found in task
+  [ERROR] No neural network model found
 
   $ caisar verify --prover PyRAT file.mlw -D n:bar
-  [ERROR] Invalid argument: No neural network model found in task
+  [ERROR] No neural network model found
 
   $ cat > file.mlw <<EOF
   > theory T
@@ -54,7 +54,7 @@ Test interpretation of on-the-fly definition of toplevel constants
   [DEBUG]{InterpretGoal} Interpreted formula for goal 'G':
   true
   
-  [ERROR] Invalid argument: No neural network model found in task
+  [ERROR] No neural network model found
 
   $ caisar verify --prover PyRAT file.mlw -D n:foo
   [ERROR] 'n' expects a numerical constant, got 'foo' instead
@@ -72,7 +72,7 @@ Test interpretation of on-the-fly definition of toplevel constants
   [DEBUG]{InterpretGoal} Interpreted formula for goal 'G':
   true
   
-  [ERROR] Invalid argument: No neural network model found in task
+  [ERROR] No neural network model found
 
   $ cat > file.mlw <<EOF
   > theory T
@@ -86,10 +86,10 @@ Test interpretation of on-the-fly definition of toplevel constants
   [DEBUG]{InterpretGoal} Interpreted formula for goal 'G':
   true
   
-  [ERROR] Invalid argument: No neural network model found in task
+  [ERROR] No neural network model found
 
   $ caisar verify --prover PyRAT file.mlw -D n:false
-  [ERROR] Invalid argument: No neural network model found in task
+  [ERROR] No neural network model found
 
   $ cat > file.mlw <<EOF
   > theory T
@@ -103,13 +103,13 @@ Test interpretation of on-the-fly definition of toplevel constants
   [DEBUG]{InterpretGoal} Interpreted formula for goal 'G':
   true
   
-  [ERROR] Invalid argument: No neural network model found in task
+  [ERROR] No neural network model found
 
   $ caisar verify --prover PyRAT file.mlw -D n:false --ltag=InterpretGoal
   [DEBUG]{InterpretGoal} Interpreted formula for goal 'G':
   false
   
-  [ERROR] Invalid argument: No neural network model found in task
+  [ERROR] No neural network model found
 
   $ caisar verify --prover PyRAT file.mlw -D n:boh --ltag=InterpretGoal
   [ERROR] 'n' expects 'true' or 'false', got 'boh' instead
diff --git a/tests/nier_to_onnx.t b/tests/nier_to_onnx.t
index 9074b74cf2cf215062fd5a26073f36c71bc27e0d..866359fa03735cf4107ba5b95f0b2337382ec92c 100644
--- a/tests/nier_to_onnx.t
+++ b/tests/nier_to_onnx.t
@@ -20,7 +20,7 @@ Test verify
   >       (0.5:t) .< (nn @@ i)[0] .< (0.5:t)
   > end
   > EOF
-  [DEBUG]{NIER} Wrote NIER as ONNX in file 'out/onnx_nn.nier.onnx'
+  [DEBUG]{NIER} Wrote NIER as ONNX in file 'out/nn_onnx.nier.onnx'
   Goal G: Unknown ()
 
 Data should be 0.135
diff --git a/tests/saver.t b/tests/saver.t
index aa06068757c49342f67975ea0128d0eed0f10c3b..aacb8835bdeea445d9e62208c564a04d8936eee4 100644
--- a/tests/saver.t
+++ b/tests/saver.t
@@ -1,7 +1,7 @@
 Test verify
-  $ cat - > test_data.csv << EOF
+  $ cat - > dataset.csv << EOF
   > # 1 3
-  > 1,0.5,0.5,0.0,
+  > 1,0.5,0.5,0.0
   > EOF
 
   $ . ./setup_env.sh
@@ -9,7 +9,7 @@ Test verify
   $ bin/saver --version
   v1.0
 
-  $ caisar verify -L . --format whyml --prover=SAVer --dataset=test_data.csv - <<EOF
+  $ caisar verify -L . --format whyml --prover=SAVer --dataset=dataset.csv - <<EOF
   > theory T
   >   use TestSVM.AsArray
   >   use ieee_float.Float64
@@ -24,3 +24,63 @@ Test verify
   Goal G: Valid (2/2)
   Goal H: Invalid (1/2)
   Goal I: Unknown (0/2)
+
+  $ cat - > dataset.csv << EOF
+  > 1,0.5,0.5,0.0
+  > 3,0.3,0.1,0.8
+  > EOF
+
+  $ caisar verify --format whyml --prover=SAVer - <<EOF
+  > theory T
+  >   use ieee_float.Float64
+  >   use int.Int
+  >   use caisar.types.Float64WithBounds as Feature
+  >   use caisar.types.IntWithBounds as Label
+  >   use caisar.model.SVM
+  >   use caisar.dataset.CSV
+  >   use caisar.robust.ClassRobustCSV
+  > 
+  >   constant label_bounds: Label.bounds =
+  >     Label.{ lower = 0; upper = 9 }
+  > 
+  >   constant feature_bounds: Feature.bounds =
+  >     Feature.{ lower = (0.0:t); upper = (1.0:t) }
+  > 
+  >   goal G:
+  >     let svm = read_model "TestSVM.ovo" in
+  >     let dataset = read_dataset "dataset.csv" in
+  >     let eps = (0.0100000000000000002081668171172168513294309377670288085937500000:t) in
+  >     robust feature_bounds label_bounds svm dataset eps
+  > 
+  >   goal H:
+  >     let svm = read_model_with_abstraction "TestSVM.ovo" Raf in
+  >     let dataset = read_dataset "dataset.csv" in
+  >     let eps = (0.0100000000000000002081668171172168513294309377670288085937500000:t) in
+  >     robust feature_bounds label_bounds svm dataset eps
+  > end
+  > EOF
+  Goal G: Unknown ()
+  Goal H: Unknown ()
+
+  $ caisar verify --format whyml --prover=SAVer - <<EOF
+  > theory T
+  >   use ieee_float.Float64
+  >   use int.Int
+  >   use caisar.types.Vector
+  >   use caisar.types.Float64WithBounds as Feature
+  >   use caisar.types.IntWithBounds as Label
+  >   use caisar.model.SVM
+  >   use caisar.dataset.CSV
+  >   use caisar.robust.ClassRobustCSV
+  > 
+  >   goal G:
+  >     let svm = read_model "TestSVM.ovo" in
+  >     let eps = (0.375:t) in
+  >     forall x: vector t. has_length x 3 ->
+  >       (.- eps) .<= x[0] .- (0.299999999999999988897769753748434595763683319091796875:t) .<= eps
+  >       /\ (.- eps) .<= x[1] .- (0.8000000000000000444089209850062616169452667236328125:t) .<= eps
+  >       /\ (.- eps) .<= x[2] .- (0.5:t) .<= eps ->
+  >       (svm @@ x)[0] .<= (svm @@ x)[1]
+  > end
+  > EOF
+  Goal G: Unknown ()