diff --git a/lib/ir/dune b/lib/ir/dune index a0fa35532b0d61a3eeb6729f67a90259145fd13d..ff97750bd5db22e23ce3c9ea75a9f252d7d6555c 100644 --- a/lib/ir/dune +++ b/lib/ir/dune @@ -11,7 +11,7 @@ ppx_deriving.iter ppx_deriving.fold)) (inline_tests) - (libraries base ocamlgraph fmt stdio)) + (libraries base ocamlgraph fmt stdio unix)) (env (dev diff --git a/lib/ir/nier_simple.ml b/lib/ir/nier_simple.ml index eba5bd2c92826df32a2185105d756f92d1cd0d60..ad9ff4d9ba14f498af79674aa51b052d4169edac 100644 --- a/lib/ir/nier_simple.ml +++ b/lib/ir/nier_simple.ml @@ -661,3 +661,17 @@ module Dot = Graph.Graphviz.Dot (struct let default_vertex_attributes _ = [] let graph_attributes _ = [] end) + +let grapheasy g = + try + let cin, cout = + Unix.open_process_args "graph-easy" + [| "graph-easy"; "--from=graphviz"; "--as=boxart" |] + in + Dot.output_graph cout g; + Stdlib.close_out cout; + let ascii = Stdio.In_channel.input_all cin in + ignore (Unix.close_process (cin, cout)); + ascii + with exn -> + Fmt.str "Error graph-easy call: %s" (Stdlib.Printexc.to_string exn) diff --git a/lib/ir/nier_simple.mli b/lib/ir/nier_simple.mli index c928e3090854ac94dfdae4804089e17ec752b7ba..53d8888f06fc162a00b555d53c04daf126f05e50 100644 --- a/lib/ir/nier_simple.mli +++ b/lib/ir/nier_simple.mli @@ -245,3 +245,6 @@ module Dot : sig val fprint_graph : Format.formatter -> GFloat.t -> unit val output_graph : out_channel -> GFloat.t -> unit end + +val grapheasy : t -> string +(** Return an ASCII representation of the graph using the "graph-easy" external command. *) \ No newline at end of file diff --git a/src/logging.ml b/src/logging.ml index e76e20b90e5f454a9b73f8af0f32868a31d07fac..77cd61598d29c2601bf1465a4283546cc66a665e 100644 --- a/src/logging.ml +++ b/src/logging.ml @@ -32,16 +32,7 @@ let src_interpret_goal = let src_nier = Logs.Src.create "NIER" ~doc:"Neural Intermediate Representation" let src_stack_trace = Logs.Src.create "StackTrace" ~doc:"Print stack trace" - -let all_srcs () = - [ - src_autodetect; - 
src_prover_spec; - src_prover_call; - src_interpret_goal; - src_nier; - src_stack_trace; - ] +let all_srcs () = Logs.Src.list () let is_debug_level src = match Logs.Src.level src with Some Debug -> true | _ -> false diff --git a/src/transformations/native_nn_prover.ml b/src/transformations/native_nn_prover.ml index a119a2fd665f501aec8e5b90d5c7820619f23b88..4e18e859b0f49d847757cd1e8311197aaecbf5bf 100644 --- a/src/transformations/native_nn_prover.ml +++ b/src/transformations/native_nn_prover.ml @@ -23,6 +23,10 @@ open Base let src = Logs.Src.create "NN-Gen" ~doc:"Generation of neural networks" +let src_term = Logs.Src.create "NN-Gen-Term" ~doc:"Generation of new formula" + +let src_show = + Logs.Src.create "NN-Gen-Show" ~doc:"Show generated neural networks" type new_output = { index : int; @@ -192,6 +196,7 @@ let create_new_nn env input_vars outputs : string = assert ( IR.Shape.equal output.shape (IR.Shape.of_array [| List.length outputs |])); let nn = IR.create output in + Logs.debug ~src:src_show (fun f -> f "@.%s@." (IR.grapheasy nn)); Logs.debug ~src (fun f -> f "@[<v>%a@]@." IR.pp_debug nn); let filename = tempfile () in Onnx.Simple.write nn filename; @@ -289,6 +294,8 @@ let abstract_nn_term env = (input_vars, task) | Decl { d_node = Dprop (Pgoal, pr, goal); _ } -> let output_vars, goal = simplify_as_output_vars goal in + Logs.debug ~src:src_term (fun f -> + f "new goal:%a" Why3.Pretty.print_term goal); (* Add output variable declarations and corresponding meta. 
*) let task = Why3.Term.Mterm.fold_left diff --git a/tests/acasxu.t b/tests/acasxu.t index 5f4fb0653afd4b83ea3e5906550a3a00f5bc431b..8684658a34e9a7702fa048c8943da676b6e9cd5e 100644 --- a/tests/acasxu.t +++ b/tests/acasxu.t @@ -164,7 +164,7 @@ Test verify on acasxu > end > EOF - $ caisar verify --prover PyRAT --ltag=ProverSpec --ltag=InterpretGoal file.mlw + $ caisar verify --prover PyRAT --ltag=ProverSpec --ltag=InterpretGoal --ltag=NN-Gen-Term file.mlw [DEBUG]{InterpretGoal} Interpreted formula for goal 'runP1'vc': forall x:t, x1:t, x2:t, x3:t, x4:t. ((le (0.0:t) x /\ le x (60760.0:t)) /\ @@ -198,6 +198,12 @@ Test verify on acasxu nn_format = <nier> })) vector, 5 + [DEBUG]{NN-Gen-Term} new goal:le + (add RNE + (mul RNE y + (373.9499200000000200816430151462554931640625:t)) + (7.51888402010059753166615337249822914600372314453125:t)) + (1500.0:t) [DEBUG]{ProverSpec} Prover-tailored specification: 0.0 <= x0 x0 <= 60760.0 @@ -268,6 +274,18 @@ Test verify on acasxu nn_format = <nier> })) vector, 5 + [DEBUG]{NN-Gen-Term} new goal:le + (add RNE + (mul RNE y1 + (373.9499200000000200816430151462554931640625:t)) + (7.51888402010059753166615337249822914600372314453125:t)) + (1500.0:t) /\ + le + (add RNE + (mul RNE y2 + (373.9499200000000200816430151462554931640625:t)) + (7.51888402010059753166615337249822914600372314453125:t)) + (1500.0:t) [DEBUG]{ProverSpec} Prover-tailored specification: 0.0 <= x0 x0 <= 60760.0 @@ -341,6 +359,7 @@ Test verify on acasxu nn_format = <nier> })) vector, 5 + [DEBUG]{NN-Gen-Term} new goal:le y3 y4 /\ le y4 y3 [DEBUG]{ProverSpec} Prover-tailored specification: 0.0 <= x0 x0 <= 60760.0 @@ -416,6 +435,7 @@ Test verify on acasxu nn_format = <nier> })) vector, 5 + [DEBUG]{NN-Gen-Term} new goal:le y5 y6 /\ le y6 y5 [DEBUG]{ProverSpec} Prover-tailored specification: 0.0 <= x0 x0 <= 60760.0 @@ -493,6 +513,7 @@ Test verify on acasxu nn_format = <nier> })) vector, 5 + [DEBUG]{NN-Gen-Term} new goal:le y7 y8 /\ le y8 y7 [DEBUG]{ProverSpec} Prover-tailored 
specification: 0.0 <= x0 x0 <= 60760.0