Commit b142a947 authored by Virgile Prevosto

[aorai] add missing mli and do some slight refactoring.

All lexers now abort in the same way in case of a lexing/parsing error.
parent fe290f3e
Showing changes with 332 additions and 261 deletions
@@ -761,6 +761,7 @@ src/plugins/aorai/aorai_register.mli: AORAI_LGPL
src/plugins/aorai/aorai_utils.ml: AORAI_LGPL
src/plugins/aorai/aorai_utils.mli: AORAI_LGPL
src/plugins/aorai/aorai_visitors.ml: AORAI_LGPL
src/plugins/aorai/aorai_visitors.mli: AORAI_LGPL
src/plugins/aorai/bool3.ml: AORAI_LGPL
src/plugins/aorai/bool3.mli: AORAI_LGPL
src/plugins/aorai/configure.ac: AORAI_LGPL
@@ -771,17 +772,23 @@ src/plugins/aorai/logic_simplification.mli: AORAI_LGPL
src/plugins/aorai/ltl_output.ml: AORAI_LGPL
src/plugins/aorai/ltl_output.mli: AORAI_LGPL
src/plugins/aorai/ltlast.mli: AORAI_LGPL
src/plugins/aorai/ltllexer.mli: AORAI_LGPL
src/plugins/aorai/ltllexer.mll: AORAI_LGPL
src/plugins/aorai/ltlparser.mly: AORAI_LGPL
src/plugins/aorai/path_analysis.ml: AORAI_LGPL
src/plugins/aorai/path_analysis.mli: AORAI_LGPL
src/plugins/aorai/promelaast.mli: AORAI_LGPL
src/plugins/aorai/promelalexer.mli: AORAI_LGPL
src/plugins/aorai/promelalexer.mll: AORAI_LGPL
src/plugins/aorai/promelalexer_withexps.mli: AORAI_LGPL
src/plugins/aorai/promelalexer_withexps.mll: AORAI_LGPL
src/plugins/aorai/promelaoutput.ml: AORAI_LGPL
src/plugins/aorai/promelaoutput.mli: AORAI_LGPL
src/plugins/aorai/promelaparser.mly: AORAI_LGPL
src/plugins/aorai/promelaparser_withexps.mly: AORAI_LGPL
src/plugins/aorai/utils_parser.ml: AORAI_LGPL
src/plugins/aorai/utils_parser.mli: AORAI_LGPL
src/plugins/aorai/yalexer.mli: AORAI_LGPL
src/plugins/aorai/yalexer.mll: AORAI_LGPL
src/plugins/aorai/yaparser.mly: AORAI_LGPL
src/plugins/callgraph/Callgraph.mli: CEA_LGPL_OR_PROPRIETARY
......
@@ -19,3 +19,4 @@
(* for more details (enclosed in the file licenses/LGPLv2.1). *)
(* *)
(**************************************************************************)
@@ -68,16 +68,6 @@ let convert_ltl_exprs t =
(* Promela file *)
let syntax_error loc msg =
Aorai_option.abort
"File %S, line %d, characters %d-%d:@\nSyntax error: %s"
(Filepath.Normalized.to_pretty_string
(Datatype.Filepath.of_string (fst loc).Lexing.pos_fname))
(fst loc).Lexing.pos_lnum
((fst loc).Lexing.pos_cnum - (fst loc).Lexing.pos_bol)
((snd loc).Lexing.pos_cnum - (fst loc).Lexing.pos_bol)
msg
(* Performs some checks before calling [open_in f], reporting ["errmsg: <f>"]
in case of error. *)
let check_and_open_in (f : Filepath.Normalized.t) errmsg =
@@ -86,30 +76,11 @@ let check_and_open_in (f : Filepath.Normalized.t) errmsg =
open_in (f :> string)
let ltl_to_ltlLight f_ltl (f_out : Filepath.Normalized.t) =
try
let c = check_and_open_in f_ltl "invalid LTL file" in
let (ltl_form,exprs) = Ltllexer.parse c in
close_in c;
Ltl_output.output ltl_form (f_out :> string);
set_ltl_correspondence exprs
with
| Ltllexer.Error (loc,msg) -> syntax_error loc msg
let parse_error' lexbuf msg =
let open Lexing in
let start_p = Cil_datatype.Position.of_lexing_pos (lexeme_start_p lexbuf)
and end_p = Cil_datatype.Position.of_lexing_pos (lexeme_end_p lexbuf)
and lexeme = Lexing.lexeme lexbuf in
let start_line = start_p.Filepath.pos_lnum in
let abort str =
Aorai_option.feedback ~source:start_p "%s@.%a, before or at token: %s\n%a@."
str
Errorloc.pp_location (start_p, end_p)
lexeme
(Errorloc.pp_context_from_file ~start_line ~ctx:2) start_p;
raise (Log.AbortError "aorai")
in
Pretty_utils.ksfprintf abort msg
let c = check_and_open_in f_ltl "invalid LTL file" in
let (ltl_form,exprs) = Ltllexer.parse c in
close_in c;
Ltl_output.output ltl_form (f_out :> string);
set_ltl_correspondence exprs
let load_ya_file filename =
let channel = check_and_open_in filename "invalid Ya file" in
@@ -121,30 +92,21 @@ let load_ya_file filename =
close_in channel;
Data_for_aorai.setAutomata automata
with
| Parsing.Parse_error | Invalid_argument _ ->
parse_error' lexbuf "syntax error"
| Yalexer.Lexing_error msg ->
parse_error' lexbuf "%s" msg
| Parsing.Parse_error ->
Utils_parser.abort_current lexbuf "syntax error"
let load_promela_file f =
try
let c = check_and_open_in f "invalid Promela file" in
let auto = Promelalexer.parse c in
let trans = convert_ltl_exprs auto.trans in
close_in c;
Data_for_aorai.setAutomata { auto with trans };
with
| Promelalexer.Error(loc,msg) -> syntax_error loc msg
let c = check_and_open_in f "invalid Promela file" in
let auto = Promelalexer.parse c in
let trans = convert_ltl_exprs auto.trans in
close_in c;
Data_for_aorai.setAutomata { auto with trans }
let load_promela_file_withexps f =
try
let c = check_and_open_in f "invalid Promela file" in
let automata = Promelalexer_withexps.parse c in
close_in c;
Data_for_aorai.setAutomata automata;
with
| Promelalexer_withexps.Error(loc,msg) -> syntax_error loc msg
let c = check_and_open_in f "invalid Promela file" in
let automata = Promelalexer_withexps.parse c in
close_in c;
Data_for_aorai.setAutomata automata
let display_status () =
if Aorai_option.verbose_atleast 2 then begin
@@ -265,9 +227,6 @@ let work () =
else
load_promela_file !promela_file;
printverb "Loading promela : done\n";
(* Computing the list of ignored functions *)
(* Aorai_visitors.compute_ignored_functions file; *)
(* Promelaoutput.print_raw_automata (Data_for_aorai.getAutomata()); *)
(* Data_for_aorai.debug_ltl_expressions (); *)
......
(**************************************************************************)
(* *)
(* This file is part of Aorai plug-in of Frama-C. *)
(* *)
(* Copyright (C) 2007-2021 *)
(* CEA (Commissariat à l'énergie atomique et aux énergies *)
(* alternatives) *)
(* INRIA (Institut National de Recherche en Informatique et en *)
(* Automatique) *)
(* INSA (Institut National des Sciences Appliquees) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
(* Lesser General Public License as published by the Free Software *)
(* Foundation, version 2.1. *)
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
(* See the GNU Lesser General Public License version 2.1 *)
(* for more details (enclosed in the file licenses/LGPLv2.1). *)
(* *)
(**************************************************************************)
(** visitors performing the instrumentation *)
module Aux_funcs: sig
(** the various kinds of auxiliary functions. *)
type kind =
| Not_aux_func (** original C function. *)
| Aux of Cil_types.kernel_function
(** Checks whether we are in the corresponding behavior
of the function. *)
| Pre of Cil_types.kernel_function
(** [Pre f] denotes a function updating the automaton
when [f] is called. *)
| Post of Cil_types.kernel_function
(** [Post f] denotes a function updating the automaton
when returning from [f]. *)
val iter: (Cil_types.varinfo -> kind -> unit) -> unit
(** [iter f] calls [f] on all functions registered so far by
{!add_sync_with_buch}
*)
end
(** generate prototypes for auxiliary functions. *)
val add_sync_with_buch: Cil_types.file -> unit
(**
[add_pre_post_from_buch ast treatloop]
provides contracts and/or bodies for auxiliary functions
(once they have been generated). If [treatloop] is [true],
loop invariants are also generated.
*)
val add_pre_post_from_buch: Cil_types.file -> bool -> unit
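The Aux_funcs interface above can be exercised as in the following illustrative sketch (not part of this commit); it assumes the standard Frama-C accessors Kernel_function.get_name and the vname field of Cil_types.varinfo:

let report_aux_functions () =
  Aorai_visitors.Aux_funcs.iter
    (fun vi kind ->
       match kind with
       | Aorai_visitors.Aux_funcs.Not_aux_func -> ()
         (* original C function: nothing to report *)
       | Aorai_visitors.Aux_funcs.Aux kf ->
         Aorai_option.feedback "%s checks the behaviors of %s"
           vi.Cil_types.vname (Kernel_function.get_name kf)
       | Aorai_visitors.Aux_funcs.Pre kf ->
         Aorai_option.feedback "%s updates the automaton when %s is called"
           vi.Cil_types.vname (Kernel_function.get_name kf)
       | Aorai_visitors.Aux_funcs.Post kf ->
         Aorai_option.feedback "%s updates the automaton when returning from %s"
           vi.Cil_types.vname (Kernel_function.get_name kf))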
(**************************************************************************)
(* *)
(* This file is part of Aorai plug-in of Frama-C. *)
(* *)
(* Copyright (C) 2007-2021 *)
(* CEA (Commissariat à l'énergie atomique et aux énergies *)
(* alternatives) *)
(* INRIA (Institut National de Recherche en Informatique et en *)
(* Automatique) *)
(* INSA (Institut National des Sciences Appliquees) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
(* Lesser General Public License as published by the Free Software *)
(* Foundation, version 2.1. *)
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
(* See the GNU Lesser General Public License version 2.1 *)
(* for more details (enclosed in the file licenses/LGPLv2.1). *)
(* *)
(**************************************************************************)
val parse:
in_channel ->
Ltlast.formula *
(string,
(Logic_ptree.relation * Promelaast.expression * Promelaast.expression))
Hashtbl.t
@@ -28,47 +28,10 @@
(* from http://www.ltl2dstar.de/down/ltl2dstar-0.4.2.zip *)
{
open Ltlparser
open Lexing
let loc lexbuf = (lexeme_start_p lexbuf, lexeme_end_p lexbuf)
(*let lex_error lexbuf s = ()*)
(* Creport.raise_located (loc lexbuf) (AnyMessage ("lexical error: " ^ s))
*)
let buf = Buffer.create 1024
let newline lexbuf =
let pos = lexbuf.lex_curr_p in
lexbuf.lex_curr_p <-
{ pos with pos_lnum = pos.pos_lnum + 1; pos_bol = pos.pos_cnum }
(* Update the current location with file name and line number. *)
(* let update_loc lexbuf file line absolute chars =
let pos = lexbuf.lex_curr_p in
let new_file = match file with
| None -> pos.pos_fname
| Some s -> s
in
lexbuf.lex_curr_p <-
{ pos with
pos_fname = new_file;
pos_lnum = if absolute then line else pos.pos_lnum + line;
pos_bol = pos.pos_cnum - chars;
}
*)
exception Error of (Lexing.position * Lexing.position) * string
let raise_located loc e = raise (Error (loc, e))
open Ltlparser
open Lexing
}
let rD = ['0'-'9']
let rL = ['a'-'z' 'A'-'Z' '_']
@@ -119,11 +82,11 @@ rule token = parse
(* Comments *)
| "/*" { comment lexbuf; token lexbuf }
| "//" [^ '\n']* '\n' { newline lexbuf; token lexbuf }
| "//" [^ '\n']* '\n' { Utils_parser.newline lexbuf; token lexbuf }
(* Spaces *)
| [' ' '\t' '\012' '\r']+ { token lexbuf }
| '\n' { newline lexbuf; token lexbuf }
| '\n' { Utils_parser.newline lexbuf; token lexbuf }
(* Variables and constants *)
| rD+ | '-' rD+ { LTL_INT (lexeme lexbuf) }
@@ -131,15 +94,12 @@
(* Others *)
| eof { EOF }
| _ {
raise_located (loc lexbuf)
(Format.sprintf "Illegal_character %s\n" (lexeme lexbuf))
}
| _ { Utils_parser.unknown_token lexbuf }
and comment = parse
| "*/" { () }
| eof { raise_located (loc lexbuf) "Unterminated_comment\n" }
| '\n' { newline lexbuf; comment lexbuf }
| eof { Utils_parser.abort_current lexbuf "Unterminated_comment" }
| '\n' { Utils_parser.newline lexbuf; comment lexbuf }
| _ { comment lexbuf }
@@ -149,6 +109,5 @@ and comment = parse
try
Ltlparser.ltl token lb
with
Parsing.Parse_error
| Invalid_argument _ -> raise_located (loc lb) "Syntax error"
Parsing.Parse_error -> Utils_parser.abort_current lb "Syntax error"
}
@@ -24,56 +24,6 @@
(**************************************************************************)
open Promelaast
(*open Graph.Pack.Digraph
let st_array = ref (Array.make 1 (V.create 0)) ;;
let auto2digraph (stl,trl) =
Aorai_option.feedback "auto2digraph:" ;
let digraph = create () in
st_array:= Array.make (List.length stl) (V.create 0);
Aorai_option.feedback " array : ok\n" ;
let _ = List.iter
(fun st ->
(!st_array).(st.nums)<-(V.create st.nums);
add_vertex digraph (!st_array).(st.nums)
)
stl
in
Aorai_option.feedback " array remplissage : ok\n" ;
List.iter
(fun tr -> add_edge digraph (V.create tr.start.nums) (V.create tr.stop.nums))
trl;
digraph
;;
let existing_path auto st1 st2 =
Aorai_option.feedback "existing path ..\n" ;
let digraph = auto2digraph auto in
let start = (!st_array).(st1.nums) in
let stop = (!st_array).(st2.nums) in
Aorai_option.feedback "%s" ("test : Etats choisis ("^(string_of_int (V.label start))^","^(string_of_int (V.label stop))^")\n") ;
display_with_gv digraph;
Aorai_option.feedback " affichage : ok\n" ;
Aorai_option.feedback "shortest path : " ;
let path=shortest_path digraph start stop in
Aorai_option.feedback "done.\n" ;
path
;;
let test (stl,trl) =
let st2 = List.hd stl in
let st1 = List.hd (List.tl stl) in
let _ = existing_path (stl,trl) st1 st2 in
Aorai_option.feedback "Fini.\n" ;
()
;;
*)
let voisins (_,trans_l) st =
List.fold_left
......
(**************************************************************************)
(* *)
(* This file is part of Aorai plug-in of Frama-C. *)
(* *)
(* Copyright (C) 2007-2021 *)
(* CEA (Commissariat à l'énergie atomique et aux énergies *)
(* alternatives) *)
(* INRIA (Institut National de Recherche en Informatique et en *)
(* Automatique) *)
(* INSA (Institut National des Sciences Appliquees) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
(* Lesser General Public License as published by the Free Software *)
(* Foundation, version 2.1. *)
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
(* See the GNU Lesser General Public License version 2.1 *)
(* for more details (enclosed in the file licenses/LGPLv2.1). *)
(* *)
(**************************************************************************)
(** [get_edges s1 s2 g] retrieves all edges in [g] between [s1] and [s2]. *)
val get_edges:
Promelaast.state -> Promelaast.state -> ('c,'a) Promelaast.graph
-> ('c, 'a) Promelaast.trans list
(** retrieve all edges starting at the given node. *)
val get_transitions_of_state:
Promelaast.state -> ('c,'a) Promelaast.graph -> ('c,'a) Promelaast.trans list
(** return the initial states of the graph. *)
val get_init_states: ('c, 'a) Promelaast.graph -> Promelaast.state list
(** [true] iff there's at most one path between the two states in the graph. *)
val at_most_one_path:
('c, 'a) Promelaast.graph -> Promelaast.state -> Promelaast.state -> bool
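A hedged usage sketch for this interface (not part of the commit); it assumes Data_for_aorai.getAutomata returns an automaton record as declared in promelaast.mli and that Promelaast.state exposes the nums field used elsewhere in the plug-in:

let check_unique_paths () =
  (* warn whenever two distinct paths connect an initial state to some state *)
  let auto = Data_for_aorai.getAutomata () in
  let graph = (auto.Promelaast.states, auto.Promelaast.trans) in
  let inits = Path_analysis.get_init_states graph in
  List.iter
    (fun init ->
       List.iter
         (fun st ->
            if not (Path_analysis.at_most_one_path graph init st) then
              Aorai_option.warning "several paths between states %d and %d"
                init.Promelaast.nums st.Promelaast.nums)
         auto.Promelaast.states)
    inits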
@@ -132,6 +132,8 @@ type ('c,'a) trans = {
mutable numt : int (** Numerical ID of the transition *)
}
type ('c,'a) graph = state list * ('c, 'a) trans list
type parsed_trans = (guard, action) trans
type typed_trans = (typed_condition, typed_action) trans
@@ -140,7 +142,7 @@ type typed_trans = (typed_condition, typed_action) trans
of transitions.*)
type ('c,'a) automaton = {
states: state list;
trans: (('c,'a) trans) list;
trans: ('c,'a) trans list;
metavariables: Cil_types.varinfo Datatype.String.Map.t;
observables: Datatype.String.Set.t option;
}
......
(**************************************************************************)
(* *)
(* This file is part of Aorai plug-in of Frama-C. *)
(* *)
(* Copyright (C) 2007-2021 *)
(* CEA (Commissariat à l'énergie atomique et aux énergies *)
(* alternatives) *)
(* INRIA (Institut National de Recherche en Informatique et en *)
(* Automatique) *)
(* INSA (Institut National des Sciences Appliquees) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
(* Lesser General Public License as published by the Free Software *)
(* Foundation, version 2.1. *)
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
(* See the GNU Lesser General Public License version 2.1 *)
(* for more details (enclosed in the file licenses/LGPLv2.1). *)
(* *)
(**************************************************************************)
val parse: in_channel -> Promelaast.parsed_automaton
@@ -30,19 +30,6 @@
{
open Promelaparser
open Lexing
exception Error of (Lexing.position * Lexing.position) * string
let loc lexbuf = (lexeme_start_p lexbuf, lexeme_end_p lexbuf)
let raise_located loc e = raise (Error (loc, e))
let buf = Buffer.create 1024
let newline lexbuf =
let pos = lexbuf.lex_curr_p in
lexbuf.lex_curr_p <-
{ pos with pos_lnum = pos.pos_lnum + 1; pos_bol = pos.pos_cnum }
}
let rD = ['0'-'9']
@@ -69,9 +56,9 @@ rule token = parse
| "&&" { PROMELA_AND }
| '!' { PROMELA_NOT }
| [' ' '\t' '\012' '\r']+ { token lexbuf }
| '\n' { newline lexbuf; token lexbuf }
| '\n' { Utils_parser.newline lexbuf; token lexbuf }
| "/*" { comment lexbuf; token lexbuf }
| "//" [^ '\n']* '\n' { newline lexbuf; token lexbuf }
| "//" [^ '\n']* '\n' { Utils_parser.newline lexbuf; token lexbuf }
| "callof_" rL* (rL | rD)*
{ let s=(lexeme lexbuf) in
@@ -86,53 +73,30 @@ rule token = parse
let s=String.sub s 15 ((String.length s)-15) in
PROMELA_CALLORRETURNOF s }
| "callof_" { Utils_parser.abort_current lexbuf
"Illegal function name in Promela file." }
| "returnof_" { Utils_parser.abort_current lexbuf
"Illegal function name in Promela file." }
| "callorreturnof_" { Utils_parser.abort_current lexbuf
"Illegal function name in Promela file." }
| "callof_" { raise_located (loc lexbuf) "Illegal function name in Promela file." }
| "returnof_" { raise_located (loc lexbuf) "Illegal function name in Promela file." }
| "callorreturnof_" { raise_located (loc lexbuf) "Illegal function name in Promela file." }
| rL (rL | rD)* { let s = lexeme lexbuf in
PROMELA_LABEL s }
| rL (rL | rD)* { let s = lexeme lexbuf in PROMELA_LABEL s }
| eof { EOF }
| "1" { PROMELA_TRUE }
| _ { Aorai_option.error "Illegal_character : '%s'\n" (lexeme lexbuf);
raise Parsing.Parse_error}
| _ { Utils_parser.unknown_token lexbuf }
and comment = parse
| "*/" { () }
| eof { Aorai_option.error "Unterminated_comment\n" (*lex_error lexbuf "Unterminated_comment"*) }
| '\n' { newline lexbuf; comment lexbuf }
| eof { Aorai_option.abort "Unterminated_comment\n" }
| '\n' { Utils_parser.newline lexbuf; comment lexbuf }
| _ { comment lexbuf }
{
let parse c =
let lb = from_channel c in
try
Promelaparser.promela token lb
with
Parsing.Parse_error
| Invalid_argument _ ->
let (a,b)=(loc lb) in
Aorai_option.error "Syntax error (l%d c%d -> l%dc%d)" a.pos_lnum (a.pos_cnum-a.pos_bol) b.pos_lnum (b.pos_cnum-b.pos_bol);
(* Format.print_string "Syntax error (" ; *)
(* Format.print_string "l" ; *)
(* Format.print_int a.pos_lnum ; *)
(* Format.print_string "c" ; *)
(* Format.print_int (a.pos_cnum-a.pos_bol) ;*)
(* Format.print_string " -> l" ; *)
(* Format.print_int b.pos_lnum ; *)
(* Format.print_string "c" ; *)
(* Format.print_int (b.pos_cnum-b.pos_bol) ;*)
(* Format.print_string ")\n" ; *)
raise_located (loc lb) "Syntax error"
| Parsing.Parse_error -> Utils_parser.abort_current lb "Syntax error"
}
(**************************************************************************)
(* *)
(* This file is part of Aorai plug-in of Frama-C. *)
(* *)
(* Copyright (C) 2007-2021 *)
(* CEA (Commissariat à l'énergie atomique et aux énergies *)
(* alternatives) *)
(* INRIA (Institut National de Recherche en Informatique et en *)
(* Automatique) *)
(* INSA (Institut National des Sciences Appliquees) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
(* Lesser General Public License as published by the Free Software *)
(* Foundation, version 2.1. *)
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
(* See the GNU Lesser General Public License version 2.1 *)
(* for more details (enclosed in the file licenses/LGPLv2.1). *)
(* *)
(**************************************************************************)
val parse: in_channel -> Promelaast.parsed_automaton
@@ -31,19 +31,6 @@
open Promelaparser_withexps
open Lexing
exception Error of (Lexing.position * Lexing.position) * string
let loc lexbuf = (lexeme_start_p lexbuf, lexeme_end_p lexbuf)
let raise_located loc e = raise (Error (loc, e))
let buf = Buffer.create 1024
let newline lexbuf =
let pos = lexbuf.lex_curr_p in
lexbuf.lex_curr_p <-
{ pos with pos_lnum = pos.pos_lnum + 1; pos_bol = pos.pos_cnum }
}
@@ -74,9 +61,9 @@ rule token = parse
| "&&" { PROMELA_AND }
| '!' { PROMELA_NOT }
| [' ' '\t' '\012' '\r']+ { token lexbuf }
| '\n' { newline lexbuf; token lexbuf }
| '\n' { Utils_parser.newline lexbuf; token lexbuf }
| "/*" { comment lexbuf; token lexbuf }
| "//" [^ '\n']* '\n' { newline lexbuf; token lexbuf }
| "//" [^ '\n']* '\n' { Utils_parser.newline lexbuf; token lexbuf }
| "callof_" rL* (rL | rD)*
{ let s=(lexeme lexbuf) in
@@ -92,9 +79,12 @@ rule token = parse
PROMELA_CALLORRETURNOF s }
| "callof_" { raise_located (loc lexbuf) "Illegal function name in Promela file." }
| "returnof_" { raise_located (loc lexbuf) "Illegal function name in Promela file." }
| "callorreturnof_" { raise_located (loc lexbuf) "Illegal function name in Promela file." }
| "callof_" { Utils_parser.abort_current lexbuf
"Illegal function name in Promela file." }
| "returnof_" { Utils_parser.abort_current lexbuf
"Illegal function name in Promela file." }
| "callorreturnof_" { Utils_parser.abort_current lexbuf
"Illegal function name in Promela file." }
| rD+ | '-' rD+ { PROMELA_INT (lexeme lexbuf) }
@@ -130,16 +120,12 @@ rule token = parse
| eof { EOF }
| "1" { PROMELA_TRUE }
| _ { Aorai_option.error "Illegal_character : '%s'\n" (lexeme lexbuf);
raise Parsing.Parse_error}
| _ { Utils_parser.unknown_token lexbuf }
and comment = parse
| "*/" { () }
| eof { Aorai_option.warning "Unterminated_comment\n" (*lex_error lexbuf "Unterminated_comment"*) }
| '\n' { newline lexbuf; comment lexbuf }
| eof { Aorai_option.abort "Unterminated_comment\n" }
| '\n' { Utils_parser.newline lexbuf; comment lexbuf }
| _ { comment lexbuf }
@@ -149,22 +135,5 @@ and comment = parse
try
Promelaparser_withexps.promela token lb
with
Parsing.Parse_error
| Invalid_argument _ ->
let (a,b)=(loc lb) in
Aorai_option.error "Syntax error (l%d c%d -> l%dc%d)" a.pos_lnum (a.pos_cnum-a.pos_bol) b.pos_lnum (b.pos_cnum-b.pos_bol);
(* Format.print_string "Syntax error (" ; *)
(* Format.print_string "l" ; *)
(* Format.print_int a.pos_lnum ; *)
(* Format.print_string "c" ; *)
(* Format.print_int (a.pos_cnum-a.pos_bol) ;*)
(* Format.print_string " -> l" ; *)
(* Format.print_int b.pos_lnum ; *)
(* Format.print_string "c" ; *)
(* Format.print_int (b.pos_cnum-b.pos_bol) ;*)
(* Format.print_string ")\n" ; *)
raise_located (loc lb) "Syntax error"
Parsing.Parse_error -> Utils_parser.abort_current lb "Syntax error"
}
@@ -23,6 +23,27 @@
(* *)
(**************************************************************************)
open Lexing
let current_loc lex = Cil_datatype.Position.of_lexing_pos (lexeme_start_p lex)
let abort_current lex fmt =
let source = current_loc lex in
let start_line = source.Filepath.pos_lnum in
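  (* Prepend the common header to the caller's format: the offending lexeme
     and two lines of source context around the error location, then delegate
     the actual formatting and abort to Aorai_option.abort. *)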
let fmt = "before or at token %s@\n%a@\n" ^^ fmt in
Aorai_option.abort ~source fmt
(Lexing.lexeme lex)
(Errorloc.pp_context_from_file ~start_line ~ctx:2) source
let unknown_token lex =
abort_current lex
"Unexpected character: '%c'" (lexeme_char lex (lexeme_start lex))
let newline lexbuf =
let pos = lexbuf.lex_curr_p in
lexbuf.lex_curr_p <-
{ pos with pos_lnum = pos.pos_lnum + 1; pos_bol = pos.pos_cnum }
let rec get_last_field my_field my_offset =
match my_offset with
| Cil_types.NoOffset -> my_field
......
(**************************************************************************)
(* *)
(* This file is part of Aorai plug-in of Frama-C. *)
(* *)
(* Copyright (C) 2007-2021 *)
(* CEA (Commissariat à l'énergie atomique et aux énergies *)
(* alternatives) *)
(* INRIA (Institut National de Recherche en Informatique et en *)
(* Automatique) *)
(* INSA (Institut National des Sciences Appliquees) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
(* Lesser General Public License as published by the Free Software *)
(* Foundation, version 2.1. *)
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
(* See the GNU Lesser General Public License version 2.1 *)
(* for more details (enclosed in the file licenses/LGPLv2.1). *)
(* *)
(**************************************************************************)
(** utilities for parsing automata and LTL formulas *)
(** returns the position corresponding to the
current character in the lexbuf. *)
val current_loc: Lexing.lexbuf -> Cil_datatype.Position.t
(** aborts the execution using the current lexbuf position as source. *)
val abort_current:
Lexing.lexbuf -> ('a, Format.formatter, unit, 'b) format4 -> 'a
(** aborts in case the current character
is not the beginning of a valid token. *)
val unknown_token: Lexing.lexbuf -> 'a
(** initiates a new line in the lexbuf. *)
val newline: Lexing.lexbuf -> unit
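Since abort_current takes a format string, callers can append their own details to the common "before or at token" header; a minimal hedged sketch (check_balanced_braces is hypothetical, not part of the commit):

let check_balanced_braces lexbuf depth =
  (* report an error located at the current lexeme if braces are unbalanced *)
  if depth <> 0 then
    Utils_parser.abort_current lexbuf
      "unbalanced braces: %d still open at end of input" depth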
(**************************************************************************)
(* *)
(* This file is part of Aorai plug-in of Frama-C. *)
(* *)
(* Copyright (C) 2007-2021 *)
(* CEA (Commissariat à l'énergie atomique et aux énergies *)
(* alternatives) *)
(* INRIA (Institut National de Recherche en Informatique et en *)
(* Automatique) *)
(* INSA (Institut National des Sciences Appliquees) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
(* Lesser General Public License as published by the Free Software *)
(* Foundation, version 2.1. *)
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
(* See the GNU Lesser General Public License version 2.1 *)
(* for more details (enclosed in the file licenses/LGPLv2.1). *)
(* *)
(**************************************************************************)
val token: Lexing.lexbuf -> Yaparser.token
@@ -26,14 +26,6 @@
(* File yalexer.mll *)
{
open Yaparser
open Lexing
exception Lexing_error of string
let new_line lexbuf =
let lcp = lexbuf.lex_curr_p in
lexbuf.lex_curr_p <- { lcp with pos_lnum = lcp.pos_lnum + 1;
pos_bol = lcp.pos_cnum; }
;;
}
let num = ['0'-'9']
@@ -44,7 +36,7 @@ let string = ([^ '"' '\\']|'\\'_)*
rule token = parse
[' ' '\t' ] { token lexbuf } (* skip blanks *)
| '\n' { new_line lexbuf; token lexbuf }
| '\n' { Utils_parser.newline lexbuf; token lexbuf }
| ['0'-'9']+ as lxm { INT(lxm) }
| "CALL" { CALL_OF }
| "RETURN" { RETURN_OF }
@@ -89,6 +81,4 @@ rule token = parse
| '?' { QUESTION }
| eof { EOF }
| ":=" { AFF }
| _ {
raise (Lexing_error ("unexpected character '" ^ Lexing.lexeme lexbuf ^ "'"))
}
| _ { Utils_parser.unknown_token lexbuf }
@@ -19,3 +19,4 @@
(* for more details (enclosed in the file licenses/LGPLv2.1). *)
(* *)
(**************************************************************************)