Skip to content
Snippets Groups Projects
Commit bdd7c6a5 authored by Allan Blanchard's avatar Allan Blanchard
Browse files

[wp] Enables more tests for Chunks typing

parent dae26b84
No related branches found
No related tags found
1 merge request: !3 "Fixed a semantic error concerning ISO C99 Uninitialized Value Undefined Behaviour in Eva main manual"
/* run.config
OPT: -wp-rte -wp-prop="-NO_CHECK,-rte"
OPT: -wp-rte
*/
/* run.config_qualif
OPT: -wp-rte -wp-prop="-NO_CHECK,-rte"
OPT: -wp-rte
*/
const char x[10];
......@@ -28,7 +28,7 @@ void function(signed char i8[10],
unsigned long long int u64[10])
{
/*@
loop invariant NO_CHECK: 0 <= i <= 10;
loop invariant 0 <= i <= 10;
loop invariant \forall integer k ; 0 <= k < i ==> i8[k] == 1 ;
loop invariant \forall integer k ; 0 <= k < i ==> u8[k] == 2 ;
loop invariant \forall integer k ; 0 <= k < i ==> i16[k] == 3 ;
......@@ -37,9 +37,9 @@ void function(signed char i8[10],
loop invariant \forall integer k ; 0 <= k < i ==> u32[k] == 6 ;
loop invariant \forall integer k ; 0 <= k < i ==> i64[k] == 7 ;
loop invariant \forall integer k ; 0 <= k < i ==> u64[k] == 8 ;
loop assigns NO_CHECK: i, i8[0..9], u8[0..9], i16[0..9], u16[0..9],
i32[0..9], u32[0..9], i64[0..9], u64[0..9] ;
loop variant NO_CHECK: 10-i;
loop assigns i, i8[0..9], u8[0..9], i16[0..9], u16[0..9],
i32[0..9], u32[0..9], i64[0..9], u64[0..9] ;
loop variant 10-i;
*/
for (int i = 0; i < 10; ++i) {
i8[i] = 1;
......
......@@ -78,7 +78,7 @@ Assume {
(* Invariant *)
Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i_1) ->
(a_1[shift_sint8(i8_0, i_2)] = 1))).
(* Invariant 'NO_CHECK' *)
(* Invariant *)
Have: (0 <= i_1) /\ (i_1 <= 10).
(* Else *)
Have: 10 <= i_1.
......@@ -89,6 +89,111 @@ Prove: (a_16 = (1 + x[i])) /\ (a_17 = (1 + a_16)) /\ (a_18 = (1 + a_17)) /\
------------------------------------------------------------
Goal Preservation of Invariant (file tests/wp_acsl/chunk_typing.i, line 31):
Let a = shift_uint64(u64_0, i).
Let a_1 = shift_sint64(i64_0, i).
Let a_2 = shift_uint32(u32_0, i).
Let a_3 = shift_sint32(i32_0, i).
Let a_4 = shift_uint16(u16_0, i).
Let a_5 = shift_sint16(i16_0, i).
Let a_6 = shift_uint8(u8_0, i).
Let a_7 = shift_sint8(i8_0, i).
Let a_8 = shift_sint8(i8_0, 0).
Let a_9 = havoc(Mchar_undef_0, Mchar_0, a_8, 10).
Let a_10 = shift_uint8(u8_0, 0).
Let a_11 = havoc(Mint_undef_6, Mint_6, a_10, 10).
Let a_12 = shift_sint16(i16_0, 0).
Let a_13 = havoc(Mint_undef_0, Mint_0, a_12, 10).
Let a_14 = shift_uint16(u16_0, 0).
Let a_15 = havoc(Mint_undef_3, Mint_3, a_14, 10).
Let a_16 = shift_sint32(i32_0, 0).
Let a_17 = havoc(Mint_undef_1, Mint_1, a_16, 10).
Let a_18 = shift_uint32(u32_0, 0).
Let a_19 = havoc(Mint_undef_4, Mint_4, a_18, 10).
Let a_20 = shift_sint64(i64_0, 0).
Let a_21 = havoc(Mint_undef_2, Mint_2, a_20, 10).
Let a_22 = shift_uint64(u64_0, 0).
Let a_23 = havoc(Mint_undef_5, Mint_5, a_22, 10).
Assume {
Type: is_sint16_chunk(Mint_0) /\ is_sint32_chunk(Mint_1) /\
is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint32(1 + i) /\ is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
is_sint16_chunk(a_13[a_5 <- 3]) /\ is_sint32_chunk(a_17[a_3 <- 5]) /\
is_sint64_chunk(a_21[a_1 <- 7]) /\ is_sint8_chunk(a_9[a_7 <- 1]) /\
is_uint16_chunk(a_15[a_4 <- 4]) /\ is_uint32_chunk(a_19[a_2 <- 6]) /\
is_uint64_chunk(a_23[a <- 8]) /\ is_uint8_chunk(a_11[a_6 <- 2]).
(* Heap *)
Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
(region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
(region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
(region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
linked(Malloc_0) /\ sconst(Mchar_0).
(* Pre-condition *)
Have: valid_rw(Malloc_0, a_12, 10) /\ valid_rw(Malloc_0, a_16, 10) /\
valid_rw(Malloc_0, a_20, 10) /\ valid_rw(Malloc_0, a_8, 10) /\
valid_rw(Malloc_0, a_14, 10) /\ valid_rw(Malloc_0, a_18, 10) /\
valid_rw(Malloc_0, a_22, 10) /\ valid_rw(Malloc_0, a_10, 10).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_23[shift_uint64(u64_0, i_1)] = 8))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_21[shift_sint64(i64_0, i_1)] = 7))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_19[shift_uint32(u32_0, i_1)] = 6))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_17[shift_sint32(i32_0, i_1)] = 5))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_15[shift_uint16(u16_0, i_1)] = 4))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_13[shift_sint16(i16_0, i_1)] = 3))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_11[shift_uint8(u8_0, i_1)] = 2))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_9[shift_sint8(i8_0, i_1)] = 1))).
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_7, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_6, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_5, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_4, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_3, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_2, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_1, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a, 1).
(* Assertion 'rte,signed_overflow' *)
Have: i <= 2147483646.
}
Prove: (-1) <= i.
------------------------------------------------------------
Goal Establishment of Invariant (file tests/wp_acsl/chunk_typing.i, line 31):
Prove: true.
------------------------------------------------------------
Goal Preservation of Invariant (file tests/wp_acsl/chunk_typing.i, line 32):
Let a = shift_uint64(u64_0, i).
Let a_1 = shift_sint64(i64_0, i).
......@@ -120,7 +225,7 @@ Assume {
is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint32(1 + i) /\ is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
......@@ -165,7 +270,7 @@ Assume {
(* Invariant *)
Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
(a_9[shift_sint8(i8_0, i_2)] = 1))).
(* Invariant 'NO_CHECK' *)
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
......@@ -228,7 +333,7 @@ Assume {
is_sint64_chunk(Mint_3) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_4) /\ is_uint32_chunk(Mint_5) /\
is_uint64_chunk(Mint_6) /\ is_uint8_chunk(Mint_0) /\ is_sint32(i) /\
is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint32(1 + i) /\ is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
......@@ -273,7 +378,7 @@ Assume {
(* Invariant *)
Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
(a_9[shift_sint8(i8_0, i_2)] = 1))).
(* Invariant 'NO_CHECK' *)
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
......@@ -336,7 +441,7 @@ Assume {
is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint32(1 + i) /\ is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
......@@ -381,7 +486,7 @@ Assume {
(* Invariant *)
Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
(a_9[shift_sint8(i8_0, i_2)] = 1))).
(* Invariant 'NO_CHECK' *)
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
......@@ -444,7 +549,7 @@ Assume {
is_sint64_chunk(Mint_3) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_0) /\ is_uint32_chunk(Mint_4) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint32(1 + i) /\ is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
......@@ -489,7 +594,7 @@ Assume {
(* Invariant *)
Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
(a_9[shift_sint8(i8_0, i_2)] = 1))).
(* Invariant 'NO_CHECK' *)
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
......@@ -552,7 +657,7 @@ Assume {
is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint32(1 + i) /\ is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
......@@ -597,7 +702,7 @@ Assume {
(* Invariant *)
Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
(a_9[shift_sint8(i8_0, i_2)] = 1))).
(* Invariant 'NO_CHECK' *)
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
......@@ -660,7 +765,7 @@ Assume {
is_sint64_chunk(Mint_3) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_4) /\ is_uint32_chunk(Mint_0) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint32(1 + i) /\ is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
......@@ -705,7 +810,7 @@ Assume {
(* Invariant *)
Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
(a_9[shift_sint8(i8_0, i_2)] = 1))).
(* Invariant 'NO_CHECK' *)
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
......@@ -768,7 +873,7 @@ Assume {
is_sint64_chunk(Mint_0) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint32(1 + i) /\ is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
......@@ -813,7 +918,7 @@ Assume {
(* Invariant *)
Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
(a_9[shift_sint8(i8_0, i_2)] = 1))).
(* Invariant 'NO_CHECK' *)
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
......@@ -876,7 +981,7 @@ Assume {
is_sint64_chunk(Mint_3) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_4) /\ is_uint32_chunk(Mint_5) /\
is_uint64_chunk(Mint_0) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint32(1 + i) /\ is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
......@@ -921,7 +1026,7 @@ Assume {
(* Invariant *)
Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
(a_9[shift_sint8(i8_0, i_2)] = 1))).
(* Invariant 'NO_CHECK' *)
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
......@@ -951,6 +1056,825 @@ Prove: a_24[shift_uint64(u64_0, i_1)] = 8.
Goal Establishment of Invariant (file tests/wp_acsl/chunk_typing.i, line 39):
Prove: true.
------------------------------------------------------------
Goal Assertion 'rte,mem_access' (file tests/wp_acsl/chunk_typing.i, line 45):
Let a = shift_sint8(i8_0, 0).
Let a_1 = havoc(Mchar_undef_0, Mchar_0, a, 10).
Let a_2 = shift_uint8(u8_0, 0).
Let a_3 = havoc(Mint_undef_6, Mint_6, a_2, 10).
Let a_4 = shift_sint16(i16_0, 0).
Let a_5 = havoc(Mint_undef_0, Mint_0, a_4, 10).
Let a_6 = shift_uint16(u16_0, 0).
Let a_7 = havoc(Mint_undef_3, Mint_3, a_6, 10).
Let a_8 = shift_sint32(i32_0, 0).
Let a_9 = havoc(Mint_undef_1, Mint_1, a_8, 10).
Let a_10 = shift_uint32(u32_0, 0).
Let a_11 = havoc(Mint_undef_4, Mint_4, a_10, 10).
Let a_12 = shift_sint64(i64_0, 0).
Let a_13 = havoc(Mint_undef_2, Mint_2, a_12, 10).
Let a_14 = shift_uint64(u64_0, 0).
Let a_15 = havoc(Mint_undef_5, Mint_5, a_14, 10).
Assume {
Type: is_sint16_chunk(Mint_0) /\ is_sint32_chunk(Mint_1) /\
is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_5) /\ is_sint32_chunk(a_9) /\
is_sint64_chunk(a_13) /\ is_sint8_chunk(a_1) /\ is_uint16_chunk(a_7) /\
is_uint32_chunk(a_11) /\ is_uint64_chunk(a_15) /\ is_uint8_chunk(a_3).
(* Heap *)
Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
(region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
(region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
(region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
linked(Malloc_0) /\ sconst(Mchar_0).
(* Pre-condition *)
Have: valid_rw(Malloc_0, a_4, 10) /\ valid_rw(Malloc_0, a_8, 10) /\
valid_rw(Malloc_0, a_12, 10) /\ valid_rw(Malloc_0, a, 10) /\
valid_rw(Malloc_0, a_6, 10) /\ valid_rw(Malloc_0, a_10, 10) /\
valid_rw(Malloc_0, a_14, 10) /\ valid_rw(Malloc_0, a_2, 10).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_15[shift_uint64(u64_0, i_1)] = 8))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_13[shift_sint64(i64_0, i_1)] = 7))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_11[shift_uint32(u32_0, i_1)] = 6))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_9[shift_sint32(i32_0, i_1)] = 5))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_7[shift_uint16(u16_0, i_1)] = 4))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_5[shift_sint16(i16_0, i_1)] = 3))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_3[shift_uint8(u8_0, i_1)] = 2))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_1[shift_sint8(i8_0, i_1)] = 1))).
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
}
Prove: valid_rw(Malloc_0, shift_sint8(i8_0, i), 1).
------------------------------------------------------------
Goal Assertion 'rte,mem_access' (file tests/wp_acsl/chunk_typing.i, line 46):
Let a = shift_sint8(i8_0, i).
Let a_1 = shift_sint8(i8_0, 0).
Let a_2 = havoc(Mchar_undef_0, Mchar_0, a_1, 10).
Let a_3 = shift_uint8(u8_0, 0).
Let a_4 = havoc(Mint_undef_6, Mint_6, a_3, 10).
Let a_5 = shift_sint16(i16_0, 0).
Let a_6 = havoc(Mint_undef_0, Mint_0, a_5, 10).
Let a_7 = shift_uint16(u16_0, 0).
Let a_8 = havoc(Mint_undef_3, Mint_3, a_7, 10).
Let a_9 = shift_sint32(i32_0, 0).
Let a_10 = havoc(Mint_undef_1, Mint_1, a_9, 10).
Let a_11 = shift_uint32(u32_0, 0).
Let a_12 = havoc(Mint_undef_4, Mint_4, a_11, 10).
Let a_13 = shift_sint64(i64_0, 0).
Let a_14 = havoc(Mint_undef_2, Mint_2, a_13, 10).
Let a_15 = shift_uint64(u64_0, 0).
Let a_16 = havoc(Mint_undef_5, Mint_5, a_15, 10).
Assume {
Type: is_sint16_chunk(Mint_0) /\ is_sint32_chunk(Mint_1) /\
is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_6) /\ is_sint32_chunk(a_10) /\
is_sint64_chunk(a_14) /\ is_sint8_chunk(a_2) /\ is_uint16_chunk(a_8) /\
is_uint32_chunk(a_12) /\ is_uint64_chunk(a_16) /\
is_uint8_chunk(a_4) /\ is_sint8_chunk(a_2[a <- 1]).
(* Heap *)
Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
(region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
(region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
(region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
linked(Malloc_0) /\ sconst(Mchar_0).
(* Pre-condition *)
Have: valid_rw(Malloc_0, a_5, 10) /\ valid_rw(Malloc_0, a_9, 10) /\
valid_rw(Malloc_0, a_13, 10) /\ valid_rw(Malloc_0, a_1, 10) /\
valid_rw(Malloc_0, a_7, 10) /\ valid_rw(Malloc_0, a_11, 10) /\
valid_rw(Malloc_0, a_15, 10) /\ valid_rw(Malloc_0, a_3, 10).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_16[shift_uint64(u64_0, i_1)] = 8))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_14[shift_sint64(i64_0, i_1)] = 7))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_12[shift_uint32(u32_0, i_1)] = 6))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_10[shift_sint32(i32_0, i_1)] = 5))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_8[shift_uint16(u16_0, i_1)] = 4))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_6[shift_sint16(i16_0, i_1)] = 3))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_4[shift_uint8(u8_0, i_1)] = 2))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_2[shift_sint8(i8_0, i_1)] = 1))).
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a, 1).
}
Prove: valid_rw(Malloc_0, shift_uint8(u8_0, i), 1).
------------------------------------------------------------
Goal Assertion 'rte,mem_access' (file tests/wp_acsl/chunk_typing.i, line 47):
Let a = shift_uint8(u8_0, i).
Let a_1 = shift_sint8(i8_0, i).
Let a_2 = shift_sint8(i8_0, 0).
Let a_3 = havoc(Mchar_undef_0, Mchar_0, a_2, 10).
Let a_4 = shift_uint8(u8_0, 0).
Let a_5 = havoc(Mint_undef_6, Mint_6, a_4, 10).
Let a_6 = shift_sint16(i16_0, 0).
Let a_7 = havoc(Mint_undef_0, Mint_0, a_6, 10).
Let a_8 = shift_uint16(u16_0, 0).
Let a_9 = havoc(Mint_undef_3, Mint_3, a_8, 10).
Let a_10 = shift_sint32(i32_0, 0).
Let a_11 = havoc(Mint_undef_1, Mint_1, a_10, 10).
Let a_12 = shift_uint32(u32_0, 0).
Let a_13 = havoc(Mint_undef_4, Mint_4, a_12, 10).
Let a_14 = shift_sint64(i64_0, 0).
Let a_15 = havoc(Mint_undef_2, Mint_2, a_14, 10).
Let a_16 = shift_uint64(u64_0, 0).
Let a_17 = havoc(Mint_undef_5, Mint_5, a_16, 10).
Assume {
Type: is_sint16_chunk(Mint_0) /\ is_sint32_chunk(Mint_1) /\
is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_7) /\ is_sint32_chunk(a_11) /\
is_sint64_chunk(a_15) /\ is_sint8_chunk(a_3) /\ is_uint16_chunk(a_9) /\
is_uint32_chunk(a_13) /\ is_uint64_chunk(a_17) /\
is_uint8_chunk(a_5) /\ is_sint8_chunk(a_3[a_1 <- 1]) /\
is_uint8_chunk(a_5[a <- 2]).
(* Heap *)
Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
(region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
(region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
(region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
linked(Malloc_0) /\ sconst(Mchar_0).
(* Pre-condition *)
Have: valid_rw(Malloc_0, a_6, 10) /\ valid_rw(Malloc_0, a_10, 10) /\
valid_rw(Malloc_0, a_14, 10) /\ valid_rw(Malloc_0, a_2, 10) /\
valid_rw(Malloc_0, a_8, 10) /\ valid_rw(Malloc_0, a_12, 10) /\
valid_rw(Malloc_0, a_16, 10) /\ valid_rw(Malloc_0, a_4, 10).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_17[shift_uint64(u64_0, i_1)] = 8))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_15[shift_sint64(i64_0, i_1)] = 7))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_13[shift_uint32(u32_0, i_1)] = 6))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_11[shift_sint32(i32_0, i_1)] = 5))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_9[shift_uint16(u16_0, i_1)] = 4))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_7[shift_sint16(i16_0, i_1)] = 3))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_5[shift_uint8(u8_0, i_1)] = 2))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_3[shift_sint8(i8_0, i_1)] = 1))).
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_1, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a, 1).
}
Prove: valid_rw(Malloc_0, shift_sint16(i16_0, i), 1).
------------------------------------------------------------
Goal Assertion 'rte,mem_access' (file tests/wp_acsl/chunk_typing.i, line 48):
Let a = shift_sint16(i16_0, i).
Let a_1 = shift_uint8(u8_0, i).
Let a_2 = shift_sint8(i8_0, i).
Let a_3 = shift_sint8(i8_0, 0).
Let a_4 = havoc(Mchar_undef_0, Mchar_0, a_3, 10).
Let a_5 = shift_uint8(u8_0, 0).
Let a_6 = havoc(Mint_undef_6, Mint_6, a_5, 10).
Let a_7 = shift_sint16(i16_0, 0).
Let a_8 = havoc(Mint_undef_0, Mint_0, a_7, 10).
Let a_9 = shift_uint16(u16_0, 0).
Let a_10 = havoc(Mint_undef_3, Mint_3, a_9, 10).
Let a_11 = shift_sint32(i32_0, 0).
Let a_12 = havoc(Mint_undef_1, Mint_1, a_11, 10).
Let a_13 = shift_uint32(u32_0, 0).
Let a_14 = havoc(Mint_undef_4, Mint_4, a_13, 10).
Let a_15 = shift_sint64(i64_0, 0).
Let a_16 = havoc(Mint_undef_2, Mint_2, a_15, 10).
Let a_17 = shift_uint64(u64_0, 0).
Let a_18 = havoc(Mint_undef_5, Mint_5, a_17, 10).
Assume {
Type: is_sint16_chunk(Mint_0) /\ is_sint32_chunk(Mint_1) /\
is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_8) /\ is_sint32_chunk(a_12) /\
is_sint64_chunk(a_16) /\ is_sint8_chunk(a_4) /\
is_uint16_chunk(a_10) /\ is_uint32_chunk(a_14) /\
is_uint64_chunk(a_18) /\ is_uint8_chunk(a_6) /\
is_sint16_chunk(a_8[a <- 3]) /\ is_sint8_chunk(a_4[a_2 <- 1]) /\
is_uint8_chunk(a_6[a_1 <- 2]).
(* Heap *)
Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
(region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
(region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
(region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
linked(Malloc_0) /\ sconst(Mchar_0).
(* Pre-condition *)
Have: valid_rw(Malloc_0, a_7, 10) /\ valid_rw(Malloc_0, a_11, 10) /\
valid_rw(Malloc_0, a_15, 10) /\ valid_rw(Malloc_0, a_3, 10) /\
valid_rw(Malloc_0, a_9, 10) /\ valid_rw(Malloc_0, a_13, 10) /\
valid_rw(Malloc_0, a_17, 10) /\ valid_rw(Malloc_0, a_5, 10).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_18[shift_uint64(u64_0, i_1)] = 8))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_16[shift_sint64(i64_0, i_1)] = 7))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_14[shift_uint32(u32_0, i_1)] = 6))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_12[shift_sint32(i32_0, i_1)] = 5))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_10[shift_uint16(u16_0, i_1)] = 4))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_8[shift_sint16(i16_0, i_1)] = 3))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_6[shift_uint8(u8_0, i_1)] = 2))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_4[shift_sint8(i8_0, i_1)] = 1))).
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_2, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_1, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a, 1).
}
Prove: valid_rw(Malloc_0, shift_uint16(u16_0, i), 1).
------------------------------------------------------------
Goal Assertion 'rte,mem_access' (file tests/wp_acsl/chunk_typing.i, line 49):
Let a = shift_uint16(u16_0, i).
Let a_1 = shift_sint16(i16_0, i).
Let a_2 = shift_uint8(u8_0, i).
Let a_3 = shift_sint8(i8_0, i).
Let a_4 = shift_sint8(i8_0, 0).
Let a_5 = havoc(Mchar_undef_0, Mchar_0, a_4, 10).
Let a_6 = shift_uint8(u8_0, 0).
Let a_7 = havoc(Mint_undef_6, Mint_6, a_6, 10).
Let a_8 = shift_sint16(i16_0, 0).
Let a_9 = havoc(Mint_undef_0, Mint_0, a_8, 10).
Let a_10 = shift_uint16(u16_0, 0).
Let a_11 = havoc(Mint_undef_3, Mint_3, a_10, 10).
Let a_12 = shift_sint32(i32_0, 0).
Let a_13 = havoc(Mint_undef_1, Mint_1, a_12, 10).
Let a_14 = shift_uint32(u32_0, 0).
Let a_15 = havoc(Mint_undef_4, Mint_4, a_14, 10).
Let a_16 = shift_sint64(i64_0, 0).
Let a_17 = havoc(Mint_undef_2, Mint_2, a_16, 10).
Let a_18 = shift_uint64(u64_0, 0).
Let a_19 = havoc(Mint_undef_5, Mint_5, a_18, 10).
Assume {
Type: is_sint16_chunk(Mint_0) /\ is_sint32_chunk(Mint_1) /\
is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_9) /\ is_sint32_chunk(a_13) /\
is_sint64_chunk(a_17) /\ is_sint8_chunk(a_5) /\
is_uint16_chunk(a_11) /\ is_uint32_chunk(a_15) /\
is_uint64_chunk(a_19) /\ is_uint8_chunk(a_7) /\
is_sint16_chunk(a_9[a_1 <- 3]) /\ is_sint8_chunk(a_5[a_3 <- 1]) /\
is_uint16_chunk(a_11[a <- 4]) /\ is_uint8_chunk(a_7[a_2 <- 2]).
(* Heap *)
Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
(region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
(region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
(region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
linked(Malloc_0) /\ sconst(Mchar_0).
(* Pre-condition *)
Have: valid_rw(Malloc_0, a_8, 10) /\ valid_rw(Malloc_0, a_12, 10) /\
valid_rw(Malloc_0, a_16, 10) /\ valid_rw(Malloc_0, a_4, 10) /\
valid_rw(Malloc_0, a_10, 10) /\ valid_rw(Malloc_0, a_14, 10) /\
valid_rw(Malloc_0, a_18, 10) /\ valid_rw(Malloc_0, a_6, 10).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_19[shift_uint64(u64_0, i_1)] = 8))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_17[shift_sint64(i64_0, i_1)] = 7))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_15[shift_uint32(u32_0, i_1)] = 6))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_13[shift_sint32(i32_0, i_1)] = 5))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_11[shift_uint16(u16_0, i_1)] = 4))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_9[shift_sint16(i16_0, i_1)] = 3))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_7[shift_uint8(u8_0, i_1)] = 2))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_5[shift_sint8(i8_0, i_1)] = 1))).
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_3, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_2, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_1, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a, 1).
}
Prove: valid_rw(Malloc_0, shift_sint32(i32_0, i), 1).
------------------------------------------------------------
Goal Assertion 'rte,mem_access' (file tests/wp_acsl/chunk_typing.i, line 50):
Let a = shift_sint32(i32_0, i).
Let a_1 = shift_uint16(u16_0, i).
Let a_2 = shift_sint16(i16_0, i).
Let a_3 = shift_uint8(u8_0, i).
Let a_4 = shift_sint8(i8_0, i).
Let a_5 = shift_sint8(i8_0, 0).
Let a_6 = havoc(Mchar_undef_0, Mchar_0, a_5, 10).
Let a_7 = shift_uint8(u8_0, 0).
Let a_8 = havoc(Mint_undef_6, Mint_6, a_7, 10).
Let a_9 = shift_sint16(i16_0, 0).
Let a_10 = havoc(Mint_undef_0, Mint_0, a_9, 10).
Let a_11 = shift_uint16(u16_0, 0).
Let a_12 = havoc(Mint_undef_3, Mint_3, a_11, 10).
Let a_13 = shift_sint32(i32_0, 0).
Let a_14 = havoc(Mint_undef_1, Mint_1, a_13, 10).
Let a_15 = shift_uint32(u32_0, 0).
Let a_16 = havoc(Mint_undef_4, Mint_4, a_15, 10).
Let a_17 = shift_sint64(i64_0, 0).
Let a_18 = havoc(Mint_undef_2, Mint_2, a_17, 10).
Let a_19 = shift_uint64(u64_0, 0).
Let a_20 = havoc(Mint_undef_5, Mint_5, a_19, 10).
Assume {
Type: is_sint16_chunk(Mint_0) /\ is_sint32_chunk(Mint_1) /\
is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_10) /\ is_sint32_chunk(a_14) /\
is_sint64_chunk(a_18) /\ is_sint8_chunk(a_6) /\
is_uint16_chunk(a_12) /\ is_uint32_chunk(a_16) /\
is_uint64_chunk(a_20) /\ is_uint8_chunk(a_8) /\
is_sint16_chunk(a_10[a_2 <- 3]) /\ is_sint32_chunk(a_14[a <- 5]) /\
is_sint8_chunk(a_6[a_4 <- 1]) /\ is_uint16_chunk(a_12[a_1 <- 4]) /\
is_uint8_chunk(a_8[a_3 <- 2]).
(* Heap *)
Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
(region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
(region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
(region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
linked(Malloc_0) /\ sconst(Mchar_0).
(* Pre-condition *)
Have: valid_rw(Malloc_0, a_9, 10) /\ valid_rw(Malloc_0, a_13, 10) /\
valid_rw(Malloc_0, a_17, 10) /\ valid_rw(Malloc_0, a_5, 10) /\
valid_rw(Malloc_0, a_11, 10) /\ valid_rw(Malloc_0, a_15, 10) /\
valid_rw(Malloc_0, a_19, 10) /\ valid_rw(Malloc_0, a_7, 10).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_20[shift_uint64(u64_0, i_1)] = 8))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_18[shift_sint64(i64_0, i_1)] = 7))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_16[shift_uint32(u32_0, i_1)] = 6))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_14[shift_sint32(i32_0, i_1)] = 5))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_12[shift_uint16(u16_0, i_1)] = 4))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_10[shift_sint16(i16_0, i_1)] = 3))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_8[shift_uint8(u8_0, i_1)] = 2))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_6[shift_sint8(i8_0, i_1)] = 1))).
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_4, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_3, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_2, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_1, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a, 1).
}
Prove: valid_rw(Malloc_0, shift_uint32(u32_0, i), 1).
------------------------------------------------------------
Goal Assertion 'rte,mem_access' (file tests/wp_acsl/chunk_typing.i, line 51):
Let a = shift_uint32(u32_0, i).
Let a_1 = shift_sint32(i32_0, i).
Let a_2 = shift_uint16(u16_0, i).
Let a_3 = shift_sint16(i16_0, i).
Let a_4 = shift_uint8(u8_0, i).
Let a_5 = shift_sint8(i8_0, i).
Let a_6 = shift_sint8(i8_0, 0).
Let a_7 = havoc(Mchar_undef_0, Mchar_0, a_6, 10).
Let a_8 = shift_uint8(u8_0, 0).
Let a_9 = havoc(Mint_undef_6, Mint_6, a_8, 10).
Let a_10 = shift_sint16(i16_0, 0).
Let a_11 = havoc(Mint_undef_0, Mint_0, a_10, 10).
Let a_12 = shift_uint16(u16_0, 0).
Let a_13 = havoc(Mint_undef_3, Mint_3, a_12, 10).
Let a_14 = shift_sint32(i32_0, 0).
Let a_15 = havoc(Mint_undef_1, Mint_1, a_14, 10).
Let a_16 = shift_uint32(u32_0, 0).
Let a_17 = havoc(Mint_undef_4, Mint_4, a_16, 10).
Let a_18 = shift_sint64(i64_0, 0).
Let a_19 = havoc(Mint_undef_2, Mint_2, a_18, 10).
Let a_20 = shift_uint64(u64_0, 0).
Let a_21 = havoc(Mint_undef_5, Mint_5, a_20, 10).
Assume {
Type: is_sint16_chunk(Mint_0) /\ is_sint32_chunk(Mint_1) /\
is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_11) /\ is_sint32_chunk(a_15) /\
is_sint64_chunk(a_19) /\ is_sint8_chunk(a_7) /\
is_uint16_chunk(a_13) /\ is_uint32_chunk(a_17) /\
is_uint64_chunk(a_21) /\ is_uint8_chunk(a_9) /\
is_sint16_chunk(a_11[a_3 <- 3]) /\ is_sint32_chunk(a_15[a_1 <- 5]) /\
is_sint8_chunk(a_7[a_5 <- 1]) /\ is_uint16_chunk(a_13[a_2 <- 4]) /\
is_uint32_chunk(a_17[a <- 6]) /\ is_uint8_chunk(a_9[a_4 <- 2]).
(* Heap *)
Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
(region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
(region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
(region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
linked(Malloc_0) /\ sconst(Mchar_0).
(* Pre-condition *)
Have: valid_rw(Malloc_0, a_10, 10) /\ valid_rw(Malloc_0, a_14, 10) /\
valid_rw(Malloc_0, a_18, 10) /\ valid_rw(Malloc_0, a_6, 10) /\
valid_rw(Malloc_0, a_12, 10) /\ valid_rw(Malloc_0, a_16, 10) /\
valid_rw(Malloc_0, a_20, 10) /\ valid_rw(Malloc_0, a_8, 10).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_21[shift_uint64(u64_0, i_1)] = 8))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_19[shift_sint64(i64_0, i_1)] = 7))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_17[shift_uint32(u32_0, i_1)] = 6))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_15[shift_sint32(i32_0, i_1)] = 5))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_13[shift_uint16(u16_0, i_1)] = 4))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_11[shift_sint16(i16_0, i_1)] = 3))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_9[shift_uint8(u8_0, i_1)] = 2))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_7[shift_sint8(i8_0, i_1)] = 1))).
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_5, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_4, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_3, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_2, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_1, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a, 1).
}
Prove: valid_rw(Malloc_0, shift_sint64(i64_0, i), 1).
------------------------------------------------------------
Goal Assertion 'rte,mem_access' (file tests/wp_acsl/chunk_typing.i, line 52):
Let a = shift_sint64(i64_0, i).
Let a_1 = shift_uint32(u32_0, i).
Let a_2 = shift_sint32(i32_0, i).
Let a_3 = shift_uint16(u16_0, i).
Let a_4 = shift_sint16(i16_0, i).
Let a_5 = shift_uint8(u8_0, i).
Let a_6 = shift_sint8(i8_0, i).
Let a_7 = shift_sint8(i8_0, 0).
Let a_8 = havoc(Mchar_undef_0, Mchar_0, a_7, 10).
Let a_9 = shift_uint8(u8_0, 0).
Let a_10 = havoc(Mint_undef_6, Mint_6, a_9, 10).
Let a_11 = shift_sint16(i16_0, 0).
Let a_12 = havoc(Mint_undef_0, Mint_0, a_11, 10).
Let a_13 = shift_uint16(u16_0, 0).
Let a_14 = havoc(Mint_undef_3, Mint_3, a_13, 10).
Let a_15 = shift_sint32(i32_0, 0).
Let a_16 = havoc(Mint_undef_1, Mint_1, a_15, 10).
Let a_17 = shift_uint32(u32_0, 0).
Let a_18 = havoc(Mint_undef_4, Mint_4, a_17, 10).
Let a_19 = shift_sint64(i64_0, 0).
Let a_20 = havoc(Mint_undef_2, Mint_2, a_19, 10).
Let a_21 = shift_uint64(u64_0, 0).
Let a_22 = havoc(Mint_undef_5, Mint_5, a_21, 10).
Assume {
Type: is_sint16_chunk(Mint_0) /\ is_sint32_chunk(Mint_1) /\
is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_12) /\ is_sint32_chunk(a_16) /\
is_sint64_chunk(a_20) /\ is_sint8_chunk(a_8) /\
is_uint16_chunk(a_14) /\ is_uint32_chunk(a_18) /\
is_uint64_chunk(a_22) /\ is_uint8_chunk(a_10) /\
is_sint16_chunk(a_12[a_4 <- 3]) /\ is_sint32_chunk(a_16[a_2 <- 5]) /\
is_sint64_chunk(a_20[a <- 7]) /\ is_sint8_chunk(a_8[a_6 <- 1]) /\
is_uint16_chunk(a_14[a_3 <- 4]) /\ is_uint32_chunk(a_18[a_1 <- 6]) /\
is_uint8_chunk(a_10[a_5 <- 2]).
(* Heap *)
Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
(region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
(region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
(region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
linked(Malloc_0) /\ sconst(Mchar_0).
(* Pre-condition *)
Have: valid_rw(Malloc_0, a_11, 10) /\ valid_rw(Malloc_0, a_15, 10) /\
valid_rw(Malloc_0, a_19, 10) /\ valid_rw(Malloc_0, a_7, 10) /\
valid_rw(Malloc_0, a_13, 10) /\ valid_rw(Malloc_0, a_17, 10) /\
valid_rw(Malloc_0, a_21, 10) /\ valid_rw(Malloc_0, a_9, 10).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_22[shift_uint64(u64_0, i_1)] = 8))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_20[shift_sint64(i64_0, i_1)] = 7))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_18[shift_uint32(u32_0, i_1)] = 6))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_16[shift_sint32(i32_0, i_1)] = 5))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_14[shift_uint16(u16_0, i_1)] = 4))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_12[shift_sint16(i16_0, i_1)] = 3))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_10[shift_uint8(u8_0, i_1)] = 2))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_8[shift_sint8(i8_0, i_1)] = 1))).
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_6, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_5, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_4, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_3, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_2, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_1, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a, 1).
}
Prove: valid_rw(Malloc_0, shift_uint64(u64_0, i), 1).
------------------------------------------------------------
Goal Assertion 'rte,signed_overflow' (file tests/wp_acsl/chunk_typing.i, line 44):
Let a = shift_uint64(u64_0, i).
Let a_1 = shift_sint64(i64_0, i).
Let a_2 = shift_uint32(u32_0, i).
Let a_3 = shift_sint32(i32_0, i).
Let a_4 = shift_uint16(u16_0, i).
Let a_5 = shift_sint16(i16_0, i).
Let a_6 = shift_uint8(u8_0, i).
Let a_7 = shift_sint8(i8_0, i).
Let a_8 = shift_sint8(i8_0, 0).
Let a_9 = havoc(Mchar_undef_0, Mchar_0, a_8, 10).
Let a_10 = shift_uint8(u8_0, 0).
Let a_11 = havoc(Mint_undef_6, Mint_6, a_10, 10).
Let a_12 = shift_sint16(i16_0, 0).
Let a_13 = havoc(Mint_undef_0, Mint_0, a_12, 10).
Let a_14 = shift_uint16(u16_0, 0).
Let a_15 = havoc(Mint_undef_3, Mint_3, a_14, 10).
Let a_16 = shift_sint32(i32_0, 0).
Let a_17 = havoc(Mint_undef_1, Mint_1, a_16, 10).
Let a_18 = shift_uint32(u32_0, 0).
Let a_19 = havoc(Mint_undef_4, Mint_4, a_18, 10).
Let a_20 = shift_sint64(i64_0, 0).
Let a_21 = havoc(Mint_undef_2, Mint_2, a_20, 10).
Let a_22 = shift_uint64(u64_0, 0).
Let a_23 = havoc(Mint_undef_5, Mint_5, a_22, 10).
Assume {
Type: is_sint16_chunk(Mint_0) /\ is_sint32_chunk(Mint_1) /\
is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
is_sint16_chunk(a_13[a_5 <- 3]) /\ is_sint32_chunk(a_17[a_3 <- 5]) /\
is_sint64_chunk(a_21[a_1 <- 7]) /\ is_sint8_chunk(a_9[a_7 <- 1]) /\
is_uint16_chunk(a_15[a_4 <- 4]) /\ is_uint32_chunk(a_19[a_2 <- 6]) /\
is_uint64_chunk(a_23[a <- 8]) /\ is_uint8_chunk(a_11[a_6 <- 2]).
(* Heap *)
Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
(region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
(region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
(region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
linked(Malloc_0) /\ sconst(Mchar_0).
(* Pre-condition *)
Have: valid_rw(Malloc_0, a_12, 10) /\ valid_rw(Malloc_0, a_16, 10) /\
valid_rw(Malloc_0, a_20, 10) /\ valid_rw(Malloc_0, a_8, 10) /\
valid_rw(Malloc_0, a_14, 10) /\ valid_rw(Malloc_0, a_18, 10) /\
valid_rw(Malloc_0, a_22, 10) /\ valid_rw(Malloc_0, a_10, 10).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_23[shift_uint64(u64_0, i_1)] = 8))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_21[shift_sint64(i64_0, i_1)] = 7))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_19[shift_uint32(u32_0, i_1)] = 6))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_17[shift_sint32(i32_0, i_1)] = 5))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_15[shift_uint16(u16_0, i_1)] = 4))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_13[shift_sint16(i16_0, i_1)] = 3))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_11[shift_uint8(u8_0, i_1)] = 2))).
(* Invariant *)
Have: forall i_1 : Z. ((0 <= i_1) -> ((i_1 < i) ->
(a_9[shift_sint8(i8_0, i_1)] = 1))).
(* Invariant *)
Have: (0 <= i) /\ (i <= 10).
(* Then *)
Have: i <= 9.
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_7, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_6, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_5, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_4, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_3, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_2, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a_1, 1).
(* Assertion 'rte,mem_access' *)
Have: valid_rw(Malloc_0, a, 1).
}
Prove: i <= 2147483646.
------------------------------------------------------------
Goal Loop assigns (file tests/wp_acsl/chunk_typing.i, line 40) (1/9):
Prove: true.
------------------------------------------------------------
Goal Loop assigns (file tests/wp_acsl/chunk_typing.i, line 40) (2/9):
Effect at line 45
Prove: true.
------------------------------------------------------------
Goal Loop assigns (file tests/wp_acsl/chunk_typing.i, line 40) (3/9):
Effect at line 46
Prove: true.
------------------------------------------------------------
Goal Loop assigns (file tests/wp_acsl/chunk_typing.i, line 40) (4/9):
Effect at line 47
Prove: true.
------------------------------------------------------------
Goal Loop assigns (file tests/wp_acsl/chunk_typing.i, line 40) (5/9):
Effect at line 48
Prove: true.
------------------------------------------------------------
Goal Loop assigns (file tests/wp_acsl/chunk_typing.i, line 40) (6/9):
Effect at line 49
Prove: true.
------------------------------------------------------------
Goal Loop assigns (file tests/wp_acsl/chunk_typing.i, line 40) (7/9):
Effect at line 50
Prove: true.
------------------------------------------------------------
Goal Loop assigns (file tests/wp_acsl/chunk_typing.i, line 40) (8/9):
Effect at line 51
Prove: true.
------------------------------------------------------------
Goal Loop assigns (file tests/wp_acsl/chunk_typing.i, line 40) (9/9):
Effect at line 52
Prove: true.
------------------------------------------------------------
Goal Decreasing of Loop variant at loop (file tests/wp_acsl/chunk_typing.i, line 44):
Prove: true.
------------------------------------------------------------
Goal Positivity of Loop variant at loop (file tests/wp_acsl/chunk_typing.i, line 44):
Prove: true.
------------------------------------------------------------
[wp] Warning: Memory model hypotheses for function 'function':
/*@
......
......@@ -3,7 +3,7 @@
[wp] Running WP plugin...
[wp] Loading driver 'share/wp.driver'
[rte] annotating function function
[wp] 17 goals scheduled
[wp] 39 goals scheduled
[wp] [Alt-Ergo] Goal typed_function_ensures : Valid
[wp] [Alt-Ergo] Goal typed_function_loop_invariant_preserved : Valid
[wp] [Qed] Goal typed_function_loop_invariant_established : Valid
......@@ -21,12 +21,34 @@
[wp] [Qed] Goal typed_function_loop_invariant_7_established : Valid
[wp] [Alt-Ergo] Goal typed_function_loop_invariant_8_preserved : Valid
[wp] [Qed] Goal typed_function_loop_invariant_8_established : Valid
[wp] Proved goals: 17 / 17
Qed: 8
Alt-Ergo: 9
[wp] [Alt-Ergo] Goal typed_function_loop_invariant_9_preserved : Valid
[wp] [Qed] Goal typed_function_loop_invariant_9_established : Valid
[wp] [Alt-Ergo] Goal typed_function_assert_rte_mem_access : Valid
[wp] [Alt-Ergo] Goal typed_function_assert_rte_mem_access_2 : Valid
[wp] [Alt-Ergo] Goal typed_function_assert_rte_mem_access_3 : Valid
[wp] [Alt-Ergo] Goal typed_function_assert_rte_mem_access_4 : Valid
[wp] [Alt-Ergo] Goal typed_function_assert_rte_mem_access_5 : Valid
[wp] [Alt-Ergo] Goal typed_function_assert_rte_mem_access_6 : Valid
[wp] [Alt-Ergo] Goal typed_function_assert_rte_mem_access_7 : Valid
[wp] [Alt-Ergo] Goal typed_function_assert_rte_mem_access_8 : Valid
[wp] [Alt-Ergo] Goal typed_function_assert_rte_signed_overflow : Valid
[wp] [Qed] Goal typed_function_loop_assigns_part1 : Valid
[wp] [Qed] Goal typed_function_loop_assigns_part2 : Valid
[wp] [Qed] Goal typed_function_loop_assigns_part3 : Valid
[wp] [Qed] Goal typed_function_loop_assigns_part4 : Valid
[wp] [Qed] Goal typed_function_loop_assigns_part5 : Valid
[wp] [Qed] Goal typed_function_loop_assigns_part6 : Valid
[wp] [Qed] Goal typed_function_loop_assigns_part7 : Valid
[wp] [Qed] Goal typed_function_loop_assigns_part8 : Valid
[wp] [Qed] Goal typed_function_loop_assigns_part9 : Valid
[wp] [Qed] Goal typed_function_loop_variant_decrease : Valid
[wp] [Qed] Goal typed_function_loop_variant_positive : Valid
[wp] Proved goals: 39 / 39
Qed: 20
Alt-Ergo: 19
------------------------------------------------------------
Functions WP Alt-Ergo Total Success
function 8 9 17 100%
function 20 19 39 100%
------------------------------------------------------------
[wp] Warning: Memory model hypotheses for function 'function':
/*@
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment