From 0d76acb27bf06557a5a32adc67012180eefbd1b4 Mon Sep 17 00:00:00 2001
From: Allan Blanchard <allan.blanchard@cea.fr>
Date: Fri, 29 May 2020 17:37:39 +0200
Subject: [PATCH] [wp] Fix a bug with integer chunk ranks and add a test

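The previous int_rank computed the rank of an integer chunk as
Ctypes.i_bytes k * (if Ctypes.signed k then 2 else 1). This formula is
not injective: SInt8 and UInt16 both map to 2, SInt16 and UInt32 to 4,
and SInt32 and UInt64 to 8, so two distinct integer chunks could receive
the same rank (and hence the same hash, since hash = rank). The fix
assigns one explicit rank per integer kind and renumbers the float,
pointer and allocation chunks accordingly. The new test writes one array
per integer kind so that each chunk appears separately in the generated
proof obligations.

As a standalone illustration of the collision (not part of the patch):
the c_int type and the i_bytes/signed helpers below are simplified
stand-ins for the corresponding Ctypes definitions, not the real API.

  type c_int =
    | CBool
    | UInt8 | SInt8 | UInt16 | SInt16 | UInt32 | SInt32 | UInt64 | SInt64

  let i_bytes = function
    | CBool | UInt8 | SInt8 -> 1
    | UInt16 | SInt16 -> 2
    | UInt32 | SInt32 -> 4
    | UInt64 | SInt64 -> 8

  let signed = function
    | SInt8 | SInt16 | SInt32 | SInt64 -> true
    | _ -> false

  (* Old ranking: byte size, doubled for signed kinds. *)
  let old_rank = function
    | CBool -> 0
    | k -> i_bytes k * (if signed k then 2 else 1)

  let () =
    (* Distinct kinds collapse to the same rank under the old scheme. *)
    assert (old_rank SInt8  = old_rank UInt16);
    assert (old_rank SInt16 = old_rank UInt32);
    assert (old_rank SInt32 = old_rank UInt64)
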
---
 src/plugins/wp/MemTyped.ml                    |  17 +-
 src/plugins/wp/tests/wp_acsl/chunk_typing.i   |  54 +
 .../wp_acsl/oracle/chunk_typing.res.oracle    | 965 ++++++++++++++++++
 .../oracle_qualif/chunk_typing.res.oracle     |  41 +
 4 files changed, 1072 insertions(+), 5 deletions(-)
 create mode 100644 src/plugins/wp/tests/wp_acsl/chunk_typing.i
 create mode 100644 src/plugins/wp/tests/wp_acsl/oracle/chunk_typing.res.oracle
 create mode 100644 src/plugins/wp/tests/wp_acsl/oracle_qualif/chunk_typing.res.oracle

diff --git a/src/plugins/wp/MemTyped.ml b/src/plugins/wp/MemTyped.ml
index fa4cb6ad40a..233ffc233ca 100644
--- a/src/plugins/wp/MemTyped.ml
+++ b/src/plugins/wp/MemTyped.ml
@@ -130,15 +130,22 @@ struct
   let self = "typed"
   let int_rank = function
     | CBool -> 0
-    | k -> Ctypes.i_bytes k * (if Ctypes.signed k then 2 else 1)
+    | UInt8 -> 1
+    | SInt8 -> 2
+    | UInt16 -> 3
+    | SInt16 -> 4
+    | UInt32 -> 5
+    | SInt32 -> 6
+    | UInt64 -> 7
+    | SInt64 -> 8
 
   let rank = function
     | M_char -> -1
     | M_int i -> int_rank i
-    | M_f32 -> 17
-    | M_f64 -> 18
-    | M_pointer -> 19
-    | T_alloc -> 20
+    | M_f32 -> 9
+    | M_f64 -> 10
+    | M_pointer -> 11
+    | T_alloc -> 12
   let hash = rank
   let name = function
     | M_int _ -> "Mint"
diff --git a/src/plugins/wp/tests/wp_acsl/chunk_typing.i b/src/plugins/wp/tests/wp_acsl/chunk_typing.i
new file mode 100644
index 00000000000..69820195dc0
--- /dev/null
+++ b/src/plugins/wp/tests/wp_acsl/chunk_typing.i
@@ -0,0 +1,54 @@
+/* run.config
+   OPT: -wp-rte -wp-prop="-NO_CHECK,-rte"
+*/
+/* run.config_qualif
+   OPT: -wp-rte -wp-prop="-NO_CHECK,-rte"
+*/
+
+const char x[10] ;
+
+/*@
+  requires \valid(&i8[0..9])  && \valid(&u8[0..9])  && \valid(&i16[0..9]) && \valid(&u16[0..9])
+        && \valid(&i32[0..9]) && \valid(&u32[0..9]) && \valid(&i64[0..9]) && \valid(&u64[0..9]);
+  ensures \forall integer k ;
+          0 <= k < 10 ==>
+            x[k]+8   ==
+            i8[k]+7  == u8[k]+6 ==
+            i16[k]+5 == u16[k]+4 ==
+            i32[k]+3 == u32[k]+2 ==
+            i64[k]+1 == u64[k];
+*/
+void function(signed char i8[10],
+              unsigned char u8[10],
+              signed short i16[10],
+              unsigned short u16[10],
+              signed int i32[10],
+              unsigned int u32[10],
+              signed long long int i64[10],
+              unsigned long long int u64[10])
+{
+  /*@
+    loop invariant NO_CHECK: 0 <= i <= 10;
+    loop invariant \forall integer k ; 0 <= k < i ==> i8[k] == 1 ;
+    loop invariant \forall integer k ; 0 <= k < i ==> u8[k] == 2 ;
+    loop invariant \forall integer k ; 0 <= k < i ==> i16[k] == 3 ;
+    loop invariant \forall integer k ; 0 <= k < i ==> u16[k] == 4 ;
+    loop invariant \forall integer k ; 0 <= k < i ==> i32[k] == 5 ;
+    loop invariant \forall integer k ; 0 <= k < i ==> u32[k] == 6 ;
+    loop invariant \forall integer k ; 0 <= k < i ==> i64[k] == 7 ;
+    loop invariant \forall integer k ; 0 <= k < i ==> u64[k] == 8 ;
+    loop assigns NO_CHECK: i, i8[0..9], u8[0..9], i16[0..9], u16[0..9], i32[0..9], u32[0..9], i64[0..9], u64[0..9] ;
+    loop variant NO_CHECK: 10-i;
+  */
+  for(int i = 0; i < 10; ++i){
+    i8[i] = 1 ;
+    u8[i] = 2 ;
+    i16[i] = 3 ;
+    u16[i] = 4 ;
+    i32[i] = 5 ;
+    u32[i] = 6 ;
+    i64[i] = 7 ;
+    u64[i] = 8 ;
+  }
+}
+
diff --git a/src/plugins/wp/tests/wp_acsl/oracle/chunk_typing.res.oracle b/src/plugins/wp/tests/wp_acsl/oracle/chunk_typing.res.oracle
new file mode 100644
index 00000000000..72149c7bf3d
--- /dev/null
+++ b/src/plugins/wp/tests/wp_acsl/oracle/chunk_typing.res.oracle
@@ -0,0 +1,965 @@
+# frama-c -wp -wp-rte [...]
+[kernel] Parsing tests/wp_acsl/chunk_typing.i (no preprocessing)
+[wp] Running WP plugin...
+[wp] Loading driver 'share/wp.driver'
+[rte] annotating function function
+------------------------------------------------------------
+  Function function
+------------------------------------------------------------
+
+Goal Post-condition (file tests/wp_acsl/chunk_typing.i, line 13) in 'function':
+Let a = shift_sint8(i8_0, 0).
+Let a_1 = havoc(Mchar_undef_0, Mchar_0, a, 10).
+Let a_2 = shift_uint8(u8_0, 0).
+Let a_3 = havoc(Mint_undef_0, Mint_0, a_2, 10).
+Let a_4 = shift_sint16(i16_0, 0).
+Let a_5 = havoc(Mint_undef_1, Mint_1, a_4, 10).
+Let a_6 = shift_uint16(u16_0, 0).
+Let a_7 = havoc(Mint_undef_2, Mint_2, a_6, 10).
+Let a_8 = shift_sint32(i32_0, 0).
+Let a_9 = havoc(Mint_undef_3, Mint_3, a_8, 10).
+Let a_10 = shift_uint32(u32_0, 0).
+Let a_11 = havoc(Mint_undef_4, Mint_4, a_10, 10).
+Let a_12 = shift_sint64(i64_0, 0).
+Let a_13 = havoc(Mint_undef_5, Mint_5, a_12, 10).
+Let a_14 = shift_uint64(u64_0, 0).
+Let a_15 = havoc(Mint_undef_6, Mint_6, a_14, 10).
+Let a_16 = a_1[shift_sint8(i8_0, i)].
+Let a_17 = a_3[shift_uint8(u8_0, i)].
+Let a_18 = a_5[shift_sint16(i16_0, i)].
+Let a_19 = a_7[shift_uint16(u16_0, i)].
+Let a_20 = a_9[shift_sint32(i32_0, i)].
+Let a_21 = a_11[shift_uint32(u32_0, i)].
+Let a_22 = a_13[shift_sint64(i64_0, i)].
+Assume {
+  Type: is_sint16_chunk(Mint_1) /\ is_sint32_chunk(Mint_3) /\
+      is_sint64_chunk(Mint_5) /\ is_sint8_chunk(Mchar_0) /\
+      is_uint16_chunk(Mint_2) /\ is_uint32_chunk(Mint_4) /\
+      is_uint64_chunk(Mint_6) /\ is_uint8_chunk(Mint_0) /\ is_sint32(i_1) /\
+      is_sint16_chunk(a_5) /\ is_sint32_chunk(a_9) /\
+      is_sint64_chunk(a_13) /\ is_sint8_chunk(a_1) /\ is_uint16_chunk(a_7) /\
+      is_uint32_chunk(a_11) /\ is_uint64_chunk(a_15) /\ is_uint8_chunk(a_3).
+  (* Heap *)
+  Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
+      (region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
+      (region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
+      (region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
+      IsArray1_sint8(x) /\ linked(Malloc_0) /\ sconst(Mchar_0).
+  (* Goal *)
+  When: (0 <= i) /\ (i <= 9).
+  (* Initializer *)
+  Init: forall i_2 : Z. ((0 <= i_2) -> ((i_2 <= 9) -> (x[i_2] = 0))).
+  (* Pre-condition *)
+  Have: valid_rw(Malloc_0, a_4, 10) /\ valid_rw(Malloc_0, a_8, 10) /\
+      valid_rw(Malloc_0, a_12, 10) /\ valid_rw(Malloc_0, a, 10) /\
+      valid_rw(Malloc_0, a_6, 10) /\ valid_rw(Malloc_0, a_10, 10) /\
+      valid_rw(Malloc_0, a_14, 10) /\ valid_rw(Malloc_0, a_2, 10).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i_1) ->
+      (a_15[shift_uint64(u64_0, i_2)] = 8))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i_1) ->
+      (a_13[shift_sint64(i64_0, i_2)] = 7))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i_1) ->
+      (a_11[shift_uint32(u32_0, i_2)] = 6))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i_1) ->
+      (a_9[shift_sint32(i32_0, i_2)] = 5))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i_1) ->
+      (a_7[shift_uint16(u16_0, i_2)] = 4))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i_1) ->
+      (a_5[shift_sint16(i16_0, i_2)] = 3))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i_1) ->
+      (a_3[shift_uint8(u8_0, i_2)] = 2))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i_1) ->
+      (a_1[shift_sint8(i8_0, i_2)] = 1))).
+  (* Invariant 'NO_CHECK' *)
+  Have: (0 <= i_1) /\ (i_1 <= 10).
+  (* Else *)
+  Have: 10 <= i_1.
+}
+Prove: (a_16 = (1 + x[i])) /\ (a_17 = (1 + a_16)) /\ (a_18 = (1 + a_17)) /\
+    (a_19 = (1 + a_18)) /\ (a_20 = (1 + a_19)) /\ (a_21 = (1 + a_20)) /\
+    (a_22 = (1 + a_21)) /\ (a_15[shift_uint64(u64_0, i)] = (1 + a_22)).
+
+------------------------------------------------------------
+
+Goal Preservation of Invariant (file tests/wp_acsl/chunk_typing.i, line 32):
+Let a = shift_uint64(u64_0, i).
+Let a_1 = shift_sint64(i64_0, i).
+Let a_2 = shift_uint32(u32_0, i).
+Let a_3 = shift_sint32(i32_0, i).
+Let a_4 = shift_uint16(u16_0, i).
+Let a_5 = shift_sint16(i16_0, i).
+Let a_6 = shift_uint8(u8_0, i).
+Let a_7 = shift_sint8(i8_0, i).
+Let a_8 = shift_sint8(i8_0, 0).
+Let a_9 = havoc(Mchar_undef_0, Mchar_0, a_8, 10).
+Let a_10 = shift_uint8(u8_0, 0).
+Let a_11 = havoc(Mint_undef_6, Mint_6, a_10, 10).
+Let a_12 = shift_sint16(i16_0, 0).
+Let a_13 = havoc(Mint_undef_0, Mint_0, a_12, 10).
+Let a_14 = shift_uint16(u16_0, 0).
+Let a_15 = havoc(Mint_undef_3, Mint_3, a_14, 10).
+Let a_16 = shift_sint32(i32_0, 0).
+Let a_17 = havoc(Mint_undef_1, Mint_1, a_16, 10).
+Let a_18 = shift_uint32(u32_0, 0).
+Let a_19 = havoc(Mint_undef_4, Mint_4, a_18, 10).
+Let a_20 = shift_sint64(i64_0, 0).
+Let a_21 = havoc(Mint_undef_2, Mint_2, a_20, 10).
+Let a_22 = shift_uint64(u64_0, 0).
+Let a_23 = havoc(Mint_undef_5, Mint_5, a_22, 10).
+Let a_24 = a_9[a_7 <- 1].
+Assume {
+  Type: is_sint16_chunk(Mint_0) /\ is_sint32_chunk(Mint_1) /\
+      is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
+      is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
+      is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
+      is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
+      is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
+      is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
+      is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
+      is_sint16_chunk(a_13[a_5 <- 3]) /\ is_sint32_chunk(a_17[a_3 <- 5]) /\
+      is_sint64_chunk(a_21[a_1 <- 7]) /\ is_sint8_chunk(a_24) /\
+      is_uint16_chunk(a_15[a_4 <- 4]) /\ is_uint32_chunk(a_19[a_2 <- 6]) /\
+      is_uint64_chunk(a_23[a <- 8]) /\ is_uint8_chunk(a_11[a_6 <- 2]).
+  (* Heap *)
+  Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
+      (region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
+      (region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
+      (region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
+      linked(Malloc_0) /\ sconst(Mchar_0).
+  (* Goal *)
+  When: (0 <= i_1) /\ (i_1 <= i).
+  (* Pre-condition *)
+  Have: valid_rw(Malloc_0, a_12, 10) /\ valid_rw(Malloc_0, a_16, 10) /\
+      valid_rw(Malloc_0, a_20, 10) /\ valid_rw(Malloc_0, a_8, 10) /\
+      valid_rw(Malloc_0, a_14, 10) /\ valid_rw(Malloc_0, a_18, 10) /\
+      valid_rw(Malloc_0, a_22, 10) /\ valid_rw(Malloc_0, a_10, 10).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_23[shift_uint64(u64_0, i_2)] = 8))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_21[shift_sint64(i64_0, i_2)] = 7))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_19[shift_uint32(u32_0, i_2)] = 6))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_17[shift_sint32(i32_0, i_2)] = 5))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_15[shift_uint16(u16_0, i_2)] = 4))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_13[shift_sint16(i16_0, i_2)] = 3))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_11[shift_uint8(u8_0, i_2)] = 2))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_9[shift_sint8(i8_0, i_2)] = 1))).
+  (* Invariant 'NO_CHECK' *)
+  Have: (0 <= i) /\ (i <= 10).
+  (* Then *)
+  Have: i <= 9.
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_7, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_6, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_5, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_4, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_3, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_2, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_1, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a, 1).
+  (* Assertion 'rte,signed_overflow' *)
+  Have: i <= 2147483646.
+}
+Prove: a_24[shift_sint8(i8_0, i_1)] = 1.
+
+------------------------------------------------------------
+
+Goal Establishment of Invariant (file tests/wp_acsl/chunk_typing.i, line 32):
+Prove: true.
+
+------------------------------------------------------------
+
+Goal Preservation of Invariant (file tests/wp_acsl/chunk_typing.i, line 33):
+Let a = shift_uint64(u64_0, i).
+Let a_1 = shift_sint64(i64_0, i).
+Let a_2 = shift_uint32(u32_0, i).
+Let a_3 = shift_sint32(i32_0, i).
+Let a_4 = shift_uint16(u16_0, i).
+Let a_5 = shift_sint16(i16_0, i).
+Let a_6 = shift_uint8(u8_0, i).
+Let a_7 = shift_sint8(i8_0, i).
+Let a_8 = shift_sint8(i8_0, 0).
+Let a_9 = havoc(Mchar_undef_0, Mchar_0, a_8, 10).
+Let a_10 = shift_uint8(u8_0, 0).
+Let a_11 = havoc(Mint_undef_0, Mint_0, a_10, 10).
+Let a_12 = shift_sint16(i16_0, 0).
+Let a_13 = havoc(Mint_undef_1, Mint_1, a_12, 10).
+Let a_14 = shift_uint16(u16_0, 0).
+Let a_15 = havoc(Mint_undef_4, Mint_4, a_14, 10).
+Let a_16 = shift_sint32(i32_0, 0).
+Let a_17 = havoc(Mint_undef_2, Mint_2, a_16, 10).
+Let a_18 = shift_uint32(u32_0, 0).
+Let a_19 = havoc(Mint_undef_5, Mint_5, a_18, 10).
+Let a_20 = shift_sint64(i64_0, 0).
+Let a_21 = havoc(Mint_undef_3, Mint_3, a_20, 10).
+Let a_22 = shift_uint64(u64_0, 0).
+Let a_23 = havoc(Mint_undef_6, Mint_6, a_22, 10).
+Let a_24 = a_11[a_6 <- 2].
+Assume {
+  Type: is_sint16_chunk(Mint_1) /\ is_sint32_chunk(Mint_2) /\
+      is_sint64_chunk(Mint_3) /\ is_sint8_chunk(Mchar_0) /\
+      is_uint16_chunk(Mint_4) /\ is_uint32_chunk(Mint_5) /\
+      is_uint64_chunk(Mint_6) /\ is_uint8_chunk(Mint_0) /\ is_sint32(i) /\
+      is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
+      is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
+      is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
+      is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
+      is_sint16_chunk(a_13[a_5 <- 3]) /\ is_sint32_chunk(a_17[a_3 <- 5]) /\
+      is_sint64_chunk(a_21[a_1 <- 7]) /\ is_sint8_chunk(a_9[a_7 <- 1]) /\
+      is_uint16_chunk(a_15[a_4 <- 4]) /\ is_uint32_chunk(a_19[a_2 <- 6]) /\
+      is_uint64_chunk(a_23[a <- 8]) /\ is_uint8_chunk(a_24).
+  (* Heap *)
+  Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
+      (region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
+      (region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
+      (region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
+      linked(Malloc_0) /\ sconst(Mchar_0).
+  (* Goal *)
+  When: (0 <= i_1) /\ (i_1 <= i).
+  (* Pre-condition *)
+  Have: valid_rw(Malloc_0, a_12, 10) /\ valid_rw(Malloc_0, a_16, 10) /\
+      valid_rw(Malloc_0, a_20, 10) /\ valid_rw(Malloc_0, a_8, 10) /\
+      valid_rw(Malloc_0, a_14, 10) /\ valid_rw(Malloc_0, a_18, 10) /\
+      valid_rw(Malloc_0, a_22, 10) /\ valid_rw(Malloc_0, a_10, 10).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_23[shift_uint64(u64_0, i_2)] = 8))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_21[shift_sint64(i64_0, i_2)] = 7))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_19[shift_uint32(u32_0, i_2)] = 6))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_17[shift_sint32(i32_0, i_2)] = 5))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_15[shift_uint16(u16_0, i_2)] = 4))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_13[shift_sint16(i16_0, i_2)] = 3))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_11[shift_uint8(u8_0, i_2)] = 2))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_9[shift_sint8(i8_0, i_2)] = 1))).
+  (* Invariant 'NO_CHECK' *)
+  Have: (0 <= i) /\ (i <= 10).
+  (* Then *)
+  Have: i <= 9.
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_7, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_6, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_5, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_4, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_3, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_2, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_1, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a, 1).
+  (* Assertion 'rte,signed_overflow' *)
+  Have: i <= 2147483646.
+}
+Prove: a_24[shift_uint8(u8_0, i_1)] = 2.
+
+------------------------------------------------------------
+
+Goal Establishment of Invariant (file tests/wp_acsl/chunk_typing.i, line 33):
+Prove: true.
+
+------------------------------------------------------------
+
+Goal Preservation of Invariant (file tests/wp_acsl/chunk_typing.i, line 34):
+Let a = shift_uint64(u64_0, i).
+Let a_1 = shift_sint64(i64_0, i).
+Let a_2 = shift_uint32(u32_0, i).
+Let a_3 = shift_sint32(i32_0, i).
+Let a_4 = shift_uint16(u16_0, i).
+Let a_5 = shift_sint16(i16_0, i).
+Let a_6 = shift_uint8(u8_0, i).
+Let a_7 = shift_sint8(i8_0, i).
+Let a_8 = shift_sint8(i8_0, 0).
+Let a_9 = havoc(Mchar_undef_0, Mchar_0, a_8, 10).
+Let a_10 = shift_uint8(u8_0, 0).
+Let a_11 = havoc(Mint_undef_6, Mint_6, a_10, 10).
+Let a_12 = shift_sint16(i16_0, 0).
+Let a_13 = havoc(Mint_undef_0, Mint_0, a_12, 10).
+Let a_14 = shift_uint16(u16_0, 0).
+Let a_15 = havoc(Mint_undef_3, Mint_3, a_14, 10).
+Let a_16 = shift_sint32(i32_0, 0).
+Let a_17 = havoc(Mint_undef_1, Mint_1, a_16, 10).
+Let a_18 = shift_uint32(u32_0, 0).
+Let a_19 = havoc(Mint_undef_4, Mint_4, a_18, 10).
+Let a_20 = shift_sint64(i64_0, 0).
+Let a_21 = havoc(Mint_undef_2, Mint_2, a_20, 10).
+Let a_22 = shift_uint64(u64_0, 0).
+Let a_23 = havoc(Mint_undef_5, Mint_5, a_22, 10).
+Let a_24 = a_13[a_5 <- 3].
+Assume {
+  Type: is_sint16_chunk(Mint_0) /\ is_sint32_chunk(Mint_1) /\
+      is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
+      is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
+      is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
+      is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
+      is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
+      is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
+      is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
+      is_sint16_chunk(a_24) /\ is_sint32_chunk(a_17[a_3 <- 5]) /\
+      is_sint64_chunk(a_21[a_1 <- 7]) /\ is_sint8_chunk(a_9[a_7 <- 1]) /\
+      is_uint16_chunk(a_15[a_4 <- 4]) /\ is_uint32_chunk(a_19[a_2 <- 6]) /\
+      is_uint64_chunk(a_23[a <- 8]) /\ is_uint8_chunk(a_11[a_6 <- 2]).
+  (* Heap *)
+  Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
+      (region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
+      (region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
+      (region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
+      linked(Malloc_0) /\ sconst(Mchar_0).
+  (* Goal *)
+  When: (0 <= i_1) /\ (i_1 <= i).
+  (* Pre-condition *)
+  Have: valid_rw(Malloc_0, a_12, 10) /\ valid_rw(Malloc_0, a_16, 10) /\
+      valid_rw(Malloc_0, a_20, 10) /\ valid_rw(Malloc_0, a_8, 10) /\
+      valid_rw(Malloc_0, a_14, 10) /\ valid_rw(Malloc_0, a_18, 10) /\
+      valid_rw(Malloc_0, a_22, 10) /\ valid_rw(Malloc_0, a_10, 10).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_23[shift_uint64(u64_0, i_2)] = 8))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_21[shift_sint64(i64_0, i_2)] = 7))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_19[shift_uint32(u32_0, i_2)] = 6))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_17[shift_sint32(i32_0, i_2)] = 5))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_15[shift_uint16(u16_0, i_2)] = 4))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_13[shift_sint16(i16_0, i_2)] = 3))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_11[shift_uint8(u8_0, i_2)] = 2))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_9[shift_sint8(i8_0, i_2)] = 1))).
+  (* Invariant 'NO_CHECK' *)
+  Have: (0 <= i) /\ (i <= 10).
+  (* Then *)
+  Have: i <= 9.
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_7, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_6, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_5, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_4, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_3, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_2, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_1, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a, 1).
+  (* Assertion 'rte,signed_overflow' *)
+  Have: i <= 2147483646.
+}
+Prove: a_24[shift_sint16(i16_0, i_1)] = 3.
+
+------------------------------------------------------------
+
+Goal Establishment of Invariant (file tests/wp_acsl/chunk_typing.i, line 34):
+Prove: true.
+
+------------------------------------------------------------
+
+Goal Preservation of Invariant (file tests/wp_acsl/chunk_typing.i, line 35):
+Let a = shift_uint64(u64_0, i).
+Let a_1 = shift_sint64(i64_0, i).
+Let a_2 = shift_uint32(u32_0, i).
+Let a_3 = shift_sint32(i32_0, i).
+Let a_4 = shift_uint16(u16_0, i).
+Let a_5 = shift_sint16(i16_0, i).
+Let a_6 = shift_uint8(u8_0, i).
+Let a_7 = shift_sint8(i8_0, i).
+Let a_8 = shift_sint8(i8_0, 0).
+Let a_9 = havoc(Mchar_undef_0, Mchar_0, a_8, 10).
+Let a_10 = shift_uint8(u8_0, 0).
+Let a_11 = havoc(Mint_undef_6, Mint_6, a_10, 10).
+Let a_12 = shift_sint16(i16_0, 0).
+Let a_13 = havoc(Mint_undef_1, Mint_1, a_12, 10).
+Let a_14 = shift_uint16(u16_0, 0).
+Let a_15 = havoc(Mint_undef_0, Mint_0, a_14, 10).
+Let a_16 = shift_sint32(i32_0, 0).
+Let a_17 = havoc(Mint_undef_2, Mint_2, a_16, 10).
+Let a_18 = shift_uint32(u32_0, 0).
+Let a_19 = havoc(Mint_undef_4, Mint_4, a_18, 10).
+Let a_20 = shift_sint64(i64_0, 0).
+Let a_21 = havoc(Mint_undef_3, Mint_3, a_20, 10).
+Let a_22 = shift_uint64(u64_0, 0).
+Let a_23 = havoc(Mint_undef_5, Mint_5, a_22, 10).
+Let a_24 = a_15[a_4 <- 4].
+Assume {
+  Type: is_sint16_chunk(Mint_1) /\ is_sint32_chunk(Mint_2) /\
+      is_sint64_chunk(Mint_3) /\ is_sint8_chunk(Mchar_0) /\
+      is_uint16_chunk(Mint_0) /\ is_uint32_chunk(Mint_4) /\
+      is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
+      is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
+      is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
+      is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
+      is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
+      is_sint16_chunk(a_13[a_5 <- 3]) /\ is_sint32_chunk(a_17[a_3 <- 5]) /\
+      is_sint64_chunk(a_21[a_1 <- 7]) /\ is_sint8_chunk(a_9[a_7 <- 1]) /\
+      is_uint16_chunk(a_24) /\ is_uint32_chunk(a_19[a_2 <- 6]) /\
+      is_uint64_chunk(a_23[a <- 8]) /\ is_uint8_chunk(a_11[a_6 <- 2]).
+  (* Heap *)
+  Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
+      (region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
+      (region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
+      (region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
+      linked(Malloc_0) /\ sconst(Mchar_0).
+  (* Goal *)
+  When: (0 <= i_1) /\ (i_1 <= i).
+  (* Pre-condition *)
+  Have: valid_rw(Malloc_0, a_12, 10) /\ valid_rw(Malloc_0, a_16, 10) /\
+      valid_rw(Malloc_0, a_20, 10) /\ valid_rw(Malloc_0, a_8, 10) /\
+      valid_rw(Malloc_0, a_14, 10) /\ valid_rw(Malloc_0, a_18, 10) /\
+      valid_rw(Malloc_0, a_22, 10) /\ valid_rw(Malloc_0, a_10, 10).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_23[shift_uint64(u64_0, i_2)] = 8))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_21[shift_sint64(i64_0, i_2)] = 7))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_19[shift_uint32(u32_0, i_2)] = 6))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_17[shift_sint32(i32_0, i_2)] = 5))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_15[shift_uint16(u16_0, i_2)] = 4))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_13[shift_sint16(i16_0, i_2)] = 3))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_11[shift_uint8(u8_0, i_2)] = 2))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_9[shift_sint8(i8_0, i_2)] = 1))).
+  (* Invariant 'NO_CHECK' *)
+  Have: (0 <= i) /\ (i <= 10).
+  (* Then *)
+  Have: i <= 9.
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_7, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_6, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_5, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_4, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_3, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_2, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_1, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a, 1).
+  (* Assertion 'rte,signed_overflow' *)
+  Have: i <= 2147483646.
+}
+Prove: a_24[shift_uint16(u16_0, i_1)] = 4.
+
+------------------------------------------------------------
+
+Goal Establishment of Invariant (file tests/wp_acsl/chunk_typing.i, line 35):
+Prove: true.
+
+------------------------------------------------------------
+
+Goal Preservation of Invariant (file tests/wp_acsl/chunk_typing.i, line 36):
+Let a = shift_uint64(u64_0, i).
+Let a_1 = shift_sint64(i64_0, i).
+Let a_2 = shift_uint32(u32_0, i).
+Let a_3 = shift_sint32(i32_0, i).
+Let a_4 = shift_uint16(u16_0, i).
+Let a_5 = shift_sint16(i16_0, i).
+Let a_6 = shift_uint8(u8_0, i).
+Let a_7 = shift_sint8(i8_0, i).
+Let a_8 = shift_sint8(i8_0, 0).
+Let a_9 = havoc(Mchar_undef_0, Mchar_0, a_8, 10).
+Let a_10 = shift_uint8(u8_0, 0).
+Let a_11 = havoc(Mint_undef_6, Mint_6, a_10, 10).
+Let a_12 = shift_sint16(i16_0, 0).
+Let a_13 = havoc(Mint_undef_1, Mint_1, a_12, 10).
+Let a_14 = shift_uint16(u16_0, 0).
+Let a_15 = havoc(Mint_undef_3, Mint_3, a_14, 10).
+Let a_16 = shift_sint32(i32_0, 0).
+Let a_17 = havoc(Mint_undef_0, Mint_0, a_16, 10).
+Let a_18 = shift_uint32(u32_0, 0).
+Let a_19 = havoc(Mint_undef_4, Mint_4, a_18, 10).
+Let a_20 = shift_sint64(i64_0, 0).
+Let a_21 = havoc(Mint_undef_2, Mint_2, a_20, 10).
+Let a_22 = shift_uint64(u64_0, 0).
+Let a_23 = havoc(Mint_undef_5, Mint_5, a_22, 10).
+Let a_24 = a_17[a_3 <- 5].
+Assume {
+  Type: is_sint16_chunk(Mint_1) /\ is_sint32_chunk(Mint_0) /\
+      is_sint64_chunk(Mint_2) /\ is_sint8_chunk(Mchar_0) /\
+      is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
+      is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
+      is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
+      is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
+      is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
+      is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
+      is_sint16_chunk(a_13[a_5 <- 3]) /\ is_sint32_chunk(a_24) /\
+      is_sint64_chunk(a_21[a_1 <- 7]) /\ is_sint8_chunk(a_9[a_7 <- 1]) /\
+      is_uint16_chunk(a_15[a_4 <- 4]) /\ is_uint32_chunk(a_19[a_2 <- 6]) /\
+      is_uint64_chunk(a_23[a <- 8]) /\ is_uint8_chunk(a_11[a_6 <- 2]).
+  (* Heap *)
+  Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
+      (region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
+      (region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
+      (region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
+      linked(Malloc_0) /\ sconst(Mchar_0).
+  (* Goal *)
+  When: (0 <= i_1) /\ (i_1 <= i).
+  (* Pre-condition *)
+  Have: valid_rw(Malloc_0, a_12, 10) /\ valid_rw(Malloc_0, a_16, 10) /\
+      valid_rw(Malloc_0, a_20, 10) /\ valid_rw(Malloc_0, a_8, 10) /\
+      valid_rw(Malloc_0, a_14, 10) /\ valid_rw(Malloc_0, a_18, 10) /\
+      valid_rw(Malloc_0, a_22, 10) /\ valid_rw(Malloc_0, a_10, 10).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_23[shift_uint64(u64_0, i_2)] = 8))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_21[shift_sint64(i64_0, i_2)] = 7))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_19[shift_uint32(u32_0, i_2)] = 6))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_17[shift_sint32(i32_0, i_2)] = 5))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_15[shift_uint16(u16_0, i_2)] = 4))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_13[shift_sint16(i16_0, i_2)] = 3))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_11[shift_uint8(u8_0, i_2)] = 2))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_9[shift_sint8(i8_0, i_2)] = 1))).
+  (* Invariant 'NO_CHECK' *)
+  Have: (0 <= i) /\ (i <= 10).
+  (* Then *)
+  Have: i <= 9.
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_7, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_6, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_5, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_4, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_3, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_2, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_1, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a, 1).
+  (* Assertion 'rte,signed_overflow' *)
+  Have: i <= 2147483646.
+}
+Prove: a_24[shift_sint32(i32_0, i_1)] = 5.
+
+------------------------------------------------------------
+
+Goal Establishment of Invariant (file tests/wp_acsl/chunk_typing.i, line 36):
+Prove: true.
+
+------------------------------------------------------------
+
+Goal Preservation of Invariant (file tests/wp_acsl/chunk_typing.i, line 37):
+Let a = shift_uint64(u64_0, i).
+Let a_1 = shift_sint64(i64_0, i).
+Let a_2 = shift_uint32(u32_0, i).
+Let a_3 = shift_sint32(i32_0, i).
+Let a_4 = shift_uint16(u16_0, i).
+Let a_5 = shift_sint16(i16_0, i).
+Let a_6 = shift_uint8(u8_0, i).
+Let a_7 = shift_sint8(i8_0, i).
+Let a_8 = shift_sint8(i8_0, 0).
+Let a_9 = havoc(Mchar_undef_0, Mchar_0, a_8, 10).
+Let a_10 = shift_uint8(u8_0, 0).
+Let a_11 = havoc(Mint_undef_6, Mint_6, a_10, 10).
+Let a_12 = shift_sint16(i16_0, 0).
+Let a_13 = havoc(Mint_undef_1, Mint_1, a_12, 10).
+Let a_14 = shift_uint16(u16_0, 0).
+Let a_15 = havoc(Mint_undef_4, Mint_4, a_14, 10).
+Let a_16 = shift_sint32(i32_0, 0).
+Let a_17 = havoc(Mint_undef_2, Mint_2, a_16, 10).
+Let a_18 = shift_uint32(u32_0, 0).
+Let a_19 = havoc(Mint_undef_0, Mint_0, a_18, 10).
+Let a_20 = shift_sint64(i64_0, 0).
+Let a_21 = havoc(Mint_undef_3, Mint_3, a_20, 10).
+Let a_22 = shift_uint64(u64_0, 0).
+Let a_23 = havoc(Mint_undef_5, Mint_5, a_22, 10).
+Let a_24 = a_19[a_2 <- 6].
+Assume {
+  Type: is_sint16_chunk(Mint_1) /\ is_sint32_chunk(Mint_2) /\
+      is_sint64_chunk(Mint_3) /\ is_sint8_chunk(Mchar_0) /\
+      is_uint16_chunk(Mint_4) /\ is_uint32_chunk(Mint_0) /\
+      is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
+      is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
+      is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
+      is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
+      is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
+      is_sint16_chunk(a_13[a_5 <- 3]) /\ is_sint32_chunk(a_17[a_3 <- 5]) /\
+      is_sint64_chunk(a_21[a_1 <- 7]) /\ is_sint8_chunk(a_9[a_7 <- 1]) /\
+      is_uint16_chunk(a_15[a_4 <- 4]) /\ is_uint32_chunk(a_24) /\
+      is_uint64_chunk(a_23[a <- 8]) /\ is_uint8_chunk(a_11[a_6 <- 2]).
+  (* Heap *)
+  Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
+      (region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
+      (region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
+      (region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
+      linked(Malloc_0) /\ sconst(Mchar_0).
+  (* Goal *)
+  When: (0 <= i_1) /\ (i_1 <= i).
+  (* Pre-condition *)
+  Have: valid_rw(Malloc_0, a_12, 10) /\ valid_rw(Malloc_0, a_16, 10) /\
+      valid_rw(Malloc_0, a_20, 10) /\ valid_rw(Malloc_0, a_8, 10) /\
+      valid_rw(Malloc_0, a_14, 10) /\ valid_rw(Malloc_0, a_18, 10) /\
+      valid_rw(Malloc_0, a_22, 10) /\ valid_rw(Malloc_0, a_10, 10).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_23[shift_uint64(u64_0, i_2)] = 8))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_21[shift_sint64(i64_0, i_2)] = 7))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_19[shift_uint32(u32_0, i_2)] = 6))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_17[shift_sint32(i32_0, i_2)] = 5))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_15[shift_uint16(u16_0, i_2)] = 4))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_13[shift_sint16(i16_0, i_2)] = 3))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_11[shift_uint8(u8_0, i_2)] = 2))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_9[shift_sint8(i8_0, i_2)] = 1))).
+  (* Invariant 'NO_CHECK' *)
+  Have: (0 <= i) /\ (i <= 10).
+  (* Then *)
+  Have: i <= 9.
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_7, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_6, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_5, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_4, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_3, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_2, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_1, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a, 1).
+  (* Assertion 'rte,signed_overflow' *)
+  Have: i <= 2147483646.
+}
+Prove: a_24[shift_uint32(u32_0, i_1)] = 6.
+
+------------------------------------------------------------
+
+Goal Establishment of Invariant (file tests/wp_acsl/chunk_typing.i, line 37):
+Prove: true.
+
+------------------------------------------------------------
+
+Goal Preservation of Invariant (file tests/wp_acsl/chunk_typing.i, line 38):
+Let a = shift_uint64(u64_0, i).
+Let a_1 = shift_sint64(i64_0, i).
+Let a_2 = shift_uint32(u32_0, i).
+Let a_3 = shift_sint32(i32_0, i).
+Let a_4 = shift_uint16(u16_0, i).
+Let a_5 = shift_sint16(i16_0, i).
+Let a_6 = shift_uint8(u8_0, i).
+Let a_7 = shift_sint8(i8_0, i).
+Let a_8 = shift_sint8(i8_0, 0).
+Let a_9 = havoc(Mchar_undef_0, Mchar_0, a_8, 10).
+Let a_10 = shift_uint8(u8_0, 0).
+Let a_11 = havoc(Mint_undef_6, Mint_6, a_10, 10).
+Let a_12 = shift_sint16(i16_0, 0).
+Let a_13 = havoc(Mint_undef_1, Mint_1, a_12, 10).
+Let a_14 = shift_uint16(u16_0, 0).
+Let a_15 = havoc(Mint_undef_3, Mint_3, a_14, 10).
+Let a_16 = shift_sint32(i32_0, 0).
+Let a_17 = havoc(Mint_undef_2, Mint_2, a_16, 10).
+Let a_18 = shift_uint32(u32_0, 0).
+Let a_19 = havoc(Mint_undef_4, Mint_4, a_18, 10).
+Let a_20 = shift_sint64(i64_0, 0).
+Let a_21 = havoc(Mint_undef_0, Mint_0, a_20, 10).
+Let a_22 = shift_uint64(u64_0, 0).
+Let a_23 = havoc(Mint_undef_5, Mint_5, a_22, 10).
+Let a_24 = a_21[a_1 <- 7].
+Assume {
+  Type: is_sint16_chunk(Mint_1) /\ is_sint32_chunk(Mint_2) /\
+      is_sint64_chunk(Mint_0) /\ is_sint8_chunk(Mchar_0) /\
+      is_uint16_chunk(Mint_3) /\ is_uint32_chunk(Mint_4) /\
+      is_uint64_chunk(Mint_5) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
+      is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
+      is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
+      is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
+      is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
+      is_sint16_chunk(a_13[a_5 <- 3]) /\ is_sint32_chunk(a_17[a_3 <- 5]) /\
+      is_sint64_chunk(a_24) /\ is_sint8_chunk(a_9[a_7 <- 1]) /\
+      is_uint16_chunk(a_15[a_4 <- 4]) /\ is_uint32_chunk(a_19[a_2 <- 6]) /\
+      is_uint64_chunk(a_23[a <- 8]) /\ is_uint8_chunk(a_11[a_6 <- 2]).
+  (* Heap *)
+  Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
+      (region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
+      (region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
+      (region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
+      linked(Malloc_0) /\ sconst(Mchar_0).
+  (* Goal *)
+  When: (0 <= i_1) /\ (i_1 <= i).
+  (* Pre-condition *)
+  Have: valid_rw(Malloc_0, a_12, 10) /\ valid_rw(Malloc_0, a_16, 10) /\
+      valid_rw(Malloc_0, a_20, 10) /\ valid_rw(Malloc_0, a_8, 10) /\
+      valid_rw(Malloc_0, a_14, 10) /\ valid_rw(Malloc_0, a_18, 10) /\
+      valid_rw(Malloc_0, a_22, 10) /\ valid_rw(Malloc_0, a_10, 10).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_23[shift_uint64(u64_0, i_2)] = 8))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_21[shift_sint64(i64_0, i_2)] = 7))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_19[shift_uint32(u32_0, i_2)] = 6))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_17[shift_sint32(i32_0, i_2)] = 5))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_15[shift_uint16(u16_0, i_2)] = 4))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_13[shift_sint16(i16_0, i_2)] = 3))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_11[shift_uint8(u8_0, i_2)] = 2))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_9[shift_sint8(i8_0, i_2)] = 1))).
+  (* Invariant 'NO_CHECK' *)
+  Have: (0 <= i) /\ (i <= 10).
+  (* Then *)
+  Have: i <= 9.
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_7, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_6, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_5, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_4, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_3, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_2, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_1, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a, 1).
+  (* Assertion 'rte,signed_overflow' *)
+  Have: i <= 2147483646.
+}
+Prove: a_24[shift_sint64(i64_0, i_1)] = 7.
+
+------------------------------------------------------------
+
+Goal Establishment of Invariant (file tests/wp_acsl/chunk_typing.i, line 38):
+Prove: true.
+
+------------------------------------------------------------
+
+Goal Preservation of Invariant (file tests/wp_acsl/chunk_typing.i, line 39):
+Let a = shift_uint64(u64_0, i).
+Let a_1 = shift_sint64(i64_0, i).
+Let a_2 = shift_uint32(u32_0, i).
+Let a_3 = shift_sint32(i32_0, i).
+Let a_4 = shift_uint16(u16_0, i).
+Let a_5 = shift_sint16(i16_0, i).
+Let a_6 = shift_uint8(u8_0, i).
+Let a_7 = shift_sint8(i8_0, i).
+Let a_8 = shift_sint8(i8_0, 0).
+Let a_9 = havoc(Mchar_undef_0, Mchar_0, a_8, 10).
+Let a_10 = shift_uint8(u8_0, 0).
+Let a_11 = havoc(Mint_undef_6, Mint_6, a_10, 10).
+Let a_12 = shift_sint16(i16_0, 0).
+Let a_13 = havoc(Mint_undef_1, Mint_1, a_12, 10).
+Let a_14 = shift_uint16(u16_0, 0).
+Let a_15 = havoc(Mint_undef_4, Mint_4, a_14, 10).
+Let a_16 = shift_sint32(i32_0, 0).
+Let a_17 = havoc(Mint_undef_2, Mint_2, a_16, 10).
+Let a_18 = shift_uint32(u32_0, 0).
+Let a_19 = havoc(Mint_undef_5, Mint_5, a_18, 10).
+Let a_20 = shift_sint64(i64_0, 0).
+Let a_21 = havoc(Mint_undef_3, Mint_3, a_20, 10).
+Let a_22 = shift_uint64(u64_0, 0).
+Let a_23 = havoc(Mint_undef_0, Mint_0, a_22, 10).
+Let a_24 = a_23[a <- 8].
+Assume {
+  Type: is_sint16_chunk(Mint_1) /\ is_sint32_chunk(Mint_2) /\
+      is_sint64_chunk(Mint_3) /\ is_sint8_chunk(Mchar_0) /\
+      is_uint16_chunk(Mint_4) /\ is_uint32_chunk(Mint_5) /\
+      is_uint64_chunk(Mint_0) /\ is_uint8_chunk(Mint_6) /\ is_sint32(i) /\
+      is_sint16_chunk(a_13) /\ is_sint32_chunk(a_17) /\
+      is_sint64_chunk(a_21) /\ is_sint8_chunk(a_9) /\
+      is_uint16_chunk(a_15) /\ is_uint32_chunk(a_19) /\
+      is_uint64_chunk(a_23) /\ is_uint8_chunk(a_11) /\
+      is_sint16_chunk(a_13[a_5 <- 3]) /\ is_sint32_chunk(a_17[a_3 <- 5]) /\
+      is_sint64_chunk(a_21[a_1 <- 7]) /\ is_sint8_chunk(a_9[a_7 <- 1]) /\
+      is_uint16_chunk(a_15[a_4 <- 4]) /\ is_uint32_chunk(a_19[a_2 <- 6]) /\
+      is_uint64_chunk(a_24) /\ is_uint8_chunk(a_11[a_6 <- 2]).
+  (* Heap *)
+  Type: (region(i16_0.base) <= 0) /\ (region(i32_0.base) <= 0) /\
+      (region(i64_0.base) <= 0) /\ (region(i8_0.base) <= 0) /\
+      (region(u16_0.base) <= 0) /\ (region(u32_0.base) <= 0) /\
+      (region(u64_0.base) <= 0) /\ (region(u8_0.base) <= 0) /\
+      linked(Malloc_0) /\ sconst(Mchar_0).
+  (* Goal *)
+  When: (0 <= i_1) /\ (i_1 <= i).
+  (* Pre-condition *)
+  Have: valid_rw(Malloc_0, a_12, 10) /\ valid_rw(Malloc_0, a_16, 10) /\
+      valid_rw(Malloc_0, a_20, 10) /\ valid_rw(Malloc_0, a_8, 10) /\
+      valid_rw(Malloc_0, a_14, 10) /\ valid_rw(Malloc_0, a_18, 10) /\
+      valid_rw(Malloc_0, a_22, 10) /\ valid_rw(Malloc_0, a_10, 10).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_23[shift_uint64(u64_0, i_2)] = 8))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_21[shift_sint64(i64_0, i_2)] = 7))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_19[shift_uint32(u32_0, i_2)] = 6))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_17[shift_sint32(i32_0, i_2)] = 5))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_15[shift_uint16(u16_0, i_2)] = 4))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_13[shift_sint16(i16_0, i_2)] = 3))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_11[shift_uint8(u8_0, i_2)] = 2))).
+  (* Invariant *)
+  Have: forall i_2 : Z. ((0 <= i_2) -> ((i_2 < i) ->
+      (a_9[shift_sint8(i8_0, i_2)] = 1))).
+  (* Invariant 'NO_CHECK' *)
+  Have: (0 <= i) /\ (i <= 10).
+  (* Then *)
+  Have: i <= 9.
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_7, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_6, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_5, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_4, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_3, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_2, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a_1, 1).
+  (* Assertion 'rte,mem_access' *)
+  Have: valid_rw(Malloc_0, a, 1).
+  (* Assertion 'rte,signed_overflow' *)
+  Have: i <= 2147483646.
+}
+Prove: a_24[shift_uint64(u64_0, i_1)] = 8.
+
+------------------------------------------------------------
+
+Goal Establishment of Invariant (file tests/wp_acsl/chunk_typing.i, line 39):
+Prove: true.
+
+------------------------------------------------------------
+[wp] Warning: Memory model hypotheses for function 'function':
+  /*@
+     behavior typed:
+     requires \separated(x+(..),
+                \union(i8+(..),u8+(..),i16+(..),u16+(..),i32+(..),u32+(..),
+                  i64+(..),u64+(..)));
+     */
+  void function(signed char * /*[10]*/ i8, unsigned char * /*[10]*/ u8,
+                short * /*[10]*/ i16, unsigned short * /*[10]*/ u16,
+                int * /*[10]*/ i32, unsigned int * /*[10]*/ u32,
+                long long * /*[10]*/ i64, unsigned long long * /*[10]*/ u64);
diff --git a/src/plugins/wp/tests/wp_acsl/oracle_qualif/chunk_typing.res.oracle b/src/plugins/wp/tests/wp_acsl/oracle_qualif/chunk_typing.res.oracle
new file mode 100644
index 00000000000..8e409db95bf
--- /dev/null
+++ b/src/plugins/wp/tests/wp_acsl/oracle_qualif/chunk_typing.res.oracle
@@ -0,0 +1,41 @@
+# frama-c -wp -wp-rte [...]
+[kernel] Parsing tests/wp_acsl/chunk_typing.i (no preprocessing)
+[wp] Running WP plugin...
+[wp] Loading driver 'share/wp.driver'
+[rte] annotating function function
+[wp] 17 goals scheduled
+[wp] [Alt-Ergo] Goal typed_function_ensures : Valid
+[wp] [Alt-Ergo] Goal typed_function_loop_invariant_preserved : Valid
+[wp] [Qed] Goal typed_function_loop_invariant_established : Valid
+[wp] [Alt-Ergo] Goal typed_function_loop_invariant_2_preserved : Valid
+[wp] [Qed] Goal typed_function_loop_invariant_2_established : Valid
+[wp] [Alt-Ergo] Goal typed_function_loop_invariant_3_preserved : Valid
+[wp] [Qed] Goal typed_function_loop_invariant_3_established : Valid
+[wp] [Alt-Ergo] Goal typed_function_loop_invariant_4_preserved : Valid
+[wp] [Qed] Goal typed_function_loop_invariant_4_established : Valid
+[wp] [Alt-Ergo] Goal typed_function_loop_invariant_5_preserved : Valid
+[wp] [Qed] Goal typed_function_loop_invariant_5_established : Valid
+[wp] [Alt-Ergo] Goal typed_function_loop_invariant_6_preserved : Valid
+[wp] [Qed] Goal typed_function_loop_invariant_6_established : Valid
+[wp] [Alt-Ergo] Goal typed_function_loop_invariant_7_preserved : Valid
+[wp] [Qed] Goal typed_function_loop_invariant_7_established : Valid
+[wp] [Alt-Ergo] Goal typed_function_loop_invariant_8_preserved : Valid
+[wp] [Qed] Goal typed_function_loop_invariant_8_established : Valid
+[wp] Proved goals:   17 / 17
+  Qed:             8 
+  Alt-Ergo:        9
+------------------------------------------------------------
+ Functions                 WP     Alt-Ergo  Total   Success
+  function                  8        9       17       100%
+------------------------------------------------------------
+[wp] Warning: Memory model hypotheses for function 'function':
+  /*@
+     behavior typed:
+     requires \separated(x+(..),
+                \union(i8+(..),u8+(..),i16+(..),u16+(..),i32+(..),u32+(..),
+                  i64+(..),u64+(..)));
+     */
+  void function(signed char * /*[10]*/ i8, unsigned char * /*[10]*/ u8,
+                short * /*[10]*/ i16, unsigned short * /*[10]*/ u16,
+                int * /*[10]*/ i32, unsigned int * /*[10]*/ u32,
+                long long * /*[10]*/ i64, unsigned long long * /*[10]*/ u64);
-- 
GitLab