Columns (type and observed length / class statistics):

column                        type        observed range
file_name                     string      length 5 to 52
name                          string      length 4 to 95
original_source_type          string      length 0 to 23k
source_type                   string      length 9 to 23k
source_definition             string      length 9 to 57.9k
source                        dict        -
source_range                  dict        -
file_context                  string      length 0 to 721k
dependencies                  dict        -
opens_and_abbrevs             list        length 2 to 94
vconfig                       dict        -
interleaved                   bool        1 class
verbose_type                  string      length 1 to 7.42k
effect                        string      118 distinct values
effect_flags                  sequence    length 0 to 2
mutual_with                   sequence    length 0 to 11
ideal_premises                sequence    length 0 to 236
proof_features                sequence    length 0 to 1
is_simple_lemma               bool        2 classes
is_div                        bool        2 classes
is_proof                      bool        2 classes
is_simply_typed               bool        2 classes
is_type                       bool        2 classes
partial_definition            string      length 5 to 3.99k
completed_definiton           string      length 1 to 1.63M
isa_cross_project_example     bool        1 class
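The rows that follow are flattened records in this schema, one field value per line in the order above. As a rough illustration only (the JSON Lines layout and the file name `fstar_examples.jsonl` are assumptions, not part of this dump), records with these fields could be loaded and split into a completion prompt and target like this:

```python
import json

# Sketch only: assumes the records are stored as JSON Lines (one JSON
# object per record) in a hypothetical file "fstar_examples.jsonl",
# with field names exactly as in the schema above.
def load_examples(path: str = "fstar_examples.jsonl"):
    with open(path, encoding="utf-8") as f:
        for line in f:
            yield json.loads(line)

for ex in load_examples():
    # "partial_definition" is the definition truncated after "=", and
    # "completed_definiton" (spelled as in the schema) is the body that
    # completes it; "source_definition" carries the full original text.
    prompt = ex["file_context"] + "\n" + ex["partial_definition"]
    target = ex["completed_definiton"]
    print(ex["name"], ex["effect"], len(target))
```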
FStar.ST.fst
FStar.ST.modifies_none
val modifies_none : h0: FStar.Monotonic.Heap.heap -> h1: FStar.Monotonic.Heap.heap -> Prims.logical
let modifies_none (h0:heap) (h1:heap) = modifies !{} h0 h1
{ "file_name": "ulib/FStar.ST.fst", "git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
{ "end_col": 58, "end_line": 132, "start_col": 0, "start_line": 132 }
(* Copyright 2008-2014 Nikhil Swamy, Aseem Rastogi, and Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.ST open FStar.TSet open FStar.Heap open FStar.Preorder module W = FStar.Monotonic.Witnessed (***** Global ST (GST) effect with put, get, witness, and recall *****) new_effect GST = STATE_h heap let gst_pre = st_pre_h heap let gst_post' (a:Type) (pre:Type) = st_post_h' heap a pre let gst_post (a:Type) = st_post_h heap a let gst_wp (a:Type) = st_wp_h heap a unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:heap) = wp (fun a -> p a h) sub_effect DIV ~> GST = lift_div_gst let heap_rel (h1:heap) (h2:heap) = forall (a:Type0) (rel:preorder a) (r:mref a rel). h1 `contains` r ==> (h2 `contains` r /\ rel (sel h1 r) (sel h2 r)) assume val gst_get: unit -> GST heap (fun p h0 -> p h0 h0) assume val gst_put: h1:heap -> GST unit (fun p h0 -> heap_rel h0 h1 /\ p () h1) type heap_predicate = heap -> Type0 let stable (p:heap_predicate) = forall (h1:heap) (h2:heap). (p h1 /\ heap_rel h1 h2) ==> p h2 [@@"opaque_to_smt"] let witnessed (p:heap_predicate{stable p}) : Type0 = W.witnessed heap_rel p assume val gst_witness: p:heap_predicate -> GST unit (fun post h0 -> stable p /\ p h0 /\ (witnessed p ==> post () h0)) assume val gst_recall: p:heap_predicate -> GST unit (fun post h0 -> stable p /\ witnessed p /\ (p h0 ==> post () h0)) val lemma_functoriality (p:heap_predicate{stable p /\ witnessed p}) (q:heap_predicate{stable q /\ (forall (h:heap). p h ==> q h)}) :Lemma (ensures (witnessed q)) let lemma_functoriality p q = reveal_opaque (`%witnessed) witnessed; W.lemma_witnessed_weakening heap_rel p q (***** ST effect *****) let st_pre = gst_pre let st_post' = gst_post' let st_post = gst_post let st_wp = gst_wp new_effect STATE = GST unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp sub_effect GST ~> STATE = lift_gst_state effect State (a:Type) (wp:st_wp a) = STATE a wp effect ST (a:Type) (pre:st_pre) (post: (h:heap -> Tot (st_post' a (pre h)))) = STATE a (fun (p:st_post a) (h:heap) -> pre h /\ (forall a h1. 
post h a h1 ==> p a h1)) effect St (a:Type) = ST a (fun h -> True) (fun h0 r h1 -> True) let contains_pred (#a:Type0) (#rel:preorder a) (r:mref a rel) = fun h -> h `contains` r type mref (a:Type0) (rel:preorder a) = r:Heap.mref a rel{is_mm r = false /\ witnessed (contains_pred r)} let recall (#a:Type) (#rel:preorder a) (r:mref a rel) :STATE unit (fun p h -> Heap.contains h r ==> p () h) = gst_recall (contains_pred r) let alloc (#a:Type) (#rel:preorder a) (init:a) :ST (mref a rel) (fun h -> True) (fun h0 r h1 -> fresh r h0 h1 /\ modifies Set.empty h0 h1 /\ sel h1 r == init) = let h0 = gst_get () in let r, h1 = alloc rel h0 init false in gst_put h1; gst_witness (contains_pred r); r let read (#a:Type) (#rel:preorder a) (r:mref a rel) :STATE a (fun p h -> p (sel h r) h) = let h0 = gst_get () in gst_recall (contains_pred r); Heap.lemma_sel_equals_sel_tot_for_contained_refs h0 r; sel_tot h0 r let write (#a:Type) (#rel:preorder a) (r:mref a rel) (v:a) : ST unit (fun h -> rel (sel h r) v) (fun h0 x h1 -> rel (sel h0 r) v /\ h0 `contains` r /\ modifies (Set.singleton (addr_of r)) h0 h1 /\ equal_dom h0 h1 /\ sel h1 r == v) = let h0 = gst_get () in gst_recall (contains_pred r); let h1 = upd_tot h0 r v in Heap.lemma_distinct_addrs_distinct_preorders (); Heap.lemma_distinct_addrs_distinct_mm (); Heap.lemma_upd_equals_upd_tot_for_contained_refs h0 r v; gst_put h1 let get (u:unit) :ST heap (fun h -> True) (fun h0 h h1 -> h0==h1 /\ h==h1) = gst_get () let op_Bang (#a:Type) (#rel:preorder a) (r:mref a rel) : STATE a (fun p h -> p (sel h r) h) = read #a #rel r let op_Colon_Equals (#a:Type) (#rel:preorder a) (r:mref a rel) (v:a) : ST unit (fun h -> rel (sel h r) v) (fun h0 x h1 -> rel (sel h0 r) v /\ h0 `contains` r /\ modifies (Set.singleton (addr_of r)) h0 h1 /\ equal_dom h0 h1 /\ sel h1 r == v) = write #a #rel r v type ref (a:Type0) = mref a (trivial_preorder a)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "FStar.TSet.fsti.checked", "FStar.Set.fsti.checked", "FStar.Preorder.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Monotonic.Witnessed.fsti.checked", "FStar.Heap.fst.checked" ], "interface_file": false, "source_file": "FStar.ST.fst" }
[ { "abbrev": true, "full_module": "FStar.Monotonic.Witnessed", "short_module": "W" }, { "abbrev": false, "full_module": "FStar.Preorder", "short_module": null }, { "abbrev": false, "full_module": "FStar.Heap", "short_module": null }, { "abbrev": false, "full_module": "FStar.TSet", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
h0: FStar.Monotonic.Heap.heap -> h1: FStar.Monotonic.Heap.heap -> Prims.logical
Prims.Tot
[ "total" ]
[]
[ "FStar.Monotonic.Heap.heap", "FStar.Monotonic.Heap.modifies", "FStar.Set.empty", "Prims.nat", "Prims.logical" ]
[]
false
false
false
true
true
let modifies_none (h0 h1: heap) =
modifies FStar.Set.empty h0 h1
false
FStar.ST.fst
FStar.ST.write
val write (#a: Type) (#rel: preorder a) (r: mref a rel) (v: a) : ST unit (fun h -> rel (sel h r) v) (fun h0 x h1 -> rel (sel h0 r) v /\ h0 `contains` r /\ modifies (Set.singleton (addr_of r)) h0 h1 /\ equal_dom h0 h1 /\ sel h1 r == v)
val write (#a: Type) (#rel: preorder a) (r: mref a rel) (v: a) : ST unit (fun h -> rel (sel h r) v) (fun h0 x h1 -> rel (sel h0 r) v /\ h0 `contains` r /\ modifies (Set.singleton (addr_of r)) h0 h1 /\ equal_dom h0 h1 /\ sel h1 r == v)
let write (#a:Type) (#rel:preorder a) (r:mref a rel) (v:a) : ST unit (fun h -> rel (sel h r) v) (fun h0 x h1 -> rel (sel h0 r) v /\ h0 `contains` r /\ modifies (Set.singleton (addr_of r)) h0 h1 /\ equal_dom h0 h1 /\ sel h1 r == v) = let h0 = gst_get () in gst_recall (contains_pred r); let h1 = upd_tot h0 r v in Heap.lemma_distinct_addrs_distinct_preorders (); Heap.lemma_distinct_addrs_distinct_mm (); Heap.lemma_upd_equals_upd_tot_for_contained_refs h0 r v; gst_put h1
{ "file_name": "ulib/FStar.ST.fst", "git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
{ "end_col": 14, "end_line": 114, "start_col": 0, "start_line": 102 }
(* Copyright 2008-2014 Nikhil Swamy, Aseem Rastogi, and Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module FStar.ST open FStar.TSet open FStar.Heap open FStar.Preorder module W = FStar.Monotonic.Witnessed (***** Global ST (GST) effect with put, get, witness, and recall *****) new_effect GST = STATE_h heap let gst_pre = st_pre_h heap let gst_post' (a:Type) (pre:Type) = st_post_h' heap a pre let gst_post (a:Type) = st_post_h heap a let gst_wp (a:Type) = st_wp_h heap a unfold let lift_div_gst (a:Type) (wp:pure_wp a) (p:gst_post a) (h:heap) = wp (fun a -> p a h) sub_effect DIV ~> GST = lift_div_gst let heap_rel (h1:heap) (h2:heap) = forall (a:Type0) (rel:preorder a) (r:mref a rel). h1 `contains` r ==> (h2 `contains` r /\ rel (sel h1 r) (sel h2 r)) assume val gst_get: unit -> GST heap (fun p h0 -> p h0 h0) assume val gst_put: h1:heap -> GST unit (fun p h0 -> heap_rel h0 h1 /\ p () h1) type heap_predicate = heap -> Type0 let stable (p:heap_predicate) = forall (h1:heap) (h2:heap). (p h1 /\ heap_rel h1 h2) ==> p h2 [@@"opaque_to_smt"] let witnessed (p:heap_predicate{stable p}) : Type0 = W.witnessed heap_rel p assume val gst_witness: p:heap_predicate -> GST unit (fun post h0 -> stable p /\ p h0 /\ (witnessed p ==> post () h0)) assume val gst_recall: p:heap_predicate -> GST unit (fun post h0 -> stable p /\ witnessed p /\ (p h0 ==> post () h0)) val lemma_functoriality (p:heap_predicate{stable p /\ witnessed p}) (q:heap_predicate{stable q /\ (forall (h:heap). p h ==> q h)}) :Lemma (ensures (witnessed q)) let lemma_functoriality p q = reveal_opaque (`%witnessed) witnessed; W.lemma_witnessed_weakening heap_rel p q (***** ST effect *****) let st_pre = gst_pre let st_post' = gst_post' let st_post = gst_post let st_wp = gst_wp new_effect STATE = GST unfold let lift_gst_state (a:Type) (wp:gst_wp a) = wp sub_effect GST ~> STATE = lift_gst_state effect State (a:Type) (wp:st_wp a) = STATE a wp effect ST (a:Type) (pre:st_pre) (post: (h:heap -> Tot (st_post' a (pre h)))) = STATE a (fun (p:st_post a) (h:heap) -> pre h /\ (forall a h1. post h a h1 ==> p a h1)) effect St (a:Type) = ST a (fun h -> True) (fun h0 r h1 -> True) let contains_pred (#a:Type0) (#rel:preorder a) (r:mref a rel) = fun h -> h `contains` r type mref (a:Type0) (rel:preorder a) = r:Heap.mref a rel{is_mm r = false /\ witnessed (contains_pred r)} let recall (#a:Type) (#rel:preorder a) (r:mref a rel) :STATE unit (fun p h -> Heap.contains h r ==> p () h) = gst_recall (contains_pred r) let alloc (#a:Type) (#rel:preorder a) (init:a) :ST (mref a rel) (fun h -> True) (fun h0 r h1 -> fresh r h0 h1 /\ modifies Set.empty h0 h1 /\ sel h1 r == init) = let h0 = gst_get () in let r, h1 = alloc rel h0 init false in gst_put h1; gst_witness (contains_pred r); r let read (#a:Type) (#rel:preorder a) (r:mref a rel) :STATE a (fun p h -> p (sel h r) h) = let h0 = gst_get () in gst_recall (contains_pred r); Heap.lemma_sel_equals_sel_tot_for_contained_refs h0 r; sel_tot h0 r
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "FStar.TSet.fsti.checked", "FStar.Set.fsti.checked", "FStar.Preorder.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Monotonic.Witnessed.fsti.checked", "FStar.Heap.fst.checked" ], "interface_file": false, "source_file": "FStar.ST.fst" }
[ { "abbrev": true, "full_module": "FStar.Monotonic.Witnessed", "short_module": "W" }, { "abbrev": false, "full_module": "FStar.Preorder", "short_module": null }, { "abbrev": false, "full_module": "FStar.Heap", "short_module": null }, { "abbrev": false, "full_module": "FStar.TSet", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
r: FStar.ST.mref a rel -> v: a -> FStar.ST.ST Prims.unit
FStar.ST.ST
[]
[]
[ "FStar.Preorder.preorder", "FStar.ST.mref", "FStar.ST.gst_put", "Prims.unit", "FStar.Monotonic.Heap.lemma_upd_equals_upd_tot_for_contained_refs", "FStar.Monotonic.Heap.lemma_distinct_addrs_distinct_mm", "FStar.Monotonic.Heap.lemma_distinct_addrs_distinct_preorders", "FStar.Monotonic.Heap.heap", "FStar.Monotonic.Heap.upd_tot", "FStar.ST.gst_recall", "FStar.ST.contains_pred", "FStar.ST.gst_get", "FStar.Monotonic.Heap.sel", "Prims.l_and", "FStar.Monotonic.Heap.contains", "FStar.Monotonic.Heap.modifies", "FStar.Set.singleton", "Prims.nat", "FStar.Monotonic.Heap.addr_of", "FStar.Monotonic.Heap.equal_dom", "Prims.eq2" ]
[]
false
true
false
false
false
let write (#a: Type) (#rel: preorder a) (r: mref a rel) (v: a) : ST unit (fun h -> rel (sel h r) v) (fun h0 x h1 -> rel (sel h0 r) v /\ h0 `contains` r /\ modifies (Set.singleton (addr_of r)) h0 h1 /\ equal_dom h0 h1 /\ sel h1 r == v) =
let h0 = gst_get () in gst_recall (contains_pred r); let h1 = upd_tot h0 r v in Heap.lemma_distinct_addrs_distinct_preorders (); Heap.lemma_distinct_addrs_distinct_mm (); Heap.lemma_upd_equals_upd_tot_for_contained_refs h0 r v; gst_put h1
false
Spec.Agile.CTR.fst
Spec.Agile.CTR.counter_mode
val counter_mode: a:cipher_alg -> k:key a -> n:nonce a -> plain:bytes { length plain <= max_size_t } -> Tot (cipher:bytes { length cipher = length plain })
val counter_mode: a:cipher_alg -> k:key a -> n:nonce a -> plain:bytes { length plain <= max_size_t } -> Tot (cipher:bytes { length cipher = length plain })
let counter_mode a k n plain = let stream = ctr_stream a k n (length plain) in map2 ( ^. ) (plain <: lbytes (length plain)) (stream <: lbytes (length plain))
{ "file_name": "specs/Spec.Agile.CTR.fst", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 80, "end_line": 22, "start_col": 0, "start_line": 20 }
module Spec.Agile.CTR open FStar.Mul open Lib.IntTypes open Lib.Sequence open Lib.ByteSequence open Spec.Agile.Cipher #reset-options "--z3rlimit 20 --max_fuel 0 --max_ifuel 1" // So that clients don't need to open both modules include Spec.Agile.Cipher val counter_mode: a:cipher_alg -> k:key a -> n:nonce a -> plain:bytes { length plain <= max_size_t } ->
{ "checked_file": "/", "dependencies": [ "Spec.Agile.Cipher.fsti.checked", "prims.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.ByteSequence.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked" ], "interface_file": false, "source_file": "Spec.Agile.CTR.fst" }
[ { "abbrev": false, "full_module": "Spec.Agile.Cipher", "short_module": null }, { "abbrev": false, "full_module": "Spec.Agile.Cipher", "short_module": null }, { "abbrev": false, "full_module": "Lib.ByteSequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.Sequence", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "Spec.Agile", "short_module": null }, { "abbrev": false, "full_module": "Spec.Agile", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 1, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 20, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
a: Spec.Agile.Cipher.cipher_alg -> k: Spec.Agile.Cipher.key a -> n: Spec.Agile.Cipher.nonce a -> plain: Lib.ByteSequence.bytes{Lib.Sequence.length plain <= Lib.IntTypes.max_size_t} -> cipher: Lib.ByteSequence.bytes{Lib.Sequence.length cipher = Lib.Sequence.length plain}
Prims.Tot
[ "total" ]
[]
[ "Spec.Agile.Cipher.cipher_alg", "Spec.Agile.Cipher.key", "Spec.Agile.Cipher.nonce", "Lib.ByteSequence.bytes", "Prims.b2t", "Prims.op_LessThanOrEqual", "Lib.Sequence.length", "Lib.IntTypes.uint_t", "Lib.IntTypes.U8", "Lib.IntTypes.SEC", "Lib.IntTypes.max_size_t", "Lib.Sequence.map2", "Lib.IntTypes.op_Hat_Dot", "Lib.ByteSequence.lbytes", "Lib.Sequence.seq", "Lib.IntTypes.int_t", "Prims.op_Equality", "Prims.nat", "Spec.Agile.Cipher.ctr_stream" ]
[]
false
false
false
false
false
let counter_mode a k n plain =
let stream = ctr_stream a k n (length plain) in map2 ( ^. ) (plain <: lbytes (length plain)) (stream <: lbytes (length plain))
false
SigeltOpts.fst
SigeltOpts.sp1
val sp1 : Prims.unit
let sp1 = assert (List.length [1] == 1)
{ "file_name": "examples/tactics/SigeltOpts.fst", "git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
{ "end_col": 39, "end_line": 8, "start_col": 0, "start_line": 8 }
module SigeltOpts open FStar.Tactics.V2 #set-options "--max_fuel 0"
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "FStar.Tactics.V2.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.fst.checked" ], "interface_file": false, "source_file": "SigeltOpts.fst" }
[ { "abbrev": false, "full_module": "FStar.Tactics.V2", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 2, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
Prims.unit
Prims.Tot
[ "total" ]
[]
[ "Prims._assert", "Prims.eq2", "Prims.int", "FStar.List.Tot.Base.length", "Prims.Cons", "Prims.Nil" ]
[]
false
false
false
true
false
let sp1 =
assert (List.length [1] == 1)
false
SigeltOpts.fst
SigeltOpts.tau
val tau: Prims.unit -> Tac decls
val tau: Prims.unit -> Tac decls
let tau () : Tac decls = match lookup_typ (top_env ()) ["SigeltOpts"; "sp1"] with | None -> fail "1" | Some se -> match sigelt_opts se with | None -> fail "2" | Some opts -> let lb = { lb_fv = pack_fv ["SigeltOpts"; "blah"]; lb_us = []; lb_typ = (`_); lb_def = (`(assert (List.length [2] == 1))) } in let se : sigelt = pack_sigelt (Sg_Let {isrec=false;lbs=[lb]}) in let se = add_check_with opts se in [se]
{ "file_name": "examples/tactics/SigeltOpts.fst", "git_rev": "10183ea187da8e8c426b799df6c825e24c0767d3", "git_url": "https://github.com/FStarLang/FStar.git", "project_name": "FStar" }
{ "end_col": 10, "end_line": 30, "start_col": 0, "start_line": 15 }
module SigeltOpts open FStar.Tactics.V2 #set-options "--max_fuel 0" #push-options "--max_fuel 2 --record_options" let sp1 = assert (List.length [1] == 1) #pop-options (* Fails without fuel *) [@@expect_failure] let sp2 = assert (List.length [1] == 1)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "FStar.Tactics.V2.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.fst.checked" ], "interface_file": false, "source_file": "SigeltOpts.fst" }
[ { "abbrev": false, "full_module": "FStar.Tactics.V2", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
_: Prims.unit -> FStar.Tactics.Effect.Tac FStar.Stubs.Reflection.Types.decls
FStar.Tactics.Effect.Tac
[]
[]
[ "Prims.unit", "FStar.Tactics.V2.Derived.fail", "FStar.Stubs.Reflection.Types.decls", "FStar.Stubs.Reflection.Types.sigelt", "FStar.Stubs.Reflection.V2.Builtins.sigelt_opts", "FStar.VConfig.vconfig", "Prims.Cons", "Prims.Nil", "FStar.Reflection.V2.Derived.add_check_with", "FStar.Tactics.NamedView.pack_sigelt", "FStar.Tactics.NamedView.Sg_Let", "FStar.Tactics.NamedView.Mknamed_sigelt_view__Sg_Let__payload", "FStar.Tactics.NamedView.letbinding", "FStar.Tactics.NamedView.Mkletbinding", "FStar.Stubs.Reflection.V2.Builtins.pack_fv", "Prims.string", "FStar.Tactics.NamedView.univ_name", "FStar.Pervasives.Native.option", "FStar.Stubs.Reflection.V2.Builtins.lookup_typ", "FStar.Stubs.Reflection.Types.env", "FStar.Stubs.Tactics.V2.Builtins.top_env" ]
[]
false
true
false
false
false
let tau () : Tac decls =
match lookup_typ (top_env ()) ["SigeltOpts"; "sp1"] with | None -> fail "1" | Some se -> match sigelt_opts se with | None -> fail "2" | Some opts -> let lb = { lb_fv = pack_fv ["SigeltOpts"; "blah"]; lb_us = []; lb_typ = (`_); lb_def = (`(assert (List.length [2] == 1))) } in let se:sigelt = pack_sigelt (Sg_Let ({ isrec = false; lbs = [lb] })) in let se = add_check_with opts se in [se]
false
LowParse.Spec.Base.fsti
LowParse.Spec.Base.parse
val parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input))
val parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input))
let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 9, "end_line": 33, "start_col": 0, "start_line": 28 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b))
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p: LowParse.Spec.Base.bare_parser t -> input: LowParse.Bytes.bytes -> Prims.GTot (FStar.Pervasives.Native.option (t * LowParse.Spec.Base.consumed_length input))
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.bare_parser", "LowParse.Bytes.bytes", "FStar.Pervasives.Native.option", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length" ]
[]
false
false
false
false
false
let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) =
p input
false
LowParse.Spec.Base.fsti
LowParse.Spec.Base.injective
val injective (#t: Type) (p: bare_parser t) : GTot Type0
val injective (#t: Type) (p: bare_parser t) : GTot Type0
let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 28, "end_line": 93, "start_col": 0, "start_line": 90 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = ()
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p: LowParse.Spec.Base.bare_parser t -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.bare_parser", "Prims.l_Forall", "LowParse.Bytes.bytes", "Prims.l_imp", "LowParse.Spec.Base.injective_precond", "LowParse.Spec.Base.injective_postcond" ]
[]
false
false
false
false
true
let injective (#t: Type) (p: bare_parser t) : GTot Type0 =
forall (b1: bytes) (b2: bytes). {:pattern (injective_precond p b1 b2)\/(injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2
false
Hacl.Impl.Poly1305.fsti
Hacl.Impl.Poly1305.poly1305_update_st
val poly1305_update_st : s: Hacl.Impl.Poly1305.Fields.field_spec -> Type0
let poly1305_update_st (s:field_spec) = ctx:poly1305_ctx s -> len:size_t -> text:lbuffer uint8 len -> Stack unit (requires fun h -> live h text /\ live h ctx /\ disjoint ctx text /\ state_inv_t #s h ctx) (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx /\ as_get_r h0 ctx == as_get_r h1 ctx /\ as_get_acc h1 ctx == S.poly1305_update (as_seq h0 text) (as_get_acc h0 ctx) (as_get_r h0 ctx))
{ "file_name": "code/poly1305/Hacl.Impl.Poly1305.fsti", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 98, "end_line": 104, "start_col": 0, "start_line": 92 }
module Hacl.Impl.Poly1305 open FStar.HyperStack open FStar.HyperStack.All open FStar.Mul open Lib.IntTypes open Lib.Buffer open Hacl.Impl.Poly1305.Fields module S = Spec.Poly1305 #reset-options "--z3rlimit 50 --max_fuel 0 --max_ifuel 1" inline_for_extraction noextract let poly1305_ctx (s:field_spec) = lbuffer (limb s) (nlimb s +! precomplen s) noextract val as_get_acc: #s:field_spec -> h:mem -> ctx:poly1305_ctx s -> GTot S.felem noextract val as_get_r: #s:field_spec -> h:mem -> ctx:poly1305_ctx s -> GTot S.felem noextract val state_inv_t: #s:field_spec -> h:mem -> ctx:poly1305_ctx s -> Type0 // If the ctx is not modified, all the components and invariants are preserved val reveal_ctx_inv': #s:field_spec -> ctx:poly1305_ctx s -> ctx':poly1305_ctx s -> h0:mem -> h1:mem -> Lemma (requires Seq.equal (as_seq h0 ctx) (as_seq h1 ctx') /\ state_inv_t h0 ctx) (ensures as_get_r h0 ctx == as_get_r h1 ctx' /\ as_get_acc h0 ctx == as_get_acc h1 ctx' /\ state_inv_t h1 ctx') let reveal_ctx_inv #s ctx h0 h1: Lemma (requires Seq.equal (as_seq h0 ctx) (as_seq h1 ctx) /\ state_inv_t h0 ctx) (ensures as_get_r h0 ctx == as_get_r h1 ctx /\ as_get_acc h0 ctx == as_get_acc h1 ctx /\ state_inv_t h1 ctx) = reveal_ctx_inv' #s ctx ctx h0 h1 val ctx_inv_zeros: #s:field_spec -> ctx:poly1305_ctx s -> h:mem -> Lemma (requires as_seq h ctx == Lib.Sequence.create (v (nlimb s +! precomplen s)) (limb_zero s)) (ensures state_inv_t #s h ctx) inline_for_extraction noextract let poly1305_init_st (s:field_spec) = ctx:poly1305_ctx s -> key:lbuffer uint8 32ul -> Stack unit (requires fun h -> live h ctx /\ live h key /\ disjoint ctx key) (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx /\ (as_get_acc h1 ctx, as_get_r h1 ctx) == S.poly1305_init (as_seq h0 key)) [@ Meta.Attribute.specialize ] inline_for_extraction noextract val poly1305_init: #s:field_spec -> poly1305_init_st s inline_for_extraction noextract let poly1305_update1_st (s:field_spec) = ctx:poly1305_ctx s -> b:lbuffer uint8 16ul -> Stack unit (requires fun h -> live h ctx /\ live h b /\ disjoint b ctx /\ state_inv_t #s h ctx) (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx /\ as_get_r h0 ctx == as_get_r h1 ctx /\ as_get_acc h1 ctx == S.poly1305_update1 (as_get_r h0 ctx) 16 (as_seq h0 b) (as_get_acc h0 ctx)) inline_for_extraction noextract val poly1305_update1: (#s:field_spec) -> poly1305_update1_st s
{ "checked_file": "/", "dependencies": [ "Spec.Poly1305.fst.checked", "prims.fst.checked", "Meta.Attribute.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.Buffer.fsti.checked", "Hacl.Impl.Poly1305.Fields.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked", "FStar.HyperStack.All.fst.checked", "FStar.HyperStack.fst.checked" ], "interface_file": false, "source_file": "Hacl.Impl.Poly1305.fsti" }
[ { "abbrev": true, "full_module": "Hacl.Impl.Poly1305.Field32xN", "short_module": "F32xN" }, { "abbrev": true, "full_module": "Hacl.Spec.Poly1305.Equiv", "short_module": "Equiv" }, { "abbrev": true, "full_module": "Hacl.Spec.Poly1305.Vec", "short_module": "Vec" }, { "abbrev": true, "full_module": "Spec.Poly1305", "short_module": "S" }, { "abbrev": true, "full_module": "Lib.Sequence", "short_module": "LSeq" }, { "abbrev": true, "full_module": "Lib.ByteSequence", "short_module": "BSeq" }, { "abbrev": true, "full_module": "FStar.HyperStack.ST", "short_module": "ST" }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Bignum128", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Fields", "short_module": null }, { "abbrev": false, "full_module": "Lib.ByteBuffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.Buffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.All", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack", "short_module": null }, { "abbrev": true, "full_module": "Spec.Poly1305", "short_module": "S" }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Fields", "short_module": null }, { "abbrev": false, "full_module": "Lib.Buffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.All", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 1, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: Hacl.Impl.Poly1305.Fields.field_spec -> Type0
Prims.Tot
[ "total" ]
[]
[ "Hacl.Impl.Poly1305.Fields.field_spec", "Hacl.Impl.Poly1305.poly1305_ctx", "Lib.IntTypes.size_t", "Lib.Buffer.lbuffer", "Lib.IntTypes.uint8", "Prims.unit", "FStar.Monotonic.HyperStack.mem", "Prims.l_and", "Lib.Buffer.live", "Lib.Buffer.MUT", "Hacl.Impl.Poly1305.Fields.limb", "Lib.Buffer.disjoint", "Hacl.Impl.Poly1305.state_inv_t", "Lib.Buffer.modifies", "Lib.Buffer.loc", "Prims.eq2", "Spec.Poly1305.felem", "Hacl.Impl.Poly1305.as_get_r", "Hacl.Impl.Poly1305.as_get_acc", "Spec.Poly1305.poly1305_update", "Lib.Buffer.as_seq" ]
[]
false
false
false
true
true
let poly1305_update_st (s: field_spec) =
ctx: poly1305_ctx s -> len: size_t -> text: lbuffer uint8 len -> Stack unit (requires fun h -> live h text /\ live h ctx /\ disjoint ctx text /\ state_inv_t #s h ctx) (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx /\ as_get_r h0 ctx == as_get_r h1 ctx /\ as_get_acc h1 ctx == S.poly1305_update (as_seq h0 text) (as_get_acc h0 ctx) (as_get_r h0 ctx))
false
Hacl.Impl.Poly1305.fsti
Hacl.Impl.Poly1305.poly1305_init_st
val poly1305_init_st : s: Hacl.Impl.Poly1305.Fields.field_spec -> Type0
let poly1305_init_st (s:field_spec) = ctx:poly1305_ctx s -> key:lbuffer uint8 32ul -> Stack unit (requires fun h -> live h ctx /\ live h key /\ disjoint ctx key) (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx /\ (as_get_acc h1 ctx, as_get_r h1 ctx) == S.poly1305_init (as_seq h0 key))
{ "file_name": "code/poly1305/Hacl.Impl.Poly1305.fsti", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 76, "end_line": 65, "start_col": 0, "start_line": 56 }
module Hacl.Impl.Poly1305 open FStar.HyperStack open FStar.HyperStack.All open FStar.Mul open Lib.IntTypes open Lib.Buffer open Hacl.Impl.Poly1305.Fields module S = Spec.Poly1305 #reset-options "--z3rlimit 50 --max_fuel 0 --max_ifuel 1" inline_for_extraction noextract let poly1305_ctx (s:field_spec) = lbuffer (limb s) (nlimb s +! precomplen s) noextract val as_get_acc: #s:field_spec -> h:mem -> ctx:poly1305_ctx s -> GTot S.felem noextract val as_get_r: #s:field_spec -> h:mem -> ctx:poly1305_ctx s -> GTot S.felem noextract val state_inv_t: #s:field_spec -> h:mem -> ctx:poly1305_ctx s -> Type0 // If the ctx is not modified, all the components and invariants are preserved val reveal_ctx_inv': #s:field_spec -> ctx:poly1305_ctx s -> ctx':poly1305_ctx s -> h0:mem -> h1:mem -> Lemma (requires Seq.equal (as_seq h0 ctx) (as_seq h1 ctx') /\ state_inv_t h0 ctx) (ensures as_get_r h0 ctx == as_get_r h1 ctx' /\ as_get_acc h0 ctx == as_get_acc h1 ctx' /\ state_inv_t h1 ctx') let reveal_ctx_inv #s ctx h0 h1: Lemma (requires Seq.equal (as_seq h0 ctx) (as_seq h1 ctx) /\ state_inv_t h0 ctx) (ensures as_get_r h0 ctx == as_get_r h1 ctx /\ as_get_acc h0 ctx == as_get_acc h1 ctx /\ state_inv_t h1 ctx) = reveal_ctx_inv' #s ctx ctx h0 h1 val ctx_inv_zeros: #s:field_spec -> ctx:poly1305_ctx s -> h:mem -> Lemma (requires as_seq h ctx == Lib.Sequence.create (v (nlimb s +! precomplen s)) (limb_zero s)) (ensures state_inv_t #s h ctx)
{ "checked_file": "/", "dependencies": [ "Spec.Poly1305.fst.checked", "prims.fst.checked", "Meta.Attribute.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.Buffer.fsti.checked", "Hacl.Impl.Poly1305.Fields.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked", "FStar.HyperStack.All.fst.checked", "FStar.HyperStack.fst.checked" ], "interface_file": false, "source_file": "Hacl.Impl.Poly1305.fsti" }
[ { "abbrev": true, "full_module": "Hacl.Impl.Poly1305.Field32xN", "short_module": "F32xN" }, { "abbrev": true, "full_module": "Hacl.Spec.Poly1305.Equiv", "short_module": "Equiv" }, { "abbrev": true, "full_module": "Hacl.Spec.Poly1305.Vec", "short_module": "Vec" }, { "abbrev": true, "full_module": "Spec.Poly1305", "short_module": "S" }, { "abbrev": true, "full_module": "Lib.Sequence", "short_module": "LSeq" }, { "abbrev": true, "full_module": "Lib.ByteSequence", "short_module": "BSeq" }, { "abbrev": true, "full_module": "FStar.HyperStack.ST", "short_module": "ST" }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Bignum128", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Fields", "short_module": null }, { "abbrev": false, "full_module": "Lib.ByteBuffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.Buffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.All", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack", "short_module": null }, { "abbrev": true, "full_module": "Spec.Poly1305", "short_module": "S" }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Fields", "short_module": null }, { "abbrev": false, "full_module": "Lib.Buffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.All", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 1, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: Hacl.Impl.Poly1305.Fields.field_spec -> Type0
Prims.Tot
[ "total" ]
[]
[ "Hacl.Impl.Poly1305.Fields.field_spec", "Hacl.Impl.Poly1305.poly1305_ctx", "Lib.Buffer.lbuffer", "Lib.IntTypes.uint8", "FStar.UInt32.__uint_to_t", "Prims.unit", "FStar.Monotonic.HyperStack.mem", "Prims.l_and", "Lib.Buffer.live", "Lib.Buffer.MUT", "Hacl.Impl.Poly1305.Fields.limb", "Lib.Buffer.disjoint", "Lib.Buffer.modifies", "Lib.Buffer.loc", "Hacl.Impl.Poly1305.state_inv_t", "Prims.eq2", "FStar.Pervasives.Native.tuple2", "Spec.Poly1305.felem", "FStar.Pervasives.Native.Mktuple2", "Hacl.Impl.Poly1305.as_get_acc", "Hacl.Impl.Poly1305.as_get_r", "Spec.Poly1305.poly1305_init", "Lib.Buffer.as_seq" ]
[]
false
false
false
true
true
let poly1305_init_st (s: field_spec) =
ctx: poly1305_ctx s -> key: lbuffer uint8 32ul -> Stack unit (requires fun h -> live h ctx /\ live h key /\ disjoint ctx key) (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx /\ (as_get_acc h1 ctx, as_get_r h1 ctx) == S.poly1305_init (as_seq h0 key))
false
LowParse.Spec.Base.fsti
LowParse.Spec.Base.no_lookahead_on
val no_lookahead_on (#t: Type) (f: bare_parser t) (x x': bytes) : GTot Type0
val no_lookahead_on (#t: Type) (f: bare_parser t) (x x': bytes) : GTot Type0
let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x'
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 68, "end_line": 139, "start_col": 0, "start_line": 134 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' ))
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
f: LowParse.Spec.Base.bare_parser t -> x: LowParse.Bytes.bytes -> x': LowParse.Bytes.bytes -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.bare_parser", "LowParse.Bytes.bytes", "Prims.l_imp", "LowParse.Spec.Base.no_lookahead_on_precond", "LowParse.Spec.Base.no_lookahead_on_postcond" ]
[]
false
false
false
false
true
let no_lookahead_on (#t: Type) (f: bare_parser t) (x x': bytes) : GTot Type0 =
no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x'
false
LowParse.Spec.Base.fsti
LowParse.Spec.Base.no_lookahead
val no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0
val no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0
let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . {:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x'
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 129, "end_line": 159, "start_col": 0, "start_line": 155 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = ()
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
f: LowParse.Spec.Base.bare_parser t -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.bare_parser", "Prims.l_Forall", "LowParse.Bytes.bytes", "LowParse.Spec.Base.no_lookahead_on", "LowParse.Spec.Base.no_lookahead_on_postcond", "LowParse.Spec.Base.no_lookahead_on_precond" ]
[]
false
false
false
false
true
let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 =
forall (x: bytes) (x': bytes). {:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x'
false
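As an illustration of the predicate recorded above, here is a minimal sketch (not machine-checked, and not part of the dataset record): parse_u8_spec is a hypothetical name for a specification-level parser that reads exactly one byte, so its result can only depend on a one-byte prefix.

let parse_u8_spec : bare_parser byte =
  fun (b: bytes) ->
    if Seq.length b < 1
    then None
    else Some (Seq.index b 0, 1)

(* Expected, though not verified here: no_lookahead parse_u8_spec holds, because any
   x' sharing the one-byte prefix of a successful parse of x yields the same byte. *)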
LowParse.Spec.Base.fsti
LowParse.Spec.Base.injective_postcond
val injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0
val injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0
let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 )
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 74, "start_col": 0, "start_line": 63 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = ()
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p: LowParse.Spec.Base.bare_parser t -> b1: LowParse.Bytes.bytes -> b2: LowParse.Bytes.bytes -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.bare_parser", "LowParse.Bytes.bytes", "Prims.l_and", "Prims.b2t", "FStar.Pervasives.Native.uu___is_Some", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "Prims.eq2", "Prims.nat", "FStar.Seq.Base.seq", "LowParse.Bytes.byte", "FStar.Seq.Base.slice", "Prims.logical", "FStar.Pervasives.Native.option" ]
[]
false
false
false
false
true
let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 =
Some? (parse p b1) /\ Some? (parse p b2) /\ (let Some (v1, len1) = parse p b1 in let Some (v2, len2) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2)
false
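A small usage sketch (unchecked; injective_elim is a hypothetical helper name): once a parser is known to be injective, the precondition recorded for a pair of inputs can be turned into this postcondition.

let injective_elim (#t: Type) (p: bare_parser t) (b1 b2: bytes)
  : Lemma (requires (injective p /\ injective_precond p b1 b2))
          (ensures  (injective_postcond p b1 b2))
  = ()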
LowParse.Spec.Base.fsti
LowParse.Spec.Base.injective_precond
val injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0
val injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0
let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 )
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 47, "start_col": 0, "start_line": 37 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p: LowParse.Spec.Base.bare_parser t -> b1: LowParse.Bytes.bytes -> b2: LowParse.Bytes.bytes -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.bare_parser", "LowParse.Bytes.bytes", "Prims.l_and", "Prims.b2t", "FStar.Pervasives.Native.uu___is_Some", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "Prims.eq2", "Prims.logical", "FStar.Pervasives.Native.option" ]
[]
false
false
false
false
true
let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 =
Some? (parse p b1) /\ Some? (parse p b2) /\ (let Some (v1, len1) = parse p b1 in let Some (v2, len2) = parse p b2 in v1 == v2)
false
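A minimal sanity-check sketch (unchecked; injective_precond_refl is a hypothetical name): the precondition holds whenever the two inputs are the same successful parse, since the two parsed values coincide by reflexivity.

let injective_precond_refl (#t: Type) (p: bare_parser t) (b: bytes)
  : Lemma (requires (Some? (parse p b))) (ensures (injective_precond p b b))
  = ()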
LowParse.Spec.Base.fsti
LowParse.Spec.Base.is_total_constant_size_parser
val is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0
val is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0
let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s))
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 44, "end_line": 265, "start_col": 0, "start_line": 259 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = ()
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
sz: Prims.nat -> f: LowParse.Spec.Base.bare_parser t -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "Prims.nat", "LowParse.Spec.Base.bare_parser", "Prims.l_Forall", "LowParse.Bytes.bytes", "Prims.eq2", "Prims.bool", "Prims.op_LessThan", "FStar.Seq.Base.length", "LowParse.Bytes.byte", "FStar.Pervasives.Native.uu___is_None", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse" ]
[]
false
false
false
false
true
let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 =
forall (s: bytes). {:pattern (parse f s)} (Seq.length s < sz) == (None? (parse f s))
false
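Continuing the hypothetical parse_u8_spec sketched earlier (still unchecked): it fails exactly on inputs shorter than one byte, which is the instance of this predicate stated below.

let parse_u8_is_total_constant_size : Type0 =
  is_total_constant_size_parser 1 parse_u8_spec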
LowParse.Spec.Base.fsti
LowParse.Spec.Base.no_lookahead_on_postcond
val no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x': bytes) : GTot Type0
val no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x': bytes) : GTot Type0
let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' ))
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 4, "end_line": 132, "start_col": 0, "start_line": 120 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off )
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
f: LowParse.Spec.Base.bare_parser t -> x: LowParse.Bytes.bytes -> x': LowParse.Bytes.bytes -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.bare_parser", "LowParse.Bytes.bytes", "Prims.l_imp", "Prims.b2t", "FStar.Pervasives.Native.uu___is_Some", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "Prims.l_and", "Prims.eq2", "Prims.logical", "FStar.Pervasives.Native.option" ]
[]
false
false
false
false
true
let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x': bytes) : GTot Type0 =
Some? (parse f x) ==> (let Some v = parse f x in let y, _ = v in Some? (parse f x') /\ (let Some v' = parse f x' in let y', _ = v' in y == y'))
false
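A trivial instance, sketched without machine-checking (no_lookahead_on_postcond_refl is a hypothetical name): taking the two inputs equal makes the postcondition hold, since both parses return the same value.

let no_lookahead_on_postcond_refl (#t: Type) (f: bare_parser t) (x: bytes)
  : Lemma (no_lookahead_on_postcond f x x)
  = ()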
LowParse.Spec.Base.fsti
LowParse.Spec.Base.parser_always_fails
val parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0
val parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0
let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . {:pattern (parse f input)} parse f input == None
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 65, "end_line": 314, "start_col": 0, "start_line": 313 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
f: LowParse.Spec.Base.bare_parser t -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.bare_parser", "Prims.l_Forall", "LowParse.Bytes.bytes", "Prims.eq2", "FStar.Pervasives.Native.option", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "FStar.Pervasives.Native.None" ]
[]
false
false
false
false
true
let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 =
forall input. {:pattern (parse f input)} parse f input == None
false
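The canonical inhabitant of this predicate, as an unchecked sketch (fail_parser_spec is a hypothetical name): the parser that rejects every input.

let fail_parser_spec (t: Type) : bare_parser t =
  fun (_: bytes) -> None

(* Expected, though not verified here: parser_always_fails (fail_parser_spec t). *)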
LowParse.Spec.Base.fsti
LowParse.Spec.Base.no_lookahead_on_precond
val no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x': bytes) : GTot Type0
val no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x': bytes) : GTot Type0
let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off )
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 118, "start_col": 0, "start_line": 108 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1))
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
f: LowParse.Spec.Base.bare_parser t -> x: LowParse.Bytes.bytes -> x': LowParse.Bytes.bytes -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.bare_parser", "LowParse.Bytes.bytes", "Prims.l_and", "Prims.b2t", "FStar.Pervasives.Native.uu___is_Some", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "Prims.op_LessThanOrEqual", "FStar.Seq.Base.length", "LowParse.Bytes.byte", "Prims.eq2", "FStar.Seq.Base.seq", "FStar.Seq.Base.slice", "Prims.logical", "FStar.Pervasives.Native.option" ]
[]
false
false
false
false
true
let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x': bytes) : GTot Type0 =
Some? (parse f x) /\ (let Some v = parse f x in let _, off = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off)
false
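A trivial instance, sketched without machine-checking (no_lookahead_on_precond_refl is a hypothetical name): with equal inputs, the consumed length is within bounds by the consumed_length refinement and the prefixes are equal by reflexivity.

let no_lookahead_on_precond_refl (#t: Type) (f: bare_parser t) (x: bytes)
  : Lemma (requires (Some? (parse f x))) (ensures (no_lookahead_on_precond f x x))
  = ()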
LowParse.Spec.Base.fsti
LowParse.Spec.Base.injective_ext
val injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires (forall (b: bytes). parse p2 b == parse p1 b)) (ensures (injective p2 <==> injective p1))
val injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires (forall (b: bytes). parse p2 b == parse p1 b)) (ensures (injective p2 <==> injective p1))
let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1))
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 96, "end_line": 106, "start_col": 0, "start_line": 95 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p1: LowParse.Spec.Base.bare_parser t -> p2: LowParse.Spec.Base.bare_parser t -> FStar.Pervasives.Lemma (requires forall (b: LowParse.Bytes.bytes). LowParse.Spec.Base.parse p2 b == LowParse.Spec.Base.parse p1 b) (ensures LowParse.Spec.Base.injective p2 <==> LowParse.Spec.Base.injective p1)
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "LowParse.Spec.Base.bare_parser", "FStar.Classical.forall_intro_2", "LowParse.Bytes.bytes", "Prims.l_imp", "Prims.l_and", "Prims.eq2", "FStar.Pervasives.Native.option", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "Prims.l_iff", "LowParse.Spec.Base.injective_postcond", "FStar.Classical.move_requires", "LowParse.Spec.Base.injective_postcond_ext", "Prims.unit", "Prims.l_True", "Prims.squash", "Prims.Nil", "FStar.Pervasives.pattern", "LowParse.Spec.Base.injective_precond", "LowParse.Spec.Base.injective_precond_ext", "Prims.l_Forall", "LowParse.Spec.Base.injective" ]
[]
false
false
true
false
false
let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires (forall (b: bytes). parse p2 b == parse p1 b)) (ensures (injective p2 <==> injective p1)) =
Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1))
false
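A one-directional usage sketch of this lemma (unchecked; injective_of_ext is a hypothetical name): pointwise-equal parsers let injectivity be transported from p1 to p2.

let injective_of_ext (#t: Type) (p1 p2: bare_parser t)
  : Lemma (requires ((forall (b: bytes) . parse p2 b == parse p1 b) /\ injective p1))
          (ensures  (injective p2))
  = injective_ext p1 p2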
LowParse.Spec.Base.fsti
LowParse.Spec.Base.parser_kind
val parser_kind : Type0
let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high })
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 2, "end_line": 294, "start_col": 0, "start_line": 292 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; }
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
Type0
Prims.Tot
[ "total" ]
[]
[ "LowParse.Spec.Base.parser_kind'", "Prims.l_imp", "Prims.b2t", "FStar.Pervasives.Native.uu___is_Some", "Prims.nat", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_high", "Prims.op_LessThanOrEqual", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low", "FStar.Pervasives.Native.__proj__Some__item__v" ]
[]
false
false
false
true
true
let parser_kind =
(x: parser_kind'{Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high})
false
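The record above concerns the parser_kind refinement of parser_kind'. As a quick illustration (a hypothetical sketch, not part of the dataset; it assumes the LowParse.Spec.Base definitions shown in the surrounding file_context are in scope), a concrete kind for a parser consuming between 1 and 4 bytes can be written directly; the refinement parser_kind_low <= Some?.v parser_kind_high is discharged by SMT, and the same value could also be obtained via strong_parser_kind 1 4 None.

(* Hypothetical example, not from the dataset: a kind for a parser that
   consumes between 1 and 4 bytes; 1 <= 4 discharges the parser_kind refinement. *)
let example_kind : parser_kind = {
  parser_kind_low = 1;
  parser_kind_high = Some 4;
  parser_kind_subkind = Some ParserStrong;
  parser_kind_metadata = None;
}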
LowParse.Spec.Base.fsti
LowParse.Spec.Base.parser_kind_prop'
val parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0
val parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0
let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 31, "end_line": 327, "start_col": 0, "start_line": 322 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
k: LowParse.Spec.Base.parser_kind -> f: LowParse.Spec.Base.bare_parser t -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.bare_parser", "Prims.l_and", "LowParse.Spec.Base.injective", "Prims.l_imp", "Prims.b2t", "FStar.Pervasives.Native.uu___is_Some", "LowParse.Spec.Base.parser_subkind", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind", "LowParse.Spec.Base.parser_subkind_prop", "FStar.Pervasives.Native.__proj__Some__item__v", "LowParse.Spec.Base.parses_at_least", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low", "Prims.nat", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_high", "LowParse.Spec.Base.parses_at_most", "LowParse.Spec.Base.parser_kind_metadata_prop" ]
[]
false
false
false
false
true
let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 =
injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f
false
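To make the conjunction in parser_kind_prop' concrete, here is a hypothetical sketch (example_prop is an illustrative name, and U32 refers to the FStar.UInt32 abbreviation from the file context) of the property applied to a 4-byte total constant-size kind built with total_constant_size_parser_kind.

(* Hypothetical sketch: parser_kind_prop' for a 4-byte total constant-size kind.
   For this kind it amounts to: injective f /\ no_lookahead f /\
   parses_at_least 4 f /\ parses_at_most 4 f /\ is_total_constant_size_parser 4 f. *)
let example_prop (f: bare_parser U32.t) : GTot Type0 =
  parser_kind_prop' (total_constant_size_parser_kind 4) f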
LowParse.Spec.Base.fsti
LowParse.Spec.Base.consumes_all
val consumes_all (#t: Type) (p: bare_parser t) : GTot Type0
val consumes_all (#t: Type) (p: bare_parser t) : GTot Type0
let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len )
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 183, "start_col": 0, "start_line": 176 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p: LowParse.Spec.Base.bare_parser t -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.bare_parser", "Prims.l_Forall", "LowParse.Bytes.bytes", "Prims.l_imp", "Prims.b2t", "FStar.Pervasives.Native.uu___is_Some", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "Prims.eq2", "Prims.nat", "FStar.Seq.Base.length", "LowParse.Bytes.byte", "Prims.logical", "FStar.Pervasives.Native.option" ]
[]
false
false
false
false
true
let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 =
forall (b: bytes). {:pattern (parse p b)} Some? (parse p b) ==> (let Some (_, len) = parse p b in Seq.length b == len)
false
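As a small illustration of consumes_all (a hypothetical sketch; parse_all_bytes is not a LowParse definition), a parser that returns its entire input and reports Seq.length b consumed bytes satisfies the property by construction.

(* Hypothetical example: return the whole input; the reported length equals
   Seq.length b, so consumes_all holds for this parser. *)
let parse_all_bytes : bare_parser bytes =
  fun (b: bytes) -> Some (b, Seq.length b)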
LowParse.Spec.Base.fsti
LowParse.Spec.Base.parses_at_most
val parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0
val parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0
let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) )
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 238, "start_col": 0, "start_line": 229 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
sz: Prims.nat -> f: LowParse.Spec.Base.bare_parser t -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "Prims.nat", "LowParse.Spec.Base.bare_parser", "Prims.l_Forall", "LowParse.Bytes.bytes", "Prims.l_imp", "Prims.b2t", "FStar.Pervasives.Native.uu___is_Some", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "Prims.op_GreaterThanOrEqual", "Prims.logical", "FStar.Pervasives.Native.__proj__Some__item__v" ]
[]
false
false
false
false
true
let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 =
forall (s: bytes). {:pattern (parse f s)} Some? (parse f s) ==> (let _, consumed = Some?.v (parse f s) in sz >= (consumed <: nat))
false
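For parses_at_most and the other bounded-size properties, a minimal hypothetical sketch (parse_one_byte is an illustrative name, and byte is the element type from LowParse.Bytes): a one-byte reader that fails on empty input consumes exactly one byte whenever it succeeds, so it parses at most 1 byte, at least 1 byte, and hence has constant size 1.

(* Hypothetical example: read the first byte if present. On success it
   consumes exactly 1 byte, so parses_at_most 1, parses_at_least 1 and
   is_constant_size_parser 1 all apply to it. *)
let parse_one_byte : bare_parser byte =
  fun (b: bytes) ->
    if Seq.length b < 1
    then None
    else Some (Seq.index b 0, 1)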
LowParse.Spec.Base.fsti
LowParse.Spec.Base.no_lookahead_ext
val no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires (forall (b: bytes). parse p2 b == parse p1 b)) (ensures (no_lookahead p2 <==> no_lookahead p1))
val no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires (forall (b: bytes). parse p2 b == parse p1 b)) (ensures (no_lookahead p2 <==> no_lookahead p1))
let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1))
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 93, "end_line": 171, "start_col": 0, "start_line": 161 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x'
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p1: LowParse.Spec.Base.bare_parser t -> p2: LowParse.Spec.Base.bare_parser t -> FStar.Pervasives.Lemma (requires forall (b: LowParse.Bytes.bytes). LowParse.Spec.Base.parse p2 b == LowParse.Spec.Base.parse p1 b) (ensures LowParse.Spec.Base.no_lookahead p2 <==> LowParse.Spec.Base.no_lookahead p1)
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "LowParse.Spec.Base.bare_parser", "FStar.Classical.forall_intro_2", "LowParse.Bytes.bytes", "Prims.l_imp", "Prims.l_and", "Prims.eq2", "FStar.Pervasives.Native.option", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "Prims.l_iff", "LowParse.Spec.Base.no_lookahead_on", "FStar.Classical.move_requires", "LowParse.Spec.Base.no_lookahead_on_ext", "Prims.unit", "Prims.l_True", "Prims.squash", "Prims.Nil", "FStar.Pervasives.pattern", "Prims.l_Forall", "LowParse.Spec.Base.no_lookahead" ]
[]
false
false
true
false
false
let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires (forall (b: bytes). parse p2 b == parse p1 b)) (ensures (no_lookahead p2 <==> no_lookahead p1)) =
Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1))
false
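A hypothetical usage sketch for the extensionality lemma recorded above (transfer_no_lookahead is an illustrative name): when two bare parsers agree pointwise, no_lookahead transfers from one to the other.

(* Hypothetical sketch: transfer no_lookahead across pointwise-equal parsers. *)
let transfer_no_lookahead (#t: Type) (p1 p2: bare_parser t)
  : Lemma
    (requires ((forall (b: bytes) . parse p2 b == parse p1 b) /\ no_lookahead p1))
    (ensures (no_lookahead p2))
= no_lookahead_ext p1 p2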
LowParse.Spec.Base.fsti
LowParse.Spec.Base.parses_at_least
val parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0
val parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0
let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) )
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 196, "start_col": 0, "start_line": 187 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
sz: Prims.nat -> f: LowParse.Spec.Base.bare_parser t -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "Prims.nat", "LowParse.Spec.Base.bare_parser", "Prims.l_Forall", "LowParse.Bytes.bytes", "Prims.l_imp", "Prims.b2t", "FStar.Pervasives.Native.uu___is_Some", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "Prims.op_LessThanOrEqual", "Prims.logical", "FStar.Pervasives.Native.__proj__Some__item__v" ]
[]
false
false
false
false
true
let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 =
forall (s: bytes). {:pattern (parse f s)} Some? (parse f s) ==> (let _, consumed = Some?.v (parse f s) in sz <= (consumed <: nat))
false
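A hypothetical sketch using the companion lemma parses_at_least_le from the file context (weaken_lower_bound is an illustrative name): a lower bound on the number of consumed bytes can always be weakened.

(* Hypothetical sketch: weaken a lower bound of 2 bytes to 1 byte. *)
let weaken_lower_bound (#t: Type) (f: bare_parser t)
  : Lemma (requires (parses_at_least 2 f)) (ensures (parses_at_least 1 f))
= parses_at_least_le 2 1 f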
LowParse.Spec.Base.fsti
LowParse.Spec.Base.get_parser_kind
val get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind
val get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind
let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 368, "start_col": 0, "start_line": 363 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } )
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p: LowParse.Spec.Base.parser k t -> LowParse.Spec.Base.parser_kind
Prims.Tot
[ "total" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser" ]
[]
false
false
false
false
false
let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind =
k
false
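get_parser_kind simply returns the kind index of a parser, so the following hypothetical sketch (kind_roundtrip is an illustrative name) is expected to verify by unfolding alone.

(* Hypothetical sketch: the kind recovered from a parser is its index. *)
let kind_roundtrip (#k: parser_kind) (#t: Type) (p: parser k t)
  : Lemma (get_parser_kind p == k)
= ()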
LowParse.Spec.Base.fsti
LowParse.Spec.Base.parser_subkind_prop
val parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0
val parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0
let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 18, "end_line": 276, "start_col": 0, "start_line": 271 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
k: LowParse.Spec.Base.parser_subkind -> f: LowParse.Spec.Base.bare_parser t -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.parser_subkind", "LowParse.Spec.Base.bare_parser", "LowParse.Spec.Base.no_lookahead", "LowParse.Spec.Base.consumes_all" ]
[]
false
false
false
false
true
let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 =
match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f
false
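The record above documents parser_subkind_prop, which dispatches on the subkind: ParserStrong demands no_lookahead, ParserConsumesAll demands consumes_all. Below is a minimal sketch of the usual way this property is consumed; the module name ExampleSubkind and the lemma name are hypothetical additions, and the proof is expected to follow from parser_kind_prop_equiv, which is declared in the same interface.

module ExampleSubkind
open LowParse.Spec.Base

(* A parser whose kind records the ParserStrong subkind satisfies
   no_lookahead: unfold parser_kind_prop' via parser_kind_prop_equiv and
   reduce the match in parser_subkind_prop on the known constructor. *)
let strong_parser_has_no_lookahead
  (#k: parser_kind) (#t: Type) (p: parser k t)
: Lemma
  (requires (k.parser_kind_subkind == Some ParserStrong))
  (ensures (no_lookahead p))
= parser_kind_prop_equiv k p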
LowParse.Spec.Base.fsti
LowParse.Spec.Base.is_constant_size_parser
val is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0
val is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0
let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) )
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 249, "start_col": 0, "start_line": 240 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) )
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
sz: Prims.nat -> f: LowParse.Spec.Base.bare_parser t -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "Prims.nat", "LowParse.Spec.Base.bare_parser", "Prims.l_Forall", "LowParse.Bytes.bytes", "Prims.l_imp", "Prims.b2t", "FStar.Pervasives.Native.uu___is_Some", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "Prims.eq2", "Prims.logical", "FStar.Pervasives.Native.__proj__Some__item__v" ]
[]
false
false
false
false
true
let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 =
forall (s: bytes). {:pattern (parse f s)} Some? (parse f s) ==> (let _, consumed = Some?.v (parse f s) in sz == (consumed <: nat))
false
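is_constant_size_parser sz f, documented above, is equivalent to the conjunction of parses_at_least sz f and parses_at_most sz f; the interface records this as is_constant_size_parser_equiv. A minimal sketch of one direction of that equivalence follows; the module and lemma names are hypothetical.

module ExampleConstantSize
open LowParse.Spec.Base

(* One direction of the equivalence: a constant-size parser both parses at
   least and at most sz bytes on every successful input. *)
let constant_size_gives_bounds (sz: nat) (#t: Type) (f: bare_parser t)
: Lemma
  (requires (is_constant_size_parser sz f))
  (ensures (parses_at_least sz f /\ parses_at_most sz f))
= is_constant_size_parser_equiv sz f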
LowParse.Spec.Base.fsti
LowParse.Spec.Base.coerce_to_bare_parser
val coerce_to_bare_parser (t: Type) (k2: parser_kind) (p: parser k2 t) : Tot (bare_parser t)
val coerce_to_bare_parser (t: Type) (k2: parser_kind) (p: parser k2 t) : Tot (bare_parser t)
let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 26, "end_line": 416, "start_col": 7, "start_line": 415 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f))
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
t: Type -> k2: LowParse.Spec.Base.parser_kind -> p: LowParse.Spec.Base.parser k2 t -> LowParse.Spec.Base.bare_parser t
Prims.Tot
[ "total" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.bare_parser" ]
[]
false
false
false
false
false
let coerce_to_bare_parser (t: Type) (k2: parser_kind) (p: parser k2 t) : Tot (bare_parser t) =
p
false
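coerce_to_bare_parser only forgets the parser_kind_prop refinement (the source carries a note pointing at F* bug #1349); the underlying function is returned unchanged. A minimal sketch of using the coercion to feed a kinded parser to parse is below; the module and function names are hypothetical.

module ExampleCoerce
open LowParse.Spec.Base

(* The coercion is the identity on the underlying function, so its result can
   be passed directly to [parse]. *)
let parse_via_bare
  (#k: parser_kind) (#t: Type) (p: parser k t) (input: bytes)
: GTot (option (t * consumed_length input))
= parse (coerce_to_bare_parser t k p) input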
LowParse.Spec.Base.fsti
LowParse.Spec.Base.strong_parser_kind
val strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True))
val strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True))
let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; }
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 305, "start_col": 0, "start_line": 297 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high })
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
lo: Prims.nat -> hi: Prims.nat -> md: LowParse.Spec.Base.parser_kind_metadata_t -> Prims.Pure LowParse.Spec.Base.parser_kind
Prims.Pure
[]
[]
[ "Prims.nat", "LowParse.Spec.Base.parser_kind_metadata_t", "LowParse.Spec.Base.Mkparser_kind'", "FStar.Pervasives.Native.Some", "LowParse.Spec.Base.parser_subkind", "LowParse.Spec.Base.ParserStrong", "LowParse.Spec.Base.parser_kind", "Prims.b2t", "Prims.op_LessThanOrEqual", "Prims.l_True" ]
[]
false
false
false
false
false
let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) =
{ parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md }
false
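strong_parser_kind lo hi md builds a kind with the ParserStrong subkind and the byte bounds [lo, hi], under the precondition lo <= hi. A minimal sketch of constructing such kinds follows; the module and value names are hypothetical, and the chosen bounds are arbitrary.

module ExampleKinds
open LowParse.Spec.Base

(* Parsers of this kind consume between 2 and 6 bytes, have the prefix
   (ParserStrong) property, and carry no metadata. *)
let between_2_and_6 : parser_kind = strong_parser_kind 2 6 None

(* Parsers of this kind always fail, as recorded by the metadata field. *)
let always_fails_kind : parser_kind =
  strong_parser_kind 0 0 (Some ParserKindMetadataFail)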
LowParse.Spec.Base.fsti
LowParse.Spec.Base.parser_kind_metadata_prop
val parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0
val parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0
let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 56, "end_line": 320, "start_col": 0, "start_line": 316 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . {:pattern (parse f input)} parse f input == None
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
k: LowParse.Spec.Base.parser_kind -> f: LowParse.Spec.Base.bare_parser t -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.bare_parser", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_metadata", "Prims.l_True", "Prims.l_imp", "Prims.eq2", "FStar.Pervasives.Native.option", "Prims.nat", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_high", "FStar.Pervasives.Native.Some", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low", "LowParse.Spec.Base.is_total_constant_size_parser", "LowParse.Spec.Base.parser_always_fails" ]
[]
false
false
false
false
true
let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 =
match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f
false
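parser_kind_metadata_prop relates the metadata field of a kind to the behaviour of the parser: ParserKindMetadataTotal forces totality for exact-size kinds, ParserKindMetadataFail forces the parser to reject every input, and None imposes nothing. A minimal sketch of the failing case is below; the module and lemma names are hypothetical, and the proof is expected to go through by unfolding the match on the metadata.

module ExampleMetadata
open LowParse.Spec.Base

(* If the kind is tagged ParserKindMetadataFail, the metadata property
   reduces to parser_always_fails. *)
let fail_metadata_always_fails
  (#t: Type) (k: parser_kind) (f: bare_parser t)
: Lemma
  (requires (
    k.parser_kind_metadata == Some ParserKindMetadataFail /\
    parser_kind_metadata_prop k f))
  (ensures (parser_always_fails f))
= ()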
LowParse.Spec.Base.fsti
LowParse.Spec.Base.coerce_to_tot_bare_parser
val coerce_to_tot_bare_parser (t: Type) (k2: parser_kind) (p: tot_parser k2 t) : Tot (tot_bare_parser t)
val coerce_to_tot_bare_parser (t: Type) (k2: parser_kind) (p: tot_parser k2 t) : Tot (tot_bare_parser t)
let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 30, "end_line": 426, "start_col": 7, "start_line": 425 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
t: Type -> k2: LowParse.Spec.Base.parser_kind -> p: LowParse.Spec.Base.tot_parser k2 t -> LowParse.Spec.Base.tot_bare_parser t
Prims.Tot
[ "total" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.tot_parser", "LowParse.Spec.Base.tot_bare_parser" ]
[]
false
false
false
false
false
let coerce_to_tot_bare_parser (t: Type) (k2: parser_kind) (p: tot_parser k2 t) : Tot (tot_bare_parser t) =
p
false
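coerce_to_tot_bare_parser plays the same role as coerce_to_bare_parser but for total parsers: it drops the kind refinement while keeping the Tot effect, so the result can be executed directly rather than only reasoned about in GTot. A minimal sketch follows; the module and function names are hypothetical.

module ExampleTotCoerce
open LowParse.Spec.Base

(* Running a total parser on concrete input: the coercion exposes the
   underlying Tot function, which is simply applied to the input bytes. *)
let run_tot_parser
  (#k: parser_kind) (#t: Type) (p: tot_parser k t) (input: bytes)
: Tot (option (t * consumed_length input))
= coerce_to_tot_bare_parser t k p input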
LowParse.Spec.Base.fsti
LowParse.Spec.Base.total_constant_size_parser_kind
val total_constant_size_parser_kind (sz: nat) : Tot parser_kind
val total_constant_size_parser_kind (sz: nat) : Tot parser_kind
let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal)
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 57, "end_line": 311, "start_col": 0, "start_line": 308 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; }
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
sz: Prims.nat -> LowParse.Spec.Base.parser_kind
Prims.Tot
[ "total" ]
[]
[ "Prims.nat", "LowParse.Spec.Base.strong_parser_kind", "FStar.Pervasives.Native.Some", "LowParse.Spec.Base.parser_kind_metadata_some", "LowParse.Spec.Base.ParserKindMetadataTotal", "LowParse.Spec.Base.parser_kind" ]
[]
false
false
false
true
false
let total_constant_size_parser_kind (sz: nat) : Tot parser_kind =
strong_parser_kind sz sz (Some ParserKindMetadataTotal)
false
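A minimal usage sketch for the definition in this row (illustrative only; the name u32_parser_kind is not part of the dataset): instantiating the kind of a parser that always consumes exactly four bytes and never fails on sufficiently long input.

(* Illustrative: the kind of a total, constant-size, 4-byte parser. *)
let u32_parser_kind : parser_kind = total_constant_size_parser_kind 4
(* Unfolding the definition, this equals strong_parser_kind 4 4 (Some ParserKindMetadataTotal),
   i.e. parser_kind_low = 4 and parser_kind_high = Some 4. *)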
LowParse.Spec.Base.fsti
LowParse.Spec.Base.weaken
val weaken (k1 #k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True))
val weaken (k1 #k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True))
let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 18, "end_line": 423, "start_col": 0, "start_line": 418 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
k1: LowParse.Spec.Base.parser_kind -> p2: LowParse.Spec.Base.parser k2 t -> Prims.Pure (LowParse.Spec.Base.parser k1 t)
Prims.Pure
[]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "Prims.unit", "LowParse.Spec.Base.is_weaker_than_correct", "LowParse.Spec.Base.bare_parser", "LowParse.Spec.Base.coerce_to_bare_parser", "LowParse.Spec.Base.is_weaker_than", "Prims.l_True" ]
[]
false
false
false
false
false
let weaken (k1 #k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) =
let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t
false
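A hedged usage sketch for weaken (the name weaken_example and the concrete kinds are illustrative, not taken from the dataset): re-typing a 4-byte constant-size parser at the looser kind "between 0 and 4 bytes"; the is_weaker_than precondition is expected to be discharged by the SMT solver on these concrete kinds.

(* Illustrative sketch: strong_parser_kind 0 4 None is weaker than the 4-byte total kind,
   so weaken simply re-types the same underlying bare parser. *)
let weaken_example (#t: Type) (p: parser (total_constant_size_parser_kind 4) t)
  : parser (strong_parser_kind 0 4 None) t
= weaken (strong_parser_kind 0 4 None) p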
LowParse.Spec.Base.fsti
LowParse.Spec.Base.is_weaker_than
val is_weaker_than (k1 k2: parser_kind) : GTot Type0
val is_weaker_than (k1 k2: parser_kind) : GTot Type0
let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high ))))
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 6, "end_line": 403, "start_col": 0, "start_line": 393 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
k1: LowParse.Spec.Base.parser_kind -> k2: LowParse.Spec.Base.parser_kind -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.parser_kind", "Prims.l_and", "Prims.l_imp", "Prims.b2t", "FStar.Pervasives.Native.uu___is_Some", "LowParse.Spec.Base.parser_kind_metadata_some", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_metadata", "Prims.eq2", "LowParse.Spec.Base.parser_kind_metadata_t", "Prims.op_disEquality", "FStar.Pervasives.Native.option", "FStar.Pervasives.Native.Some", "LowParse.Spec.Base.ParserKindMetadataFail", "LowParse.Spec.Base.parser_subkind", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind", "Prims.op_LessThanOrEqual", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low", "Prims.nat", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_high", "FStar.Pervasives.Native.__proj__Some__item__v" ]
[]
false
false
false
false
true
let is_weaker_than (k1 k2: parser_kind) : GTot Type0 =
(Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ((Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> (Some? k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high))))
false
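A hedged sketch of the kind ordering on concrete values (the lemma name is illustrative; the proof is expected to follow by unfolding the two kinds and letting the SMT solver compare their fields):

(* Illustrative: the loose kind from the weaken sketch above is indeed weaker
   than the 4-byte total constant-size kind. *)
let is_weaker_than_example ()
  : Lemma ((strong_parser_kind 0 4 None) `is_weaker_than` (total_constant_size_parser_kind 4))
= ()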
Hacl.Impl.Poly1305.fsti
Hacl.Impl.Poly1305.poly1305_update1_st
val poly1305_update1_st : s: Hacl.Impl.Poly1305.Fields.field_spec -> Type0
let poly1305_update1_st (s:field_spec) = ctx:poly1305_ctx s -> b:lbuffer uint8 16ul -> Stack unit (requires fun h -> live h ctx /\ live h b /\ disjoint b ctx /\ state_inv_t #s h ctx) (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx /\ as_get_r h0 ctx == as_get_r h1 ctx /\ as_get_acc h1 ctx == S.poly1305_update1 (as_get_r h0 ctx) 16 (as_seq h0 b) (as_get_acc h0 ctx))
{ "file_name": "code/poly1305/Hacl.Impl.Poly1305.fsti", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 99, "end_line": 84, "start_col": 0, "start_line": 73 }
module Hacl.Impl.Poly1305 open FStar.HyperStack open FStar.HyperStack.All open FStar.Mul open Lib.IntTypes open Lib.Buffer open Hacl.Impl.Poly1305.Fields module S = Spec.Poly1305 #reset-options "--z3rlimit 50 --max_fuel 0 --max_ifuel 1" inline_for_extraction noextract let poly1305_ctx (s:field_spec) = lbuffer (limb s) (nlimb s +! precomplen s) noextract val as_get_acc: #s:field_spec -> h:mem -> ctx:poly1305_ctx s -> GTot S.felem noextract val as_get_r: #s:field_spec -> h:mem -> ctx:poly1305_ctx s -> GTot S.felem noextract val state_inv_t: #s:field_spec -> h:mem -> ctx:poly1305_ctx s -> Type0 // If the ctx is not modified, all the components and invariants are preserved val reveal_ctx_inv': #s:field_spec -> ctx:poly1305_ctx s -> ctx':poly1305_ctx s -> h0:mem -> h1:mem -> Lemma (requires Seq.equal (as_seq h0 ctx) (as_seq h1 ctx') /\ state_inv_t h0 ctx) (ensures as_get_r h0 ctx == as_get_r h1 ctx' /\ as_get_acc h0 ctx == as_get_acc h1 ctx' /\ state_inv_t h1 ctx') let reveal_ctx_inv #s ctx h0 h1: Lemma (requires Seq.equal (as_seq h0 ctx) (as_seq h1 ctx) /\ state_inv_t h0 ctx) (ensures as_get_r h0 ctx == as_get_r h1 ctx /\ as_get_acc h0 ctx == as_get_acc h1 ctx /\ state_inv_t h1 ctx) = reveal_ctx_inv' #s ctx ctx h0 h1 val ctx_inv_zeros: #s:field_spec -> ctx:poly1305_ctx s -> h:mem -> Lemma (requires as_seq h ctx == Lib.Sequence.create (v (nlimb s +! precomplen s)) (limb_zero s)) (ensures state_inv_t #s h ctx) inline_for_extraction noextract let poly1305_init_st (s:field_spec) = ctx:poly1305_ctx s -> key:lbuffer uint8 32ul -> Stack unit (requires fun h -> live h ctx /\ live h key /\ disjoint ctx key) (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx /\ (as_get_acc h1 ctx, as_get_r h1 ctx) == S.poly1305_init (as_seq h0 key)) [@ Meta.Attribute.specialize ] inline_for_extraction noextract val poly1305_init: #s:field_spec -> poly1305_init_st s
{ "checked_file": "/", "dependencies": [ "Spec.Poly1305.fst.checked", "prims.fst.checked", "Meta.Attribute.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.Buffer.fsti.checked", "Hacl.Impl.Poly1305.Fields.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked", "FStar.HyperStack.All.fst.checked", "FStar.HyperStack.fst.checked" ], "interface_file": false, "source_file": "Hacl.Impl.Poly1305.fsti" }
[ { "abbrev": true, "full_module": "Hacl.Impl.Poly1305.Field32xN", "short_module": "F32xN" }, { "abbrev": true, "full_module": "Hacl.Spec.Poly1305.Equiv", "short_module": "Equiv" }, { "abbrev": true, "full_module": "Hacl.Spec.Poly1305.Vec", "short_module": "Vec" }, { "abbrev": true, "full_module": "Spec.Poly1305", "short_module": "S" }, { "abbrev": true, "full_module": "Lib.Sequence", "short_module": "LSeq" }, { "abbrev": true, "full_module": "Lib.ByteSequence", "short_module": "BSeq" }, { "abbrev": true, "full_module": "FStar.HyperStack.ST", "short_module": "ST" }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Bignum128", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Fields", "short_module": null }, { "abbrev": false, "full_module": "Lib.ByteBuffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.Buffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.All", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack", "short_module": null }, { "abbrev": true, "full_module": "Spec.Poly1305", "short_module": "S" }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Fields", "short_module": null }, { "abbrev": false, "full_module": "Lib.Buffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.All", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 1, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: Hacl.Impl.Poly1305.Fields.field_spec -> Type0
Prims.Tot
[ "total" ]
[]
[ "Hacl.Impl.Poly1305.Fields.field_spec", "Hacl.Impl.Poly1305.poly1305_ctx", "Lib.Buffer.lbuffer", "Lib.IntTypes.uint8", "FStar.UInt32.__uint_to_t", "Prims.unit", "FStar.Monotonic.HyperStack.mem", "Prims.l_and", "Lib.Buffer.live", "Lib.Buffer.MUT", "Hacl.Impl.Poly1305.Fields.limb", "Lib.Buffer.disjoint", "Hacl.Impl.Poly1305.state_inv_t", "Lib.Buffer.modifies", "Lib.Buffer.loc", "Prims.eq2", "Spec.Poly1305.felem", "Hacl.Impl.Poly1305.as_get_r", "Hacl.Impl.Poly1305.as_get_acc", "Spec.Poly1305.poly1305_update1", "Lib.Buffer.as_seq" ]
[]
false
false
false
true
true
let poly1305_update1_st (s: field_spec) =
ctx: poly1305_ctx s -> b: lbuffer uint8 16ul -> Stack unit (requires fun h -> live h ctx /\ live h b /\ disjoint b ctx /\ state_inv_t #s h ctx) (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx /\ as_get_r h0 ctx == as_get_r h1 ctx /\ as_get_acc h1 ctx == S.poly1305_update1 (as_get_r h0 ctx) 16 (as_seq h0 b) (as_get_acc h0 ctx))
false
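A hedged caller sketch for poly1305_update1_st (feed_one_block and update1 are illustrative names, not Hacl* declarations): any implementation of this type can be applied to a live, disjoint context and 16-byte block; the postcondition below intentionally drops the functional-correctness clause of the original type.

(* Illustrative wrapper around an arbitrary implementation of poly1305_update1_st s. *)
let feed_one_block (#s: field_spec) (update1: poly1305_update1_st s)
    (ctx: poly1305_ctx s) (b: lbuffer uint8 16ul)
  : Stack unit
    (requires fun h -> live h ctx /\ live h b /\ disjoint b ctx /\ state_inv_t #s h ctx)
    (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx)
= update1 ctx b  (* the stronger contract of update1 implies this weaker one *)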
Hacl.Impl.Poly1305.fsti
Hacl.Impl.Poly1305.poly1305_finish_st
val poly1305_finish_st : s: Hacl.Impl.Poly1305.Fields.field_spec -> Type0
let poly1305_finish_st (s:field_spec) = tag:lbuffer uint8 16ul -> key:lbuffer uint8 32ul -> ctx:poly1305_ctx s -> Stack unit (requires fun h -> live h tag /\ live h key /\ live h ctx /\ disjoint tag key /\ disjoint tag ctx /\ disjoint key ctx /\ state_inv_t #s h ctx) (ensures fun h0 _ h1 -> modifies (loc tag |+| loc ctx) h0 h1 /\ as_seq h1 tag == S.poly1305_finish (as_seq h0 key) (as_get_acc h0 ctx))
{ "file_name": "code/poly1305/Hacl.Impl.Poly1305.fsti", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 75, "end_line": 124, "start_col": 0, "start_line": 113 }
module Hacl.Impl.Poly1305 open FStar.HyperStack open FStar.HyperStack.All open FStar.Mul open Lib.IntTypes open Lib.Buffer open Hacl.Impl.Poly1305.Fields module S = Spec.Poly1305 #reset-options "--z3rlimit 50 --max_fuel 0 --max_ifuel 1" inline_for_extraction noextract let poly1305_ctx (s:field_spec) = lbuffer (limb s) (nlimb s +! precomplen s) noextract val as_get_acc: #s:field_spec -> h:mem -> ctx:poly1305_ctx s -> GTot S.felem noextract val as_get_r: #s:field_spec -> h:mem -> ctx:poly1305_ctx s -> GTot S.felem noextract val state_inv_t: #s:field_spec -> h:mem -> ctx:poly1305_ctx s -> Type0 // If the ctx is not modified, all the components and invariants are preserved val reveal_ctx_inv': #s:field_spec -> ctx:poly1305_ctx s -> ctx':poly1305_ctx s -> h0:mem -> h1:mem -> Lemma (requires Seq.equal (as_seq h0 ctx) (as_seq h1 ctx') /\ state_inv_t h0 ctx) (ensures as_get_r h0 ctx == as_get_r h1 ctx' /\ as_get_acc h0 ctx == as_get_acc h1 ctx' /\ state_inv_t h1 ctx') let reveal_ctx_inv #s ctx h0 h1: Lemma (requires Seq.equal (as_seq h0 ctx) (as_seq h1 ctx) /\ state_inv_t h0 ctx) (ensures as_get_r h0 ctx == as_get_r h1 ctx /\ as_get_acc h0 ctx == as_get_acc h1 ctx /\ state_inv_t h1 ctx) = reveal_ctx_inv' #s ctx ctx h0 h1 val ctx_inv_zeros: #s:field_spec -> ctx:poly1305_ctx s -> h:mem -> Lemma (requires as_seq h ctx == Lib.Sequence.create (v (nlimb s +! precomplen s)) (limb_zero s)) (ensures state_inv_t #s h ctx) inline_for_extraction noextract let poly1305_init_st (s:field_spec) = ctx:poly1305_ctx s -> key:lbuffer uint8 32ul -> Stack unit (requires fun h -> live h ctx /\ live h key /\ disjoint ctx key) (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx /\ (as_get_acc h1 ctx, as_get_r h1 ctx) == S.poly1305_init (as_seq h0 key)) [@ Meta.Attribute.specialize ] inline_for_extraction noextract val poly1305_init: #s:field_spec -> poly1305_init_st s inline_for_extraction noextract let poly1305_update1_st (s:field_spec) = ctx:poly1305_ctx s -> b:lbuffer uint8 16ul -> Stack unit (requires fun h -> live h ctx /\ live h b /\ disjoint b ctx /\ state_inv_t #s h ctx) (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx /\ as_get_r h0 ctx == as_get_r h1 ctx /\ as_get_acc h1 ctx == S.poly1305_update1 (as_get_r h0 ctx) 16 (as_seq h0 b) (as_get_acc h0 ctx)) inline_for_extraction noextract val poly1305_update1: (#s:field_spec) -> poly1305_update1_st s inline_for_extraction noextract let poly1305_update_st (s:field_spec) = ctx:poly1305_ctx s -> len:size_t -> text:lbuffer uint8 len -> Stack unit (requires fun h -> live h text /\ live h ctx /\ disjoint ctx text /\ state_inv_t #s h ctx) (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx /\ as_get_r h0 ctx == as_get_r h1 ctx /\ as_get_acc h1 ctx == S.poly1305_update (as_seq h0 text) (as_get_acc h0 ctx) (as_get_r h0 ctx)) inline_for_extraction noextract [@ Meta.Attribute.specialize ] val poly1305_update: #s:field_spec -> poly1305_update_st s
{ "checked_file": "/", "dependencies": [ "Spec.Poly1305.fst.checked", "prims.fst.checked", "Meta.Attribute.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.Buffer.fsti.checked", "Hacl.Impl.Poly1305.Fields.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked", "FStar.HyperStack.All.fst.checked", "FStar.HyperStack.fst.checked" ], "interface_file": false, "source_file": "Hacl.Impl.Poly1305.fsti" }
[ { "abbrev": true, "full_module": "Hacl.Impl.Poly1305.Field32xN", "short_module": "F32xN" }, { "abbrev": true, "full_module": "Hacl.Spec.Poly1305.Equiv", "short_module": "Equiv" }, { "abbrev": true, "full_module": "Hacl.Spec.Poly1305.Vec", "short_module": "Vec" }, { "abbrev": true, "full_module": "Spec.Poly1305", "short_module": "S" }, { "abbrev": true, "full_module": "Lib.Sequence", "short_module": "LSeq" }, { "abbrev": true, "full_module": "Lib.ByteSequence", "short_module": "BSeq" }, { "abbrev": true, "full_module": "FStar.HyperStack.ST", "short_module": "ST" }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Bignum128", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Fields", "short_module": null }, { "abbrev": false, "full_module": "Lib.ByteBuffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.Buffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.All", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack", "short_module": null }, { "abbrev": true, "full_module": "Spec.Poly1305", "short_module": "S" }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Fields", "short_module": null }, { "abbrev": false, "full_module": "Lib.Buffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.All", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 1, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: Hacl.Impl.Poly1305.Fields.field_spec -> Type0
Prims.Tot
[ "total" ]
[]
[ "Hacl.Impl.Poly1305.Fields.field_spec", "Lib.Buffer.lbuffer", "Lib.IntTypes.uint8", "FStar.UInt32.__uint_to_t", "Hacl.Impl.Poly1305.poly1305_ctx", "Prims.unit", "FStar.Monotonic.HyperStack.mem", "Prims.l_and", "Lib.Buffer.live", "Lib.Buffer.MUT", "Hacl.Impl.Poly1305.Fields.limb", "Lib.Buffer.disjoint", "Hacl.Impl.Poly1305.state_inv_t", "Lib.Buffer.modifies", "Lib.Buffer.op_Bar_Plus_Bar", "Lib.Buffer.loc", "Prims.eq2", "Lib.Sequence.lseq", "Lib.IntTypes.v", "Lib.IntTypes.U32", "Lib.IntTypes.PUB", "Lib.Buffer.as_seq", "Spec.Poly1305.poly1305_finish", "Hacl.Impl.Poly1305.as_get_acc" ]
[]
false
false
false
true
true
let poly1305_finish_st (s: field_spec) =
tag: lbuffer uint8 16ul -> key: lbuffer uint8 32ul -> ctx: poly1305_ctx s -> Stack unit (requires fun h -> live h tag /\ live h key /\ live h ctx /\ disjoint tag key /\ disjoint tag ctx /\ disjoint key ctx /\ state_inv_t #s h ctx) (ensures fun h0 _ h1 -> modifies (loc tag |+| loc ctx) h0 h1 /\ as_seq h1 tag == S.poly1305_finish (as_seq h0 key) (as_get_acc h0 ctx))
false
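A hedged caller sketch for poly1305_finish_st in the same style (emit_tag and finish are illustrative names): the wrapper restates the liveness/disjointness preconditions and keeps only the modifies clause of the postcondition.

(* Illustrative wrapper around an arbitrary implementation of poly1305_finish_st s. *)
let emit_tag (#s: field_spec) (finish: poly1305_finish_st s)
    (tag: lbuffer uint8 16ul) (key: lbuffer uint8 32ul) (ctx: poly1305_ctx s)
  : Stack unit
    (requires fun h -> live h tag /\ live h key /\ live h ctx /\
      disjoint tag key /\ disjoint tag ctx /\ disjoint key ctx /\ state_inv_t #s h ctx)
    (ensures fun h0 _ h1 -> modifies (loc tag |+| loc ctx) h0 h1)
= finish tag key ctx  (* its full contract, including the spec-level tag equation, implies this *)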
Hacl.Impl.Poly1305.fsti
Hacl.Impl.Poly1305.poly1305_ctx
val poly1305_ctx : s: Hacl.Impl.Poly1305.Fields.field_spec -> Type0
let poly1305_ctx (s:field_spec) = lbuffer (limb s) (nlimb s +! precomplen s)
{ "file_name": "code/poly1305/Hacl.Impl.Poly1305.fsti", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 76, "end_line": 18, "start_col": 0, "start_line": 18 }
module Hacl.Impl.Poly1305 open FStar.HyperStack open FStar.HyperStack.All open FStar.Mul open Lib.IntTypes open Lib.Buffer open Hacl.Impl.Poly1305.Fields module S = Spec.Poly1305 #reset-options "--z3rlimit 50 --max_fuel 0 --max_ifuel 1"
{ "checked_file": "/", "dependencies": [ "Spec.Poly1305.fst.checked", "prims.fst.checked", "Meta.Attribute.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.Buffer.fsti.checked", "Hacl.Impl.Poly1305.Fields.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked", "FStar.HyperStack.All.fst.checked", "FStar.HyperStack.fst.checked" ], "interface_file": false, "source_file": "Hacl.Impl.Poly1305.fsti" }
[ { "abbrev": true, "full_module": "Spec.Poly1305", "short_module": "S" }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Fields", "short_module": null }, { "abbrev": false, "full_module": "Lib.Buffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.All", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 1, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: Hacl.Impl.Poly1305.Fields.field_spec -> Type0
Prims.Tot
[ "total" ]
[]
[ "Hacl.Impl.Poly1305.Fields.field_spec", "Lib.Buffer.lbuffer", "Hacl.Impl.Poly1305.Fields.limb", "Lib.IntTypes.op_Plus_Bang", "Lib.IntTypes.U32", "Lib.IntTypes.PUB", "Hacl.Impl.Poly1305.Fields.nlimb", "Hacl.Impl.Poly1305.Fields.precomplen" ]
[]
false
false
false
true
true
let poly1305_ctx (s: field_spec) =
lbuffer (limb s) (nlimb s +! precomplen s)
false
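A small illustrative helper (poly1305_ctx_len is not part of the dataset): since poly1305_ctx s abbreviates a limb buffer, its length is fixed by the field spec alone, mirroring the +! expression in the definition above.

(* Illustrative only: the number of limbs held by a poly1305_ctx s. *)
inline_for_extraction noextract
let poly1305_ctx_len (s: field_spec) : size_t = nlimb s +! precomplen s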
LowParse.Spec.Base.fsti
LowParse.Spec.Base.bool_and
val bool_and (b1 b2: bool) : Tot (y: bool{y == (b1 && b2)})
val bool_and (b1 b2: bool) : Tot (y: bool{y == (b1 && b2)})
let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 26, "end_line": 473, "start_col": 0, "start_line": 470 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"]
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 16, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
b1: Prims.bool -> b2: Prims.bool -> y: Prims.bool{y == (b1 && b2)}
Prims.Tot
[ "total" ]
[]
[ "Prims.bool", "Prims.eq2", "Prims.op_AmpAmp" ]
[]
false
false
false
false
false
let bool_and (b1 b2: bool) : Tot (y: bool{y == (b1 && b2)}) =
if b1 then b2 else false
false
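The record above completes LowParse.Spec.Base.bool_and. As an illustration only (not part of the dataset record), the point of the [@"opaque_to_smt"] attribute is that clients reason through the result refinement y == (b1 && b2) rather than through the if/then/else body; a minimal F* sanity check under that assumption:

(* Hypothetical checks, not in the original file: the refinement alone
   justifies these assertions, since the body is opaque to the SMT solver. *)
let _ = assert (bool_and true false == false)
let _ = assert (bool_and true true == true)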
LowParse.Spec.Base.fsti
LowParse.Spec.Base.serialize
val serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes
val serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes
let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 5, "end_line": 763, "start_col": 0, "start_line": 756 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: LowParse.Spec.Base.serializer p -> x: t -> Prims.GTot LowParse.Bytes.bytes
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.serializer", "LowParse.Bytes.bytes" ]
[]
false
false
false
false
false
let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes =
s x
false
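The record above completes LowParse.Spec.Base.serialize, which simply applies the underlying bare serializer. For illustration (the name serialize_roundtrip_sketch is hypothetical and not part of the interface), the serializer_correct refinement carried by s is exactly the parse/serialize round-trip law, so a sketch of that lemma should be dischargeable by triggering the refinement's SMT pattern:

(* Hypothetical lemma sketch, not in the original file: the goal term
   parse p (s x) matches the pattern of serializer_correct, so the
   refinement on s should close the proof. *)
let serialize_roundtrip_sketch
  (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t)
: Lemma (parse p (s x) == Some (x, Seq.length (s x)))
= ()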
LowParse.Spec.Base.fsti
LowParse.Spec.Base.tot_strengthen
val tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True))
val tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True))
let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 439, "start_col": 0, "start_line": 436 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
k: LowParse.Spec.Base.parser_kind -> f: LowParse.Spec.Base.tot_bare_parser t -> Prims.Pure (LowParse.Spec.Base.tot_parser k t)
Prims.Pure
[]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.tot_bare_parser", "LowParse.Spec.Base.tot_parser", "LowParse.Spec.Base.parser_kind_prop", "Prims.l_True" ]
[]
false
false
false
false
false
let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) =
f
false
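The record above completes LowParse.Spec.Base.tot_strengthen, the identity coercion that repackages a total bare parser as a kinded tot_parser once parser_kind_prop is known. A hypothetical usage sketch (tot_strengthen_repack is not part of the original file): starting from a tot_parser, forgetting the refinement with coerce_to_tot_bare_parser and strengthening again should be accepted, because the kind property is available from the parser's type:

(* Hypothetical example, not in the original file: the precondition of
   tot_strengthen follows from the refinement on p's type, and
   coerce_to_tot_bare_parser is an unfold-marked identity. *)
let tot_strengthen_repack
  (#k: parser_kind) (#t: Type) (p: tot_parser k t)
: Tot (tot_parser k t)
= tot_strengthen k (coerce_to_tot_bare_parser t k p)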
LowParse.Spec.Base.fsti
LowParse.Spec.Base.some_v
val some_v (#t: Type) (x: option t {Some? x}) : Tot (y: t{y == Some?.v x})
val some_v (#t: Type) (x: option t {Some? x}) : Tot (y: t{y == Some?.v x})
let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 15, "end_line": 466, "start_col": 0, "start_line": 461 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"]
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 16, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
x: FStar.Pervasives.Native.option t {Some? x} -> y: t{y == Some?.v x}
Prims.Tot
[ "total" ]
[]
[ "FStar.Pervasives.Native.option", "Prims.b2t", "FStar.Pervasives.Native.uu___is_Some", "Prims.eq2", "FStar.Pervasives.Native.__proj__Some__item__v" ]
[]
false
false
false
false
false
let some_v (#t: Type) (x: option t {Some? x}) : Tot (y: t{y == Some?.v x}) =
match x with | Some y -> y
false
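The record above completes LowParse.Spec.Base.some_v, the extraction-friendly counterpart of Some?.v. Illustration only (not part of the dataset record): as with bool_and, the body is opaque to SMT, so proofs rely on the result refinement y == Some?.v x together with is_some's refinement y == Some? x; a concrete sanity check under that assumption:

(* Hypothetical checks, not in the original file. *)
let _ = assert (is_some (Some 42))
let _ = assert (some_v (Some 42) == 42)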
LowParse.Spec.Base.fsti
LowParse.Spec.Base.bool_or
val bool_or (b1 b2: bool) : Tot (y: bool{y == (b1 || b2)})
val bool_or (b1 b2: bool) : Tot (y: bool{y == (b1 || b2)})
let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 25, "end_line": 480, "start_col": 0, "start_line": 477 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"]
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 16, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
b1: Prims.bool -> b2: Prims.bool -> y: Prims.bool{y == (b1 || b2)}
Prims.Tot
[ "total" ]
[]
[ "Prims.bool", "Prims.eq2", "Prims.op_BarBar" ]
[]
false
false
false
false
false
let bool_or (b1 b2: bool) : Tot (y: bool{y == (b1 || b2)}) =
if b1 then true else b2
false
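The record above completes a refinement-typed boolean disjunction that is marked opaque_to_smt. A minimal sketch, assuming a fresh module (the module name below is invented, not part of the record), of how callers recover the specification from the refined result type alone:

module LowParse.Example.BoolOr  (* hypothetical module name *)
open LowParse.Spec.Base

(* The application bool_or true false has type y:bool{y == (true || false)},
   so this assertion follows from the refinement without unfolding the opaque body. *)
let _ = assert (bool_or true false == (true || false))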
LowParse.Spec.Base.fsti
LowParse.Spec.Base.is_strong
val is_strong (#k: parser_kind) (#t: Type) (p: parser k t) : Tot (r: bool{r ==> k.parser_kind_subkind == Some (ParserStrong)})
val is_strong (#k: parser_kind) (#t: Type) (p: parser k t) : Tot (r: bool{r ==> k.parser_kind_subkind == Some (ParserStrong)})
let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong)
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 45, "end_line": 391, "start_col": 0, "start_line": 386 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = ()
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p: LowParse.Spec.Base.parser k t -> r: Prims.bool { r ==> Mkparser_kind'?.parser_kind_subkind k == FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong }
Prims.Tot
[ "total" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "Prims.op_Equality", "FStar.Pervasives.Native.option", "LowParse.Spec.Base.parser_subkind", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind", "FStar.Pervasives.Native.Some", "LowParse.Spec.Base.ParserStrong", "Prims.bool", "Prims.l_imp", "Prims.b2t", "Prims.eq2" ]
[]
false
false
false
false
false
let is_strong (#k: parser_kind) (#t: Type) (p: parser k t) : Tot (r: bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) =
k.parser_kind_subkind = Some (ParserStrong)
false
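A minimal sketch, under an invented module name and not re-verified here, of how the refinement on is_strong lets client code turn the boolean test back into the propositional fact about the kind's subkind:

module LowParse.Example.IsStrong  (* hypothetical module name *)
open LowParse.Spec.Base

(* If is_strong p holds, the refinement r ==> k.parser_kind_subkind == Some ParserStrong
   on its result yields the corresponding equation on the kind. *)
let strong_subkind (#k: parser_kind) (#t: Type) (p: parser k t)
  : Lemma (requires is_strong p)
          (ensures k.parser_kind_subkind == Some ParserStrong)
= ()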
LowParse.Spec.Base.fsti
LowParse.Spec.Base.tot_weaken
val tot_weaken (k1 #k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True))
val tot_weaken (k1 #k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True))
let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 22, "end_line": 433, "start_col": 0, "start_line": 428 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
k1: LowParse.Spec.Base.parser_kind -> p2: LowParse.Spec.Base.tot_parser k2 t -> Prims.Pure (LowParse.Spec.Base.tot_parser k1 t)
Prims.Pure
[]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.tot_parser", "Prims.unit", "LowParse.Spec.Base.is_weaker_than_correct", "LowParse.Spec.Base.tot_bare_parser", "LowParse.Spec.Base.coerce_to_tot_bare_parser", "LowParse.Spec.Base.is_weaker_than", "Prims.l_True" ]
[]
false
false
false
false
false
let tot_weaken (k1 #k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) =
let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t
false
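A minimal usage sketch for tot_weaken, with an invented module name and kinds chosen so that the is_weaker_than side condition holds (lower bound 0 <= 4, upper bound 4 <= 8, same ParserStrong subkind); not re-verified here:

module LowParse.Example.TotWeaken  (* hypothetical module name *)
open LowParse.Spec.Base

(* strong_parser_kind 0 8 None is weaker than strong_parser_kind 4 4 None:
   it allows any consumed length between 0 and 8 where the original kind
   guarantees exactly 4 bytes. *)
let widen_tot (#t: Type) (p: tot_parser (strong_parser_kind 4 4 None) t)
  : Tot (tot_parser (strong_parser_kind 0 8 None) t)
= tot_weaken (strong_parser_kind 0 8 None) p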
Hacl.Impl.Poly1305.fsti
Hacl.Impl.Poly1305.poly1305_mac_st
val poly1305_mac_st : s: Hacl.Impl.Poly1305.Fields.field_spec -> Type0
let poly1305_mac_st (s:field_spec) = output:lbuffer uint8 16ul -> input:buffer uint8 -> input_len:size_t { length input = v input_len } -> key:lbuffer uint8 32ul -> Stack unit (requires fun h -> live h input /\ live h output /\ live h key /\ disjoint output input /\ disjoint output key) (ensures fun h0 _ h1 -> modifies (loc output) h0 h1 /\ as_seq h1 output == S.poly1305_mac (as_seq h0 (input <: lbuffer uint8 input_len)) (as_seq h0 key))
{ "file_name": "code/poly1305/Hacl.Impl.Poly1305.fsti", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 102, "end_line": 143, "start_col": 0, "start_line": 132 }
module Hacl.Impl.Poly1305 open FStar.HyperStack open FStar.HyperStack.All open FStar.Mul open Lib.IntTypes open Lib.Buffer open Hacl.Impl.Poly1305.Fields module S = Spec.Poly1305 #reset-options "--z3rlimit 50 --max_fuel 0 --max_ifuel 1" inline_for_extraction noextract let poly1305_ctx (s:field_spec) = lbuffer (limb s) (nlimb s +! precomplen s) noextract val as_get_acc: #s:field_spec -> h:mem -> ctx:poly1305_ctx s -> GTot S.felem noextract val as_get_r: #s:field_spec -> h:mem -> ctx:poly1305_ctx s -> GTot S.felem noextract val state_inv_t: #s:field_spec -> h:mem -> ctx:poly1305_ctx s -> Type0 // If the ctx is not modified, all the components and invariants are preserved val reveal_ctx_inv': #s:field_spec -> ctx:poly1305_ctx s -> ctx':poly1305_ctx s -> h0:mem -> h1:mem -> Lemma (requires Seq.equal (as_seq h0 ctx) (as_seq h1 ctx') /\ state_inv_t h0 ctx) (ensures as_get_r h0 ctx == as_get_r h1 ctx' /\ as_get_acc h0 ctx == as_get_acc h1 ctx' /\ state_inv_t h1 ctx') let reveal_ctx_inv #s ctx h0 h1: Lemma (requires Seq.equal (as_seq h0 ctx) (as_seq h1 ctx) /\ state_inv_t h0 ctx) (ensures as_get_r h0 ctx == as_get_r h1 ctx /\ as_get_acc h0 ctx == as_get_acc h1 ctx /\ state_inv_t h1 ctx) = reveal_ctx_inv' #s ctx ctx h0 h1 val ctx_inv_zeros: #s:field_spec -> ctx:poly1305_ctx s -> h:mem -> Lemma (requires as_seq h ctx == Lib.Sequence.create (v (nlimb s +! precomplen s)) (limb_zero s)) (ensures state_inv_t #s h ctx) inline_for_extraction noextract let poly1305_init_st (s:field_spec) = ctx:poly1305_ctx s -> key:lbuffer uint8 32ul -> Stack unit (requires fun h -> live h ctx /\ live h key /\ disjoint ctx key) (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx /\ (as_get_acc h1 ctx, as_get_r h1 ctx) == S.poly1305_init (as_seq h0 key)) [@ Meta.Attribute.specialize ] inline_for_extraction noextract val poly1305_init: #s:field_spec -> poly1305_init_st s inline_for_extraction noextract let poly1305_update1_st (s:field_spec) = ctx:poly1305_ctx s -> b:lbuffer uint8 16ul -> Stack unit (requires fun h -> live h ctx /\ live h b /\ disjoint b ctx /\ state_inv_t #s h ctx) (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx /\ as_get_r h0 ctx == as_get_r h1 ctx /\ as_get_acc h1 ctx == S.poly1305_update1 (as_get_r h0 ctx) 16 (as_seq h0 b) (as_get_acc h0 ctx)) inline_for_extraction noextract val poly1305_update1: (#s:field_spec) -> poly1305_update1_st s inline_for_extraction noextract let poly1305_update_st (s:field_spec) = ctx:poly1305_ctx s -> len:size_t -> text:lbuffer uint8 len -> Stack unit (requires fun h -> live h text /\ live h ctx /\ disjoint ctx text /\ state_inv_t #s h ctx) (ensures fun h0 _ h1 -> modifies (loc ctx) h0 h1 /\ state_inv_t #s h1 ctx /\ as_get_r h0 ctx == as_get_r h1 ctx /\ as_get_acc h1 ctx == S.poly1305_update (as_seq h0 text) (as_get_acc h0 ctx) (as_get_r h0 ctx)) inline_for_extraction noextract [@ Meta.Attribute.specialize ] val poly1305_update: #s:field_spec -> poly1305_update_st s inline_for_extraction noextract let poly1305_finish_st (s:field_spec) = tag:lbuffer uint8 16ul -> key:lbuffer uint8 32ul -> ctx:poly1305_ctx s -> Stack unit (requires fun h -> live h tag /\ live h key /\ live h ctx /\ disjoint tag key /\ disjoint tag ctx /\ disjoint key ctx /\ state_inv_t #s h ctx) (ensures fun h0 _ h1 -> modifies (loc tag |+| loc ctx) h0 h1 /\ as_seq h1 tag == S.poly1305_finish (as_seq h0 key) (as_get_acc h0 ctx)) [@ Meta.Attribute.specialize ] noextract inline_for_extraction val poly1305_finish: #s:field_spec -> poly1305_finish_st s
{ "checked_file": "/", "dependencies": [ "Spec.Poly1305.fst.checked", "prims.fst.checked", "Meta.Attribute.fst.checked", "Lib.Sequence.fsti.checked", "Lib.IntTypes.fsti.checked", "Lib.Buffer.fsti.checked", "Hacl.Impl.Poly1305.Fields.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.Mul.fst.checked", "FStar.HyperStack.All.fst.checked", "FStar.HyperStack.fst.checked" ], "interface_file": false, "source_file": "Hacl.Impl.Poly1305.fsti" }
[ { "abbrev": true, "full_module": "Hacl.Impl.Poly1305.Field32xN", "short_module": "F32xN" }, { "abbrev": true, "full_module": "Hacl.Spec.Poly1305.Equiv", "short_module": "Equiv" }, { "abbrev": true, "full_module": "Hacl.Spec.Poly1305.Vec", "short_module": "Vec" }, { "abbrev": true, "full_module": "Spec.Poly1305", "short_module": "S" }, { "abbrev": true, "full_module": "Lib.Sequence", "short_module": "LSeq" }, { "abbrev": true, "full_module": "Lib.ByteSequence", "short_module": "BSeq" }, { "abbrev": true, "full_module": "FStar.HyperStack.ST", "short_module": "ST" }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Bignum128", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Fields", "short_module": null }, { "abbrev": false, "full_module": "Lib.ByteBuffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.Buffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.All", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack", "short_module": null }, { "abbrev": true, "full_module": "Spec.Poly1305", "short_module": "S" }, { "abbrev": false, "full_module": "Hacl.Impl.Poly1305.Fields", "short_module": null }, { "abbrev": false, "full_module": "Lib.Buffer", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.Mul", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.All", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl", "short_module": null }, { "abbrev": false, "full_module": "Hacl.Impl", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 1, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: Hacl.Impl.Poly1305.Fields.field_spec -> Type0
Prims.Tot
[ "total" ]
[]
[ "Hacl.Impl.Poly1305.Fields.field_spec", "Lib.Buffer.lbuffer", "Lib.IntTypes.uint8", "FStar.UInt32.__uint_to_t", "Lib.Buffer.buffer", "Lib.IntTypes.size_t", "Prims.b2t", "Prims.op_Equality", "Prims.int", "Prims.l_or", "Prims.op_GreaterThanOrEqual", "Lib.IntTypes.range", "Lib.IntTypes.U32", "Lib.Buffer.length", "Lib.Buffer.MUT", "Lib.IntTypes.v", "Lib.IntTypes.PUB", "Prims.unit", "FStar.Monotonic.HyperStack.mem", "Prims.l_and", "Lib.Buffer.live", "Lib.Buffer.disjoint", "Lib.Buffer.modifies", "Lib.Buffer.loc", "Prims.eq2", "Lib.Sequence.lseq", "Lib.Buffer.as_seq", "Spec.Poly1305.poly1305_mac" ]
[]
false
false
false
true
true
let poly1305_mac_st (s: field_spec) =
output: lbuffer uint8 16ul -> input: buffer uint8 -> input_len: size_t{length input = v input_len} -> key: lbuffer uint8 32ul -> Stack unit (requires fun h -> live h input /\ live h output /\ live h key /\ disjoint output input /\ disjoint output key) (ensures fun h0 _ h1 -> modifies (loc output) h0 h1 /\ as_seq h1 output == S.poly1305_mac (as_seq h0 (input <: lbuffer uint8 input_len)) (as_seq h0 key))
false
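The poly1305_mac_st record above is a type abbreviation for the Stack signature of a one-shot MAC over a chosen field representation. A hypothetical instantiation sketch, assuming M32 is the 32-bit constructor of field_spec; the declaration name below is invented (comparable declarations exist in HACL*'s specialized Poly1305 modules):

module Hacl.Example.Poly1305Mac  (* hypothetical module name *)
open Hacl.Impl.Poly1305.Fields
open Hacl.Impl.Poly1305

(* A concrete implementation specialized to the 32-bit field would have this type;
   assume val keeps the sketch self-contained without providing a body. *)
assume val poly1305_mac_m32: poly1305_mac_st M32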
LowParse.Spec.Base.fsti
LowParse.Spec.Base.default_parser_kind
val default_parser_kind:(x: parser_kind { forall (t: Type) (p: bare_parser t). {:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p })
val default_parser_kind:(x: parser_kind { forall (t: Type) (p: bare_parser t). {:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p })
let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . {:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 539, "start_col": 0, "start_line": 524 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
x: LowParse.Spec.Base.parser_kind { forall (t: Type) (p: LowParse.Spec.Base.bare_parser t). {:pattern LowParse.Spec.Base.parser_kind_prop x p} LowParse.Spec.Base.injective p ==> LowParse.Spec.Base.parser_kind_prop x p }
Prims.Tot
[ "total" ]
[]
[ "LowParse.Spec.Base.parser_kind'", "LowParse.Spec.Base.Mkparser_kind'", "FStar.Pervasives.Native.None", "Prims.nat", "LowParse.Spec.Base.parser_subkind", "LowParse.Spec.Base.parser_kind_metadata_some", "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.bare_parser", "Prims.unit", "Prims.l_True", "Prims.squash", "Prims.l_iff", "LowParse.Spec.Base.parser_kind_prop", "LowParse.Spec.Base.parser_kind_prop'", "Prims.Cons", "FStar.Pervasives.pattern", "FStar.Pervasives.smt_pat", "Prims.Nil", "LowParse.Spec.Base.parser_kind_prop_equiv" ]
[]
false
false
false
false
false
let default_parser_kind:(x: parser_kind { forall (t: Type) (p: bare_parser t). {:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) =
let aux (t: Type) (k: parser_kind) (p: bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None } in x
false
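A minimal usage sketch for `default_parser_kind`, assuming a hypothetical scratch module that opens `LowParse.Spec.Base`; the helper name `as_default_parser` is invented for illustration:

(* The refinement on default_parser_kind lets any injective bare parser be
   strengthened at this fully unconstrained kind. *)
let as_default_parser (#t: Type) (f: bare_parser t { injective f })
  : Tot (parser default_parser_kind t)
= strengthen default_parser_kind f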
LowParse.Spec.Base.fsti
LowParse.Spec.Base.strengthen
val strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True))
val strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True))
let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 445, "start_col": 0, "start_line": 442 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
k: LowParse.Spec.Base.parser_kind -> f: LowParse.Spec.Base.bare_parser t -> Prims.Pure (LowParse.Spec.Base.parser k t)
Prims.Pure
[]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.bare_parser", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.parser_kind_prop", "Prims.l_True" ]
[]
false
false
false
false
false
let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) =
f
false
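A small sketch of how `strengthen` composes with `weaken`, under the same assumption of a scratch module opening `LowParse.Spec.Base`; `restate_kind` is a hypothetical name:

(* Strengthen a bare parser at a kind it satisfies, then weaken the result to
   any weaker kind; both operations leave the underlying function unchanged. *)
let restate_kind (k1 k2: parser_kind) (#t: Type) (f: bare_parser t)
  : Pure (parser k1 t)
    (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2))
    (ensures (fun _ -> True))
= weaken k1 (strengthen k2 f)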
LowParse.Spec.Base.fsti
LowParse.Spec.Base.serializer_complete
val serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0
val serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0
let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? (parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len )
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 684, "start_col": 0, "start_line": 674 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = ()
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p: LowParse.Spec.Base.parser k t -> f: LowParse.Spec.Base.bare_serializer t -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.bare_serializer", "Prims.l_Forall", "LowParse.Bytes.bytes", "Prims.l_imp", "Prims.b2t", "FStar.Pervasives.Native.uu___is_Some", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "Prims.eq2", "FStar.Seq.Base.seq", "LowParse.Bytes.byte", "FStar.Seq.Base.slice", "Prims.logical", "FStar.Pervasives.Native.option" ]
[]
false
false
false
false
true
let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 =
forall (s: bytes). {:pattern (parse p s)} Some? (parse p s) ==> (let Some (x, len) = parse p s in f x == Seq.slice s 0 len)
false
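A hedged elimination sketch for `serializer_complete` (hypothetical lemma name, same scratch-module assumption): completeness says that re-serializing a successfully parsed value yields exactly the consumed prefix of the input, and the SMT pattern on `parse p s` makes the instantiation automatic:

let serializer_complete_elim
  (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (s: bytes)
  : Lemma
    (requires (serializer_complete p f))
    (ensures (Some? (parse p s) ==> (
      let (Some (x, len)) = parse p s in
      f x == Seq.slice s 0 len)))
= ()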
LowParse.Spec.Base.fsti
LowParse.Spec.Base.is_some
val is_some (#t: Type) (x: option t) : Tot (y: bool{y == Some? x})
val is_some (#t: Type) (x: option t) : Tot (y: bool{y == Some? x})
let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 14, "end_line": 457, "start_col": 0, "start_line": 451 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"]
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 16, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
x: FStar.Pervasives.Native.option t -> y: Prims.bool{y == Some? x}
Prims.Tot
[ "total" ]
[]
[ "FStar.Pervasives.Native.option", "Prims.bool", "Prims.eq2", "FStar.Pervasives.Native.uu___is_Some" ]
[]
false
false
false
false
false
let is_some (#t: Type) (x: option t) : Tot (y: bool{y == Some? x}) =
match x with | Some _ -> true | _ -> false
false
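A small sketch of the intended usage of `is_some` together with `some_v` (both marked `opaque_to_smt`, so only their refined result types are visible to the prover); the helper `option_default` is invented for illustration:

let option_default (#t: Type) (def: t) (x: option t)
  : Tot (y: t { y == (if Some? x then Some?.v x else def) })
= if is_some x then some_v x else def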
LowParse.Spec.Base.fsti
LowParse.Spec.Base.coerce_parser
val coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True))
val coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True))
let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 606, "start_col": 0, "start_line": 598 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
t2: Type -> p: LowParse.Spec.Base.parser k t1 -> Prims.Pure (LowParse.Spec.Base.parser k t2)
Prims.Pure
[]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "Prims.eq2", "Prims.l_True" ]
[]
false
false
false
false
false
let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) =
p
false
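Illustrative note (not part of the dataset row above): a minimal sketch of how coerce_parser behaves when the two types coincide, assuming the LowParse.Spec.Base definitions shown in the row's file_context are in scope; the helper name is hypothetical.

(* Hypothetical helper, for illustration only: with t2 == t1 the proof
   obligation of coerce_parser is trivial and the same parser is returned. *)
let coerce_parser_same (#k: parser_kind) (#t: Type) (p: parser k t) : Tot (parser k t) =
  coerce_parser t p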
LowParse.Spec.Base.fsti
LowParse.Spec.Base.coerce'
val coerce' (t2 #t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True))
val coerce' (t2 #t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True))
let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2)
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 11, "end_line": 595, "start_col": 0, "start_line": 588 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
t2: Type -> x: t1 -> Prims.Pure t2
Prims.Pure
[]
[]
[ "Prims.eq2", "Prims.l_True" ]
[]
false
false
false
false
false
let coerce' (t2 #t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) =
(x <: t2)
false
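Illustrative note (not part of the dataset row above): a minimal sketch of coerce' applied at a single type, assuming LowParse.Spec.Base is open; the helper name is hypothetical.

(* Hypothetical helper, for illustration only: the precondition t1 == t2 is
   trivially satisfied here, so coerce' acts as the identity. *)
let coerce'_same (#t: Type) (x: t) : Tot t =
  coerce' t x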
LowParse.Spec.Base.fsti
LowParse.Spec.Base.glb_list_of
val glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl. L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k'. (Cons? l /\ (forall kl. L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k)))
val glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl. L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k'. (Cons? l /\ (forall kl. L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k)))
let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k'
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 17, "end_line": 561, "start_col": 0, "start_line": 546 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
f: (_: t -> LowParse.Spec.Base.parser_kind) -> l: Prims.list t -> Prims.Pure LowParse.Spec.Base.parser_kind
Prims.Pure
[]
[]
[ "Prims.eqtype", "LowParse.Spec.Base.parser_kind", "Prims.list", "LowParse.Spec.Base.default_parser_kind", "LowParse.Spec.Base.glb", "LowParse.Spec.Base.glb_list_of", "Prims.l_True", "Prims.l_and", "Prims.l_Forall", "Prims.l_imp", "Prims.b2t", "FStar.List.Tot.Base.mem", "LowParse.Spec.Base.is_weaker_than", "Prims.uu___is_Cons" ]
[ "recursion" ]
false
false
false
false
false
let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl. L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k'. (Cons? l /\ (forall kl. L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k))) =
match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k'
false
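Illustrative note (not part of the dataset row above): a minimal sketch of glb_list_of applied to two concrete kinds, mirroring how glb_list itself calls it with id; assumes LowParse.Spec.Base is open, and the constant name is hypothetical.

(* Hypothetical constant, for illustration only: by glb_list_of's postcondition
   the computed kind is weaker than each kind in the list. *)
let example_glb_kind : parser_kind =
  glb_list_of id [ total_constant_size_parser_kind 4; strong_parser_kind 2 8 None ]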
LowParse.Spec.Base.fsti
LowParse.Spec.Base.serializer_correct
val serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0
val serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0
let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x))
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 87, "end_line": 659, "start_col": 0, "start_line": 653 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p: LowParse.Spec.Base.parser k t -> f: LowParse.Spec.Base.bare_serializer t -> Prims.GTot Type0
Prims.GTot
[ "sometrivial" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.bare_serializer", "Prims.l_Forall", "Prims.eq2", "FStar.Pervasives.Native.option", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "FStar.Pervasives.Native.Some", "FStar.Pervasives.Native.Mktuple2", "FStar.Seq.Base.length", "LowParse.Bytes.byte" ]
[]
false
false
false
false
true
let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 =
forall (x: t). {:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x))
false
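Illustrative note (not part of the dataset rows): the serializer_correct property in the row above says that parsing a serialized value round-trips exactly and consumes the whole output. A minimal F* sketch, assuming the serializer type defined later in LowParse.Spec.Base, restates this as a lemma that the quantifier's SMT pattern should discharge automatically; the name _serializer_roundtrip is hypothetical.

let _serializer_roundtrip   (* hypothetical illustration, not library code *)
  (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x: t)
: Lemma (parse p (s x) == Some (x, Seq.length (s x)))
= ()  (* follows from serializer_correct p s, carried by the refinement on s *)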
LowParse.Spec.Base.fsti
LowParse.Spec.Base.glb_list
val glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl. {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl)))
val glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl. {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl)))
let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 18, "end_line": 573, "start_col": 0, "start_line": 565 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
l: Prims.list LowParse.Spec.Base.parser_kind -> Prims.Pure LowParse.Spec.Base.parser_kind
Prims.Pure
[]
[]
[ "Prims.list", "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.glb_list_of", "FStar.Pervasives.id", "Prims.l_True", "Prims.l_Forall", "Prims.l_imp", "Prims.b2t", "FStar.List.Tot.Base.mem", "LowParse.Spec.Base.is_weaker_than" ]
[]
false
false
false
false
false
let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl. {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl))) =
glb_list_of id l
false
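Illustrative note (not part of the dataset rows): glb_list from the row above folds glb over a list of kinds and returns a kind weaker than every element. A minimal sketch with made-up numeric bounds; the name _example_glb_kind and the concrete kinds are assumptions chosen only for illustration.

(* hypothetical example: the result is weaker than both strong kinds below *)
let _example_glb_kind : parser_kind =
  glb_list [ strong_parser_kind 1 4 None; strong_parser_kind 2 8 None ]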
LowParse.Spec.Base.fsti
LowParse.Spec.Base.coerce
val coerce (t2 #t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True))
val coerce (t2 #t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True))
let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2)
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 11, "end_line": 586, "start_col": 0, "start_line": 579 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
t2: Type -> x: t1 -> Prims.Pure t2
Prims.Pure
[]
[]
[ "Prims.eq2", "Prims.l_True" ]
[]
false
false
false
false
false
let coerce (t2 #t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) =
(x <: t2)
false
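Illustrative note (not part of the dataset rows): coerce from the row above only re-annotates a value and requires the source and target types to be provably equal, so the simplest well-typed use keeps the type unchanged; in realistic uses the two types differ only by unfolding an abbreviation. The name _example_coerce is hypothetical.

let _example_coerce (x: int) : int =
  coerce int x  (* the precondition int == int holds trivially *)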
LowParse.Spec.Base.fsti
LowParse.Spec.Base.mk_serializer
val mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: (x: t -> Lemma (parse p (f x) == Some (x, Seq.length (f x))))) : Tot (serializer p)
val mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: (x: t -> Lemma (parse p (f x) == Some (x, Seq.length (f x))))) : Tot (serializer p)
let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 716, "start_col": 0, "start_line": 704 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? (parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } )
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p: LowParse.Spec.Base.parser k t -> f: LowParse.Spec.Base.bare_serializer t -> prf: (x: t -> FStar.Pervasives.Lemma (ensures LowParse.Spec.Base.parse p (f x) == FStar.Pervasives.Native.Some (x, FStar.Seq.Base.length (f x)))) -> LowParse.Spec.Base.serializer p
Prims.Tot
[ "total" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.bare_serializer", "Prims.unit", "Prims.l_True", "Prims.squash", "Prims.eq2", "FStar.Pervasives.Native.option", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "FStar.Pervasives.Native.Some", "FStar.Pervasives.Native.Mktuple2", "FStar.Seq.Base.length", "LowParse.Bytes.byte", "Prims.Nil", "FStar.Pervasives.pattern", "FStar.Classical.forall_intro", "LowParse.Spec.Base.serializer" ]
[]
false
false
false
false
false
let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: (x: t -> Lemma (parse p (f x) == Some (x, Seq.length (f x))))) : Tot (serializer p) =
Classical.forall_intro prf; f
false
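Illustrative note (not part of the dataset rows): mk_serializer from the row above packages a bare serializer together with a per-value round-trip lemma. A sketch under the assumption that the lemma obligation is discharged by SMT from the refinement on s: any existing serializer can be repackaged this way; the name _rebuild_serializer is hypothetical.

let _rebuild_serializer
  (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p)
: Tot (serializer p)
= mk_serializer p s (fun x -> ())  (* goal matches the pattern in serializer_correct *)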
LowParse.Spec.Base.fsti
LowParse.Spec.Base.coerce_serializer
val coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit{t2 == t1}) : Tot (serializer (coerce_parser t2 p))
val coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit{t2 == t1}) : Tot (serializer (coerce_parser t2 p))
let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 727, "start_col": 0, "start_line": 719 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
t2: Type -> s: LowParse.Spec.Base.serializer p -> u467: u468: Prims.unit{t2 == t1} -> LowParse.Spec.Base.serializer (LowParse.Spec.Base.coerce_parser t2 p)
Prims.Tot
[ "total" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.serializer", "Prims.unit", "Prims.eq2", "LowParse.Spec.Base.coerce_parser" ]
[]
false
false
false
false
false
let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit{t2 == t1}) : Tot (serializer (coerce_parser t2 p)) =
s
false
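coerce_serializer, like the coerce_parser it mirrors, only retypes an existing serializer along a propositional equality between the two types; the underlying function is returned unchanged. A hedged sketch, reusing the hypothetical parse_foo and serialize_foo names from the sketch above and introducing an assumed type alias bar:

(* Hypothetical: bar is an unfold alias of foo, so bar == foo is provable by unfolding. *)
unfold let bar : Type0 = foo

let serialize_bar : serializer (coerce_parser bar parse_foo) =
  coerce_serializer bar serialize_foo ()

The unit argument carries the refinement t2 == t1, so the call only typechecks when the two types are provably equal.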
LowParse.Spec.Base.fsti
LowParse.Spec.Base.serialize_ext
val serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes). parse p1 input == parse p2 input))) (ensures (fun _ -> True))
val serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes). parse p1 input == parse p2 input))) (ensures (fun _ -> True))
let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2)
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 28, "end_line": 741, "start_col": 0, "start_line": 729 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p1: LowParse.Spec.Base.parser k1 t1 -> s1: LowParse.Spec.Base.serializer p1 -> p2: LowParse.Spec.Base.parser k2 t2 -> Prims.Pure (LowParse.Spec.Base.serializer p2)
Prims.Pure
[]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.serializer", "LowParse.Spec.Base.bare_serializer", "Prims.unit", "LowParse.Spec.Base.serializer_correct_ext", "Prims.l_and", "Prims.eq2", "Prims.l_Forall", "LowParse.Bytes.bytes", "FStar.Pervasives.Native.option", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "Prims.l_True" ]
[]
false
false
false
false
false
let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes). parse p1 input == parse p2 input))) (ensures (fun _ -> True)) =
serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2)
false
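serialize_ext transports a serializer between two parsers that are extensionally equal (they return the same result on every input), using serializer_correct_ext to move the correctness proof across. A hedged sketch, again reusing the hypothetical foo names from above; parse_foo' and parse_foo'_eq are additional assumptions standing in for a second, observably identical parser:

(* Hypothetical second parser that agrees with parse_foo on every input. *)
assume val parse_foo' : parser (total_constant_size_parser_kind 4) foo
assume val parse_foo'_eq (input: bytes)
  : Lemma (parse parse_foo input == parse parse_foo' input)

let serialize_foo' : serializer parse_foo' =
  Classical.forall_intro parse_foo'_eq;   (* brings the required forall into the proof context *)
  serialize_ext parse_foo serialize_foo parse_foo'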
LowParse.Spec.Base.fsti
LowParse.Spec.Base.glb
val glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2))
val glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2))
let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None }
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 3, "end_line": 519, "start_col": 0, "start_line": 483 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 16, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
k1: LowParse.Spec.Base.parser_kind -> k2: LowParse.Spec.Base.parser_kind -> Prims.Pure LowParse.Spec.Base.parser_kind
Prims.Pure
[]
[]
[ "LowParse.Spec.Base.parser_kind", "FStar.Pervasives.Native.Mktuple2", "FStar.Pervasives.Native.option", "LowParse.Spec.Base.parser_kind_metadata_some", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_metadata", "LowParse.Spec.Base.Mkparser_kind'", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_low", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_high", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind", "FStar.Pervasives.Native.Some", "LowParse.Spec.Base.ParserKindMetadataFail", "FStar.Pervasives.Native.None", "LowParse.Spec.Base.parser_kind_metadata_t", "FStar.Pervasives.Native.tuple2", "Prims.op_LessThan", "Prims.bool", "Prims.nat", "LowParse.Spec.Base.bool_and", "LowParse.Spec.Base.is_some", "LowParse.Spec.Base.some_v", "Prims.op_Equality", "LowParse.Spec.Base.parser_subkind", "Prims.l_True", "Prims.l_and", "LowParse.Spec.Base.is_weaker_than" ]
[]
false
false
false
false
false
let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2)) =
match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with | Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None) } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = (if (is_some k1.parser_kind_high) `bool_and` (is_some k2.parser_kind_high) then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None }
false
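The record above completes the definition of LowParse.Spec.Base.glb, which combines two parser kinds into one that is weaker than both. As a hedged illustration (hypothetical client code, not taken from this record; the name example_glb_use is invented here and LowParse.Spec.Base is assumed to be in scope), the sketch below shows the usual way glb's postcondition is consumed: binding the result of the Pure call makes the is_weaker_than facts available, so the assert is discharged directly.

(* Hypothetical sketch: consuming glb's postcondition at a call site.
   Not part of LowParse; the name example_glb_use is invented for this note. *)
let example_glb_use (k1 k2: parser_kind) : unit =
  let k = glb k1 k2 in
  (* glb's ensures clause provides both conjuncts, so no extra lemmas are needed. *)
  assert (k `is_weaker_than` k1 /\ k `is_weaker_than` k2)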
LowParse.Spec.Base.fsti
LowParse.Spec.Base.serializer_unique
val serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x)
val serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x)
let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 42, "end_line": 801, "start_col": 0, "start_line": 790 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p: LowParse.Spec.Base.parser k t -> s1: LowParse.Spec.Base.serializer p -> s2: LowParse.Spec.Base.serializer p -> x: t -> FStar.Pervasives.Lemma (ensures s1 x == s2 x)
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.serializer", "LowParse.Spec.Base.serializer_correct_implies_complete", "FStar.Pervasives.Native.option", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "Prims.unit", "Prims.l_True", "Prims.squash", "Prims.eq2", "LowParse.Bytes.bytes", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) =
let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2
false
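The record above is the lemma serializer_unique, stating that any two serializers of the same parser agree on a given value. A small hedged sketch (hypothetical, not part of the record; the name serializer_unique_pointwise is invented for this illustration) lifts it to a quantified statement with Classical.forall_intro, mirroring how mk_serializer in the same file already uses that combinator.

(* Hypothetical sketch: pointwise agreement of two serializers, obtained by
   universally quantifying serializer_unique. Name invented for illustration. *)
let serializer_unique_pointwise
  (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p)
: Lemma (forall (x: t) . s1 x == s2 x)
= Classical.forall_intro (serializer_unique p s1 s2)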
LowParse.Spec.Base.fsti
LowParse.Spec.Base.parsed_data_is_serialize
val parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures (let Some (y, consumed) = parse p x in ((serialize s y) `Seq.append` (Seq.slice x consumed (Seq.length x))) `Seq.equal` x))
val parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures (let Some (y, consumed) = parse p x in ((serialize s y) `Seq.append` (Seq.slice x consumed (Seq.length x))) `Seq.equal` x))
let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 37, "end_line": 788, "start_col": 0, "start_line": 775 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = ()
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: LowParse.Spec.Base.serializer p -> x: LowParse.Bytes.bytes -> FStar.Pervasives.Lemma (requires Some? (LowParse.Spec.Base.parse p x)) (ensures (let _ = LowParse.Spec.Base.parse p x in (let FStar.Pervasives.Native.Some #_ (FStar.Pervasives.Native.Mktuple2 #_ #_ y consumed) = _ in FStar.Seq.Base.equal (FStar.Seq.Base.append (LowParse.Spec.Base.serialize s y) (FStar.Seq.Base.slice x consumed (FStar.Seq.Base.length x))) x) <: Type0))
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.serializer", "LowParse.Bytes.bytes", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse_injective", "LowParse.Spec.Base.serialize", "Prims.unit", "FStar.Pervasives.Native.option", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.parse", "Prims.b2t", "FStar.Pervasives.Native.uu___is_Some", "Prims.squash", "FStar.Seq.Base.equal", "LowParse.Bytes.byte", "FStar.Seq.Base.append", "FStar.Seq.Base.slice", "FStar.Seq.Base.length", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
false
false
true
false
false
let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures (let Some (y, consumed) = parse p x in ((serialize s y) `Seq.append` (Seq.slice x consumed (Seq.length x))) `Seq.equal` x)) =
let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x
false
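The record above is parsed_data_is_serialize: re-serializing a parsed value and appending the unconsumed suffix reconstructs the original input. One immediate consequence, sketched below as hypothetical code (the name and the explicit Seq.lemma_len_append call are this note's assumptions, and the proof may need a slightly higher rlimit than the file's default), is that the serialized form of the parsed value has exactly the consumed length.

(* Hypothetical sketch: the length of the re-serialized prefix equals the number
   of bytes consumed. Derived from parsed_data_is_serialize together with the
   sequence length lemmas; name invented for illustration. *)
let parsed_data_serialized_length
  (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes)
: Lemma
  (requires (Some? (parse p x)))
  (ensures (let Some (y, consumed) = parse p x in
    Seq.length (serialize s y) == (consumed <: nat)))
= parsed_data_is_serialize s x;
  let Some (y, consumed) = parse p x in
  (* Expose the length of the appended sequence to the solver. *)
  Seq.lemma_len_append (serialize s y) (Seq.slice x consumed (Seq.length x))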
LowParse.Spec.Base.fsti
LowParse.Spec.Base.serialize_ext'
val serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True))
val serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True))
let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 24, "end_line": 754, "start_col": 0, "start_line": 743 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p1: LowParse.Spec.Base.parser k1 t1 -> s1: LowParse.Spec.Base.serializer p1 -> p2: LowParse.Spec.Base.parser k2 t2 -> Prims.Pure (LowParse.Spec.Base.serializer p2)
Prims.Pure
[]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.serializer", "LowParse.Spec.Base.serialize_ext", "Prims.l_and", "Prims.eq2", "Prims.l_True" ]
[]
false
false
false
false
false
let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) =
serialize_ext p1 s1 p2
false
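A short, hedged usage sketch for the serialize_ext' record above (assuming the LowParse.Spec.Base definitions shown in its file_context are in scope): serialize_ext lets a serializer built against one parser be reused for any parser with the same parse function, for example a weakened copy of the same parser. The name serialize_weaken below is illustrative, not taken from the record.

let serialize_weaken
  (k1: parser_kind)
  (#k2: parser_kind)
  (#t: Type)
  (#p2: parser k2 t)
  (s2: serializer p2 { k1 `is_weaker_than` k2 })
: Tot (serializer (weaken k1 p2))
  (* weaken only changes the kind, not the parse function, so the
     serializer transfers unchanged via serialize_ext *)
= serialize_ext p2 s2 (weaken k1 p2)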
LowParse.Spec.Base.fsti
LowParse.Spec.Base.serialize_strong_prefix
val serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires (k.parser_kind_subkind == Some ParserStrong /\ (serialize s x1) `Seq.append` q1 == (serialize s x2) `Seq.append` q2)) (ensures (x1 == x2 /\ q1 == q2))
val serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires (k.parser_kind_subkind == Some ParserStrong /\ (serialize s x1) `Seq.append` q1 == (serialize s x2) `Seq.append` q2)) (ensures (x1 == x2 /\ q1 == q2))
let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 62, "end_line": 899, "start_col": 0, "start_line": 884 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)]
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: LowParse.Spec.Base.serializer p -> x1: t -> x2: t -> q1: LowParse.Bytes.bytes -> q2: LowParse.Bytes.bytes -> FStar.Pervasives.Lemma (requires Mkparser_kind'?.parser_kind_subkind k == FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong /\ FStar.Seq.Base.append (LowParse.Spec.Base.serialize s x1) q1 == FStar.Seq.Base.append (LowParse.Spec.Base.serialize s x2) q2) (ensures x1 == x2 /\ q1 == q2)
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.serializer", "LowParse.Bytes.bytes", "FStar.Seq.Properties.lemma_append_inj", "LowParse.Bytes.byte", "LowParse.Spec.Base.serialize", "Prims.unit", "LowParse.Spec.Base.parse_strong_prefix", "FStar.Seq.Base.append", "Prims.l_and", "Prims.eq2", "FStar.Pervasives.Native.option", "LowParse.Spec.Base.parser_subkind", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind", "FStar.Pervasives.Native.Some", "LowParse.Spec.Base.ParserStrong", "FStar.Seq.Base.seq", "Prims.squash", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires (k.parser_kind_subkind == Some ParserStrong /\ (serialize s x1) `Seq.append` q1 == (serialize s x2) `Seq.append` q2)) (ensures (x1 == x2 /\ q1 == q2)) =
parse_strong_prefix p (serialize s x1) ((serialize s x1) `Seq.append` q1); parse_strong_prefix p (serialize s x2) ((serialize s x2) `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2
false
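A hedged usage sketch for the serialize_strong_prefix record above (the lemma name frame_head_eq is hypothetical; it assumes the surrounding LowParse.Spec.Base definitions are in scope): for a strong-kind serializer, byte equality of two framed encodings yields equality of both the leading values and the trailing bytes, which is the typical step when peeling a field off the front of a frame.

let frame_head_eq
  (#k: parser_kind) (#t: Type) (#p: parser k t)
  (s: serializer p { k.parser_kind_subkind == Some ParserStrong })
  (x1 x2: t) (q1 q2: bytes)
: Lemma
  (requires ((serialize s x1) `Seq.append` q1 == (serialize s x2) `Seq.append` q2))
  (ensures (x1 == x2 /\ q1 == q2))
  (* direct application: the subkind hypothesis is carried by the refinement on s *)
= serialize_strong_prefix s x1 x2 q1 q2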
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_upd_seq
val seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Pure (s_: Seq.seq t {Seq.length s_ == Seq.length s}) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True))
val seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Pure (s_: Seq.seq t {Seq.length s_ == Seq.length s}) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True))
let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s)))
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 68, "end_line": 932, "start_col": 0, "start_line": 922 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: FStar.Seq.Base.seq t -> i: Prims.nat -> s': FStar.Seq.Base.seq t -> Prims.Pure (s_: FStar.Seq.Base.seq t {FStar.Seq.Base.length s_ == FStar.Seq.Base.length s})
Prims.Pure
[]
[]
[ "FStar.Seq.Base.seq", "Prims.nat", "FStar.Seq.Base.append", "FStar.Seq.Base.slice", "Prims.op_Addition", "FStar.Seq.Base.length", "Prims.eq2", "Prims.b2t", "Prims.op_LessThanOrEqual", "Prims.l_True" ]
[]
false
false
false
false
false
let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Pure (s_: Seq.seq t {Seq.length s_ == Seq.length s}) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) =
Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s)))
false
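A minimal, hedged sketch for the seq_upd_seq record above (the lemma name seq_upd_seq_len is hypothetical): the refinement on the result type already states that an in-range window update preserves the length of the target sequence, so the proof obligation is discharged immediately.

let seq_upd_seq_len
  (#t: Type) (s: Seq.seq t) (i: nat)
  (s': Seq.seq t { i + Seq.length s' <= Seq.length s })
: Lemma (Seq.length (seq_upd_seq s i s') == Seq.length s)
  (* follows from the refinement on the result type of seq_upd_seq *)
= ()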
LowParse.Spec.Base.fsti
LowParse.Spec.Base.serializer_parser_unique
val serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires (is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s) ) (ensures (p1 x == p2 x))
val serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires (is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s) ) (ensures (p1 x == p2 x))
let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? (p2 x) then serializer_parser_unique' p2 p1 s x else ()
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 9, "end_line": 857, "start_col": 0, "start_line": 835 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x ))
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p1: LowParse.Spec.Base.parser k1 t -> p2: LowParse.Spec.Base.parser k2 t -> s: LowParse.Spec.Base.bare_serializer t -> x: LowParse.Bytes.bytes -> FStar.Pervasives.Lemma (requires LowParse.Spec.Base.is_strong p1 /\ LowParse.Spec.Base.is_strong p2 /\ LowParse.Spec.Base.serializer_correct p1 s /\ LowParse.Spec.Base.serializer_correct p2 s) (ensures p1 x == p2 x)
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.bare_serializer", "LowParse.Bytes.bytes", "FStar.Pervasives.Native.uu___is_Some", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.serializer_parser_unique'", "Prims.bool", "Prims.unit", "Prims.l_and", "Prims.b2t", "LowParse.Spec.Base.is_strong", "LowParse.Spec.Base.serializer_correct", "Prims.squash", "Prims.eq2", "FStar.Pervasives.Native.option", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
false
false
true
false
false
let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires (is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s) ) (ensures (p1 x == p2 x)) =
if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? (p2 x) then serializer_parser_unique' p2 p1 s x
false
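For readability, the partial_definition and completed_definiton fields of the row above assemble into the following full F* definition. This is only a reading aid, not an additional dataset field: the text is the concatenation of the two fields, laid out on several lines. The source file itself (see the file_context of the seq_upd_seq_slice' row further below) spells out an explicit final `else ()`, which is equivalent here because both branches have type unit.

(* Assembled from the partial_definition and completed_definiton fields above. *)
let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes)
: Lemma
  (requires (is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s))
  (ensures (p1 x == p2 x))
= if Some? (p1 x) then serializer_parser_unique' p1 p2 s x
  else if Some? (p2 x) then serializer_parser_unique' p2 p1 s x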
LowParse.Spec.Base.fsti
LowParse.Spec.Base.serializer_injective
val serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2))
val serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2))
let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2))
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 43, "end_line": 813, "start_col": 0, "start_line": 803 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
p: LowParse.Spec.Base.parser k t -> s: LowParse.Spec.Base.serializer p -> x1: t -> x2: t -> FStar.Pervasives.Lemma (requires s x1 == s x2) (ensures x1 == x2)
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.serializer", "Prims._assert", "Prims.eq2", "FStar.Pervasives.Native.option", "FStar.Pervasives.Native.tuple2", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse", "Prims.unit", "LowParse.Bytes.bytes", "Prims.squash", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) =
assert (parse p (s x1) == parse p (s x2))
false
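As a usage sketch (not part of the dataset row above), serializer_injective directly yields the contrapositive fact that distinct values serialize to distinct byte strings. The corollary below is a hypothetical illustration written against the LowParse.Spec.Base context shown in the file_context field; its name is an assumption, and it reuses the FStar.Classical.move_requires pattern that already appears in that context.

(* Hypothetical corollary, for illustration only: distinct values have
   distinct serializations. Assumes the LowParse.Spec.Base definitions
   shown in the file_context above. *)
let serializer_injective_contrapositive
  (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t)
: Lemma (requires (~ (x1 == x2))) (ensures (~ (s x1 == s x2)))
= (* turn the precondition of serializer_injective into an implication,
     so the SMT solver can conclude by contraposition *)
  Classical.move_requires (serializer_injective p s x1) x2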
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_upd_seq_slice'
val seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i)))
val seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i)))
let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i)
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 78, "end_line": 966, "start_col": 0, "start_line": 956 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s')
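The file context above defines bare parsers and the serializer_correct property (parse p (f x) == Some (x, Seq.length (f x))). Purely as an illustration, here is a hypothetical OCaml toy model of that round-trip property using a single-byte parser; none of the names below come from the dataset or from LowParse.

(* Hypothetical toy model (not from LowParse): a bare parser returns the
   parsed value together with the number of elements consumed. *)
let parse_u8 (b : int list) : (int * int) option =
  match b with
  | x :: _ -> Some (x, 1)
  | [] -> None

let serialize_u8 (x : int) : int list = [x]

(* serializer_correct, instantiated: parsing a serialized value yields the
   value back and consumes exactly the serialized length. *)
let () =
  assert (parse_u8 (serialize_u8 42) = Some (42, List.length (serialize_u8 42)))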
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: FStar.Seq.Base.seq t -> i: Prims.nat -> s': FStar.Seq.Base.seq t -> j1: Prims.nat -> j2: Prims.nat -> FStar.Pervasives.Lemma (requires i + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + FStar.Seq.Base.length s') (ensures FStar.Seq.Base.slice (LowParse.Spec.Base.seq_upd_seq s i s') j1 j2 == FStar.Seq.Base.slice s' (j1 - i) (j2 - i))
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "Prims.nat", "FStar.Seq.Properties.slice_slice", "LowParse.Spec.Base.seq_upd_seq", "Prims.op_Addition", "FStar.Seq.Base.length", "Prims.op_Subtraction", "Prims.unit", "LowParse.Spec.Base.seq_upd_seq_slice", "Prims.l_and", "Prims.b2t", "Prims.op_LessThanOrEqual", "Prims.squash", "Prims.eq2", "FStar.Seq.Base.slice", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) =
seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i)
false
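For illustration, the following hypothetical OCaml sketch (not part of the dataset) models seq_upd_seq over lists and checks, on a concrete input, the facts stated by seq_upd_seq_slice and seq_upd_seq_slice'; take, drop and slice are ad-hoc helpers introduced only for this sketch.

(* Hypothetical OCaml model of LowParse.Spec.Base.seq_upd_seq (not part of
   the dataset): splice s' into s at offset i, keeping the length of s
   whenever i + length s' <= length s. *)
let take n l = List.filteri (fun k _ -> k < n) l
let drop n l = List.filteri (fun k _ -> k >= n) l
let slice l lo hi = List.filteri (fun k _ -> lo <= k && k < hi) l

let seq_upd_seq s i s' =
  take i s @ s' @ drop (i + List.length s') s

let () =
  (* Splicing [9; 9] into [0; 1; 2; 3; 4] at offset 1. *)
  let upd = seq_upd_seq [0; 1; 2; 3; 4] 1 [9; 9] in
  assert (upd = [0; 9; 9; 3; 4]);
  (* seq_upd_seq_slice: reading the updated window back gives s'. *)
  assert (slice upd 1 3 = [9; 9]);
  (* seq_upd_seq_slice': a sub-window [j1, j2) of the updated region equals
     the corresponding window of s', shifted left by i (here i = 1). *)
  assert (slice upd 2 3 = slice [9; 9] 1 2)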
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_upd_seq_seq_upd
val seq_upd_seq_seq_upd (#t: Type) (s: Seq.seq t) (i: nat) (x: t) : Lemma (requires (i < Seq.length s)) (ensures (Seq.upd s i x == seq_upd_seq s i (Seq.create 1 x)))
val seq_upd_seq_seq_upd (#t: Type) (s: Seq.seq t) (i: nat) (x: t) : Lemma (requires (i < Seq.length s)) (ensures (Seq.upd s i x == seq_upd_seq s i (Seq.create 1 x)))
let seq_upd_seq_seq_upd (#t: Type) (s: Seq.seq t) (i: nat) (x: t) : Lemma (requires (i < Seq.length s)) (ensures (Seq.upd s i x == seq_upd_seq s i (Seq.create 1 x))) = assert (Seq.upd s i x `Seq.equal` seq_upd_seq s i (Seq.create 1 x))
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 69, "end_line": 1103, "start_col": 0, "start_line": 1095 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i) let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i) let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2 let seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s))) = assert (Seq.slice 
(seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) `Seq.equal` Seq.slice s (i + Seq.length s') (Seq.length s)) let seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_right s i s'; Seq.slice_slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) (j1 - (i + Seq.length s')) (j2 - (i + Seq.length s')) let seq_upd_seq_empty (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i <= Seq.length s /\ Seq.length s' == 0)) (ensures (seq_upd_seq s i s' == s)) = assert (seq_upd_seq s i s' `Seq.equal` s) let seq_upd_seq_slice_idem (#t: Type) (s: Seq.seq t) (lo hi: nat) : Lemma (requires (lo <= hi /\ hi <= Seq.length s)) (ensures (seq_upd_seq s lo (Seq.slice s lo hi) == s)) = assert (seq_upd_seq s lo (Seq.slice s lo hi) `Seq.equal` s) let seq_upd_seq_left (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s 0 s' == Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s)))) = assert (seq_upd_seq s 0 s' `Seq.equal` Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s))) let seq_upd_seq_right (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s (Seq.length s - Seq.length s') s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s')) = assert (seq_upd_seq s (Seq.length s - Seq.length s') s' `Seq.equal` Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s') let seq_upd_seq_right_to_left (#t: Type) (s1: Seq.seq t) (i1: nat) (s2: Seq.seq t) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 + Seq.length s2 <= Seq.length s1 /\ i2 + Seq.length s3 <= Seq.length s2)) (ensures ( seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) == seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3 )) = assert (seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) `Seq.equal` seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3) let seq_upd_seq_seq_upd_seq_slice (#t: Type) (s1: Seq.seq t) (i1: nat) (hi: nat) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 <= hi /\ hi <= Seq.length s1 /\ i1 + i2 + Seq.length s3 <= hi)) (ensures ( seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) == seq_upd_seq s1 (i1 + i2) s3 )) = assert (seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) `Seq.equal` seq_upd_seq s1 (i1 + i2) s3) let seq_upd_seq_disj_comm (#t: Type) (s: Seq.seq t) (i1: nat) (s1: Seq.seq t) (i2: nat) (s2: Seq.seq t) : Lemma (requires ( i1 + Seq.length s1 <= Seq.length s /\ i2 + Seq.length s2 <= Seq.length s /\ (i1 + Seq.length s1 <= i2 \/ i2 + Seq.length s2 <= i1) )) (ensures ( seq_upd_seq (seq_upd_seq s i1 s1) i2 s2 == seq_upd_seq (seq_upd_seq s i2 s2) i1 s1 )) = assert (seq_upd_seq (seq_upd_seq s i1 s1) i2 s2 `Seq.equal` seq_upd_seq (seq_upd_seq s i2 s2) i1 s1)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: FStar.Seq.Base.seq t -> i: Prims.nat -> x: t -> FStar.Pervasives.Lemma (requires i < FStar.Seq.Base.length s) (ensures FStar.Seq.Base.upd s i x == LowParse.Spec.Base.seq_upd_seq s i (FStar.Seq.Base.create 1 x))
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "Prims.nat", "Prims._assert", "FStar.Seq.Base.equal", "FStar.Seq.Base.upd", "LowParse.Spec.Base.seq_upd_seq", "FStar.Seq.Base.create", "Prims.unit", "Prims.b2t", "Prims.op_LessThan", "FStar.Seq.Base.length", "Prims.squash", "Prims.eq2", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_upd_seq_seq_upd (#t: Type) (s: Seq.seq t) (i: nat) (x: t) : Lemma (requires (i < Seq.length s)) (ensures (Seq.upd s i x == seq_upd_seq s i (Seq.create 1 x))) =
assert ((Seq.upd s i x) `Seq.equal` (seq_upd_seq s i (Seq.create 1 x)))
false
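As a companion sketch (again hypothetical, not part of the dataset), the list model below restates the content of seq_upd_seq_seq_upd: a point update agrees with splicing in a one-element sequence. The model is repeated so the sketch stands alone.

(* Hypothetical OCaml restatement (not from the dataset) of
   seq_upd_seq_seq_upd: a point update equals splicing in a singleton. *)
let seq_upd_seq s i s' =
  let take n l = List.filteri (fun k _ -> k < n) l in
  let drop n l = List.filteri (fun k _ -> k >= n) l in
  take i s @ s' @ drop (i + List.length s') s

(* List model of FStar.Seq.Base.upd. *)
let upd s i x = List.mapi (fun k y -> if k = i then x else y) s

let () =
  let s = [0; 1; 2; 3] in
  (* Both sides evaluate to [0; 7; 2; 3]. *)
  assert (upd s 1 7 = seq_upd_seq s 1 [7])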
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_upd_seq_right
val seq_upd_seq_right (#t: Type) (s s': Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s (Seq.length s - Seq.length s') s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s'))
val seq_upd_seq_right (#t: Type) (s s': Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s (Seq.length s - Seq.length s') s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s'))
let seq_upd_seq_right (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s (Seq.length s - Seq.length s') s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s')) = assert (seq_upd_seq s (Seq.length s - Seq.length s') s' `Seq.equal` Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s')
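Purely as an illustration of the definition just above, a hypothetical OCaml check (names are ad hoc and not from the dataset) of the shape stated by seq_upd_seq_right: writing s' at offset length s - length s' keeps the prefix of s and replaces the suffix with s'.

(* Hypothetical OCaml check (not from the dataset) of seq_upd_seq_right. *)
let seq_upd_seq s i s' =
  let take n l = List.filteri (fun k _ -> k < n) l in
  let drop n l = List.filteri (fun k _ -> k >= n) l in
  take i s @ s' @ drop (i + List.length s') s

let () =
  let s = [0; 1; 2; 3; 4] and s' = [8; 9] in
  let i = List.length s - List.length s' in          (* i = 3 *)
  let prefix = List.filteri (fun k _ -> k < i) s in  (* [0; 1; 2] *)
  (* Both sides evaluate to [0; 1; 2; 8; 9]. *)
  assert (seq_upd_seq s i s' = prefix @ s')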
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 131, "end_line": 1047, "start_col": 0, "start_line": 1040 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i) let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i) let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2 let seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s))) = assert (Seq.slice 
(seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) `Seq.equal` Seq.slice s (i + Seq.length s') (Seq.length s)) let seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_right s i s'; Seq.slice_slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) (j1 - (i + Seq.length s')) (j2 - (i + Seq.length s')) let seq_upd_seq_empty (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i <= Seq.length s /\ Seq.length s' == 0)) (ensures (seq_upd_seq s i s' == s)) = assert (seq_upd_seq s i s' `Seq.equal` s) let seq_upd_seq_slice_idem (#t: Type) (s: Seq.seq t) (lo hi: nat) : Lemma (requires (lo <= hi /\ hi <= Seq.length s)) (ensures (seq_upd_seq s lo (Seq.slice s lo hi) == s)) = assert (seq_upd_seq s lo (Seq.slice s lo hi) `Seq.equal` s) let seq_upd_seq_left (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s 0 s' == Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s)))) = assert (seq_upd_seq s 0 s' `Seq.equal` Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s)))
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: FStar.Seq.Base.seq t -> s': FStar.Seq.Base.seq t -> FStar.Pervasives.Lemma (requires FStar.Seq.Base.length s' <= FStar.Seq.Base.length s) (ensures LowParse.Spec.Base.seq_upd_seq s (FStar.Seq.Base.length s - FStar.Seq.Base.length s') s' == FStar.Seq.Base.append (FStar.Seq.Base.slice s 0 (FStar.Seq.Base.length s - FStar.Seq.Base.length s')) s')
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "Prims._assert", "FStar.Seq.Base.equal", "LowParse.Spec.Base.seq_upd_seq", "Prims.op_Subtraction", "FStar.Seq.Base.length", "FStar.Seq.Base.append", "FStar.Seq.Base.slice", "Prims.unit", "Prims.b2t", "Prims.op_LessThanOrEqual", "Prims.squash", "Prims.eq2", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_upd_seq_right (#t: Type) (s s': Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s (Seq.length s - Seq.length s') s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s')) =
assert ((seq_upd_seq s (Seq.length s - Seq.length s') s') `Seq.equal` (Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s'))
false
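A minimal usage sketch, not part of the dataset record above: the record proves seq_upd_seq_right with a single Seq.equal assertion. The helper name upd_last_byte below is hypothetical, the snippet assumes it is checked inside a module that includes LowParse.Spec.Base, and it has not been machine-verified here; it only illustrates how a downstream proof would invoke the lemma to replace the final byte of a non-empty sequence.

(* Hypothetical example: overwrite the last byte of s with b and relate the
   result to an explicit slice/append decomposition via seq_upd_seq_right. *)
let upd_last_byte (s: Seq.seq U8.t) (b: U8.t)
  : Lemma
    (requires (Seq.length s >= 1))
    (ensures (seq_upd_seq s (Seq.length s - 1) (Seq.create 1 b) ==
              Seq.append (Seq.slice s 0 (Seq.length s - 1)) (Seq.create 1 b)))
= seq_upd_seq_right s (Seq.create 1 b)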
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_upd_seq_slice
val seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s'))
val seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s'))
let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s')
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 78, "end_line": 954, "start_col": 0, "start_line": 946 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = ()
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: FStar.Seq.Base.seq t -> i: Prims.nat -> s': FStar.Seq.Base.seq t -> FStar.Pervasives.Lemma (requires i + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s) (ensures FStar.Seq.Base.slice (LowParse.Spec.Base.seq_upd_seq s i s') i (i + FStar.Seq.Base.length s') == s')
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "Prims.nat", "Prims._assert", "FStar.Seq.Base.equal", "FStar.Seq.Base.slice", "LowParse.Spec.Base.seq_upd_seq", "Prims.op_Addition", "FStar.Seq.Base.length", "Prims.unit", "Prims.b2t", "Prims.op_LessThanOrEqual", "Prims.squash", "Prims.eq2", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) =
assert ((Seq.slice (seq_upd_seq s i s') i (i + Seq.length s')) `Seq.equal` s')
false
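A minimal usage sketch, not part of the dataset record above: specializing seq_upd_seq_slice to offset 0 says that writing s' at the start of s and then reading the same window back returns exactly s'. The helper name write_at_start_read_back is hypothetical, the snippet assumes a module that includes LowParse.Spec.Base, and it has not been machine-verified here.

(* Hypothetical example: write s' at offset 0, then read the written window back. *)
let write_at_start_read_back (#t: Type) (s s': Seq.seq t)
  : Lemma
    (requires (Seq.length s' <= Seq.length s))
    (ensures (Seq.slice (seq_upd_seq s 0 s') 0 (Seq.length s') == s'))
= seq_upd_seq_slice s 0 s'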
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_upd_seq_slice_left
val seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i))
val seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i))
let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i)
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 73, "end_line": 976, "start_col": 0, "start_line": 968 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: FStar.Seq.Base.seq t -> i: Prims.nat -> s': FStar.Seq.Base.seq t -> FStar.Pervasives.Lemma (requires i + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s) (ensures FStar.Seq.Base.slice (LowParse.Spec.Base.seq_upd_seq s i s') 0 i == FStar.Seq.Base.slice s 0 i)
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "Prims.nat", "Prims._assert", "FStar.Seq.Base.equal", "FStar.Seq.Base.slice", "LowParse.Spec.Base.seq_upd_seq", "Prims.unit", "Prims.b2t", "Prims.op_LessThanOrEqual", "Prims.op_Addition", "FStar.Seq.Base.length", "Prims.squash", "Prims.eq2", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) =
assert ((Seq.slice (seq_upd_seq s i s') 0 i) `Seq.equal` (Seq.slice s 0 i))
false
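A minimal usage sketch of seq_upd_seq_slice_left, assuming the LowParse.Spec.Base definitions from the file context above (and `module Seq = FStar.Seq`) are in scope; the wrapper name `prefix_untouched` is hypothetical and its body simply invokes the lemma defined in this row.

(* Sketch: overwriting a window of s starting at index i leaves the prefix
   s[0 .. i) unchanged; seq_upd_seq_slice_left discharges the goal directly. *)
let prefix_untouched (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t)
  : Lemma
    (requires (i + Seq.length s' <= Seq.length s))
    (ensures  (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i))
= seq_upd_seq_slice_left s i s'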
LowParse.Spec.Base.fsti
LowParse.Spec.Base.parse_truncate
val parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p {k.parser_kind_subkind == Some ParserStrong}) (x: t) (n: nat) : Lemma (requires (n < Seq.length (serialize s x))) (ensures (parse p (Seq.slice (serialize s x) 0 n) == None))
val parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p {k.parser_kind_subkind == Some ParserStrong}) (x: t) (n: nat) : Lemma (requires (n < Seq.length (serialize s x))) (ensures (parse p (Seq.slice (serialize s x) 0 n) == None))
let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 33, "end_line": 920, "start_col": 0, "start_line": 901 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: LowParse.Spec.Base.serializer p { Mkparser_kind'?.parser_kind_subkind k == FStar.Pervasives.Native.Some LowParse.Spec.Base.ParserStrong } -> x: t -> n: Prims.nat -> FStar.Pervasives.Lemma (requires n < FStar.Seq.Base.length (LowParse.Spec.Base.serialize s x)) (ensures LowParse.Spec.Base.parse p (FStar.Seq.Base.slice (LowParse.Spec.Base.serialize s x) 0 n) == FStar.Pervasives.Native.None)
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "LowParse.Spec.Base.parser_kind", "LowParse.Spec.Base.parser", "LowParse.Spec.Base.serializer", "Prims.eq2", "FStar.Pervasives.Native.option", "LowParse.Spec.Base.parser_subkind", "LowParse.Spec.Base.__proj__Mkparser_kind'__item__parser_kind_subkind", "FStar.Pervasives.Native.Some", "LowParse.Spec.Base.ParserStrong", "Prims.nat", "LowParse.Spec.Base.parse", "LowParse.Spec.Base.consumed_length", "LowParse.Spec.Base.parse_strong_prefix", "Prims.unit", "FStar.Seq.Base.seq", "LowParse.Bytes.byte", "FStar.Seq.Base.slice", "LowParse.Bytes.bytes", "LowParse.Spec.Base.serialize", "Prims.b2t", "Prims.op_LessThan", "FStar.Seq.Base.length", "Prims.squash", "FStar.Pervasives.Native.tuple2", "FStar.Pervasives.Native.None", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
false
false
true
false
false
let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p {k.parser_kind_subkind == Some ParserStrong}) (x: t) (n: nat) : Lemma (requires (n < Seq.length (serialize s x))) (ensures (parse p (Seq.slice (serialize s x) 0 n) == None)) =
let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0
false
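A hedged usage sketch of parse_truncate, assuming the `parser`, `serializer` and `serialize` definitions from the file context above are in scope; the wrapper name `truncated_serialization_fails` is hypothetical, and its body is a direct application of the lemma from this row's completed definition.

(* Sketch: for a strong parser kind, any strict prefix of a serialized value
   fails to parse, so a partial read can never be mistaken for a complete one. *)
let truncated_serialization_fails
  (#k: parser_kind) (#t: Type) (#p: parser k t)
  (s: serializer p { k.parser_kind_subkind == Some ParserStrong })
  (x: t)
  (n: nat { n < Seq.length (serialize s x) })
  : Lemma (parse p (Seq.slice (serialize s x) 0 n) == None)
= parse_truncate s x n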
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_upd_seq_slice_left'
val seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2))
val seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2))
let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 48, "end_line": 988, "start_col": 0, "start_line": 978 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i) let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: FStar.Seq.Base.seq t -> i: Prims.nat -> s': FStar.Seq.Base.seq t -> j1: Prims.nat -> j2: Prims.nat -> FStar.Pervasives.Lemma (requires i + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s /\ j1 <= j2 /\ j2 <= i) (ensures FStar.Seq.Base.slice (LowParse.Spec.Base.seq_upd_seq s i s') j1 j2 == FStar.Seq.Base.slice s j1 j2)
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "Prims.nat", "FStar.Seq.Properties.slice_slice", "LowParse.Spec.Base.seq_upd_seq", "Prims.unit", "LowParse.Spec.Base.seq_upd_seq_slice_left", "Prims.l_and", "Prims.b2t", "Prims.op_LessThanOrEqual", "Prims.op_Addition", "FStar.Seq.Base.length", "Prims.squash", "Prims.eq2", "FStar.Seq.Base.slice", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) =
seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2
false
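A hedged sketch combining seq_upd_seq_slice and seq_upd_seq_slice_left from the file context above, showing how the slice lemmas compose; the wrapper name `upd_then_read_back` is hypothetical.

(* Sketch: after overwriting the window [i, i + length s') of s with s', reading
   the window back returns s' while the data before index i is untouched. *)
let upd_then_read_back (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t)
  : Lemma
    (requires (i + Seq.length s' <= Seq.length s))
    (ensures  (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s' /\
               Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i))
= seq_upd_seq_slice s i s';
  seq_upd_seq_slice_left s i s'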
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_upd_seq_empty
val seq_upd_seq_empty (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Lemma (requires (i <= Seq.length s /\ Seq.length s' == 0)) (ensures (seq_upd_seq s i s' == s))
val seq_upd_seq_empty (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Lemma (requires (i <= Seq.length s /\ Seq.length s' == 0)) (ensures (seq_upd_seq s i s' == s))
let seq_upd_seq_empty (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i <= Seq.length s /\ Seq.length s' == 0)) (ensures (seq_upd_seq s i s' == s)) = assert (seq_upd_seq s i s' `Seq.equal` s)
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 43, "end_line": 1020, "start_col": 0, "start_line": 1012 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i) let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i) let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2 let seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s))) = assert (Seq.slice 
(seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) `Seq.equal` Seq.slice s (i + Seq.length s') (Seq.length s)) let seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_right s i s'; Seq.slice_slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) (j1 - (i + Seq.length s')) (j2 - (i + Seq.length s'))
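The slice lemmas that close the file context above characterize seq_upd_seq piecewise: the written window equals s', and the two flanking windows of s are unchanged. The following minimal F* sketch is not part of the original file; the module and function names are hypothetical, and it is assumed to verify with FStar.Seq's standard create/length lemmas in scope. It shows how the three lemmas are typically invoked together on a concrete update.

module LowParse.Example.SeqUpdSlices
open LowParse.Spec.Base
module Seq = FStar.Seq

(* Update two elements at offset 1 of a five-element sequence and
   describe the result slice by slice. *)
let seq_upd_seq_slices_example (#t: Type) (x y: t) : Lemma (ensures True) =
  let s  = Seq.create 5 x in       (* the sequence being updated       *)
  let s' = Seq.create 2 y in       (* the payload written at offset 1  *)
  seq_upd_seq_slice       s 1 s';  (* the slice of the update at [1,3) is s' *)
  seq_upd_seq_slice_left  s 1 s';  (* the slice at [0,1) agrees with s       *)
  seq_upd_seq_slice_right s 1 s'   (* the slice at [3,5) agrees with s       *)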
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
interleaved: false
verbose_type: s: FStar.Seq.Base.seq t -> i: Prims.nat -> s': FStar.Seq.Base.seq t -> FStar.Pervasives.Lemma (requires i <= FStar.Seq.Base.length s /\ FStar.Seq.Base.length s' == 0) (ensures LowParse.Spec.Base.seq_upd_seq s i s' == s)
effect: FStar.Pervasives.Lemma
effect_flags: [ "lemma" ]
mutual_with: []
ideal_premises: [ "FStar.Seq.Base.seq", "Prims.nat", "Prims._assert", "FStar.Seq.Base.equal", "LowParse.Spec.Base.seq_upd_seq", "Prims.unit", "Prims.l_and", "Prims.b2t", "Prims.op_LessThanOrEqual", "FStar.Seq.Base.length", "Prims.eq2", "Prims.int", "Prims.squash", "Prims.Nil", "FStar.Pervasives.pattern" ]
proof_features: []
is_simple_lemma: true
is_div: false
is_proof: true
is_simply_typed: false
is_type: false
partial_definition: let seq_upd_seq_empty (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Lemma (requires (i <= Seq.length s /\ Seq.length s' == 0)) (ensures (seq_upd_seq s i s' == s)) =
completed_definition: assert ((seq_upd_seq s i s') `Seq.equal` s)
isa_cross_project_example: false
file_name: LowParse.Spec.Base.fsti
name: LowParse.Spec.Base.seq_upd_bw_seq
original_source_type: val seq_upd_bw_seq (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Pure (s_: Seq.seq t {Seq.length s_ == Seq.length s}) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True))
source_type: val seq_upd_bw_seq (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Pure (s_: Seq.seq t {Seq.length s_ == Seq.length s}) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True))
source_definition: let seq_upd_bw_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = seq_upd_seq s (Seq.length s - i - Seq.length s') s'
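seq_upd_bw_seq is the backward-offset variant of seq_upd_seq: the offset i counts from the end of s, so the payload lands at forward offset Seq.length s - i - Seq.length s'. A small hypothetical sketch (the function name is illustrative and the assertion is expected rather than re-verified here) makes the index arithmetic concrete.

let seq_upd_bw_seq_example (#t: Type) (x y: t) : Lemma (ensures True) =
  let s  = Seq.create 10 x in  (* length 10                                  *)
  let s' = Seq.create 3 y in   (* length 3, written 2 positions from the end *)
  (* backward offset 2 corresponds to forward offset 10 - 2 - 3 = 5 *)
  assert (seq_upd_bw_seq s 2 s' == seq_upd_seq s 5 s')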
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 53, "end_line": 1141, "start_col": 0, "start_line": 1133 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i) let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i) let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2 let seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s))) = assert (Seq.slice 
(seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) `Seq.equal` Seq.slice s (i + Seq.length s') (Seq.length s)) let seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_right s i s'; Seq.slice_slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) (j1 - (i + Seq.length s')) (j2 - (i + Seq.length s')) let seq_upd_seq_empty (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i <= Seq.length s /\ Seq.length s' == 0)) (ensures (seq_upd_seq s i s' == s)) = assert (seq_upd_seq s i s' `Seq.equal` s) let seq_upd_seq_slice_idem (#t: Type) (s: Seq.seq t) (lo hi: nat) : Lemma (requires (lo <= hi /\ hi <= Seq.length s)) (ensures (seq_upd_seq s lo (Seq.slice s lo hi) == s)) = assert (seq_upd_seq s lo (Seq.slice s lo hi) `Seq.equal` s) let seq_upd_seq_left (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s 0 s' == Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s)))) = assert (seq_upd_seq s 0 s' `Seq.equal` Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s))) let seq_upd_seq_right (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s (Seq.length s - Seq.length s') s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s')) = assert (seq_upd_seq s (Seq.length s - Seq.length s') s' `Seq.equal` Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s') let seq_upd_seq_right_to_left (#t: Type) (s1: Seq.seq t) (i1: nat) (s2: Seq.seq t) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 + Seq.length s2 <= Seq.length s1 /\ i2 + Seq.length s3 <= Seq.length s2)) (ensures ( seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) == seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3 )) = assert (seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) `Seq.equal` seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3) let seq_upd_seq_seq_upd_seq_slice (#t: Type) (s1: Seq.seq t) (i1: nat) (hi: nat) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 <= hi /\ hi <= Seq.length s1 /\ i1 + i2 + Seq.length s3 <= hi)) (ensures ( seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) == seq_upd_seq s1 (i1 + i2) s3 )) = assert (seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) `Seq.equal` seq_upd_seq s1 (i1 + i2) s3) let seq_upd_seq_disj_comm (#t: Type) (s: Seq.seq t) (i1: nat) (s1: Seq.seq t) (i2: nat) (s2: Seq.seq t) : Lemma (requires ( i1 + Seq.length s1 <= Seq.length s /\ i2 + Seq.length s2 <= Seq.length s /\ (i1 + Seq.length s1 <= i2 \/ i2 + Seq.length s2 <= i1) )) (ensures ( seq_upd_seq (seq_upd_seq s i1 s1) i2 s2 == seq_upd_seq (seq_upd_seq s i2 s2) i1 s1 )) = assert (seq_upd_seq (seq_upd_seq s i1 s1) i2 s2 `Seq.equal` seq_upd_seq (seq_upd_seq s i2 s2) i1 s1) let seq_upd_seq_seq_upd (#t: Type) (s: Seq.seq t) (i: nat) (x: t) : Lemma (requires (i < Seq.length s)) (ensures (Seq.upd s i x == seq_upd_seq s i (Seq.create 1 x))) = assert (Seq.upd s i x `Seq.equal` seq_upd_seq s i (Seq.create 1 x)) let seq_append_seq_upd_seq_l (#t: Type) (s: Seq.seq t) (i': nat) (s' : Seq.seq t) (sl : Seq.seq t) : Lemma (requires (i' + Seq.length s' <= Seq.length s)) (ensures ( Seq.length sl + i' <= Seq.length (sl `Seq.append` s) /\ sl `Seq.append` seq_upd_seq s i' s' == seq_upd_seq (sl `Seq.append` s) (Seq.length sl + i') s' )) = assert (sl `Seq.append` seq_upd_seq s i' s' `Seq.equal` 
seq_upd_seq (sl `Seq.append` s) (Seq.length sl + i') s') let seq_append_seq_upd_seq_r (#t: Type) (s: Seq.seq t) (i': nat) (s' : Seq.seq t) (sr : Seq.seq t) : Lemma (requires (i' + Seq.length s' <= Seq.length s)) (ensures ( i' <= Seq.length (s `Seq.append` sr) /\ seq_upd_seq s i' s' `Seq.append` sr == seq_upd_seq (s `Seq.append` sr) i' s' )) = assert ((seq_upd_seq s i' s' `Seq.append` sr) `Seq.equal` seq_upd_seq (s `Seq.append` sr) i' s')
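The last two lemmas in this file context relate an update inside a buffer to an update on an appended buffer. The hedged sketch below shows how seq_append_seq_upd_seq_l would typically be used; the function name and the concrete sizes are illustrative only.

let append_upd_example (#t: Type) (x y: t) : Lemma (ensures True) =
  let sl = Seq.create 4 x in            (* prefix that gets prepended             *)
  let s  = Seq.create 6 x in            (* buffer updated at offset 1             *)
  let s' = Seq.create 2 y in            (* payload                                *)
  seq_append_seq_upd_seq_l s 1 s' sl;   (* shifts the offset by Seq.length sl = 4 *)
  assert (sl `Seq.append` seq_upd_seq s 1 s' == seq_upd_seq (sl `Seq.append` s) 5 s')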
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
interleaved: false
verbose_type: s: FStar.Seq.Base.seq t -> i: Prims.nat -> s': FStar.Seq.Base.seq t -> Prims.Pure (s_: FStar.Seq.Base.seq t {FStar.Seq.Base.length s_ == FStar.Seq.Base.length s})
effect: Prims.Pure
effect_flags: []
mutual_with: []
ideal_premises: [ "FStar.Seq.Base.seq", "Prims.nat", "LowParse.Spec.Base.seq_upd_seq", "Prims.op_Subtraction", "FStar.Seq.Base.length", "Prims.eq2", "Prims.b2t", "Prims.op_LessThanOrEqual", "Prims.op_Addition", "Prims.l_True" ]
proof_features: []
is_simple_lemma: false
is_div: false
is_proof: false
is_simply_typed: false
is_type: false
partial_definition: let seq_upd_bw_seq (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Pure (s_: Seq.seq t {Seq.length s_ == Seq.length s}) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) =
completed_definition: seq_upd_seq s (Seq.length s - i - Seq.length s') s'
isa_cross_project_example: false
file_name: LowParse.Spec.Base.fsti
name: LowParse.Spec.Base.seq_upd_seq_slice_right
original_source_type: val seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s)))
source_type: val seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s)))
source_definition: let seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s))) = assert (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) `Seq.equal` Seq.slice s (i + Seq.length s') (Seq.length s))
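The one-line proof in the definition above follows the pattern used throughout this file: assert pointwise equality with Seq.equal and let FStar.Seq's extensionality lemma (which carries an SMT pattern) lift it to propositional equality. A hypothetical fresh lemma proved the same way, included only to illustrate that pattern:

let slice_full_is_id (#t: Type) (s: Seq.seq t)
  : Lemma (Seq.slice s 0 (Seq.length s) == s)
= assert (Seq.slice s 0 (Seq.length s) `Seq.equal` s)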
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 135, "end_line": 998, "start_col": 0, "start_line": 990 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i) let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i) let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: FStar.Seq.Base.seq t -> i: Prims.nat -> s': FStar.Seq.Base.seq t -> FStar.Pervasives.Lemma (requires i + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s) (ensures FStar.Seq.Base.slice (LowParse.Spec.Base.seq_upd_seq s i s') (i + FStar.Seq.Base.length s') (FStar.Seq.Base.length s) == FStar.Seq.Base.slice s (i + FStar.Seq.Base.length s') (FStar.Seq.Base.length s))
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "Prims.nat", "Prims._assert", "FStar.Seq.Base.equal", "FStar.Seq.Base.slice", "LowParse.Spec.Base.seq_upd_seq", "Prims.op_Addition", "FStar.Seq.Base.length", "Prims.unit", "Prims.b2t", "Prims.op_LessThanOrEqual", "Prims.squash", "Prims.eq2", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s))) =
assert ((Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s)) `Seq.equal` (Seq.slice s (i + Seq.length s') (Seq.length s)))
false
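The record above is the lemma seq_upd_seq_slice_right: updating s at offset i with s' leaves the suffix past i + Seq.length s' untouched. As a worked illustration — a minimal sketch assuming only the definitions quoted in the file context, with a hypothetical lemma name — it can be combined with seq_upd_seq_slice to show that the update rewrites exactly the middle range and nothing after it:

(* Hypothetical corollary, sketched from the quoted lemmas: after
   seq_upd_seq s i s', the range [i, i + length s') is exactly s' and the
   suffix [i + length s', length s) is unchanged. *)
let seq_upd_seq_slice_middle_and_right_sketch
  (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t)
: Lemma
  (requires (i + Seq.length s' <= Seq.length s))
  (ensures (
    Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s' /\
    Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) ==
      Seq.slice s (i + Seq.length s') (Seq.length s)))
= seq_upd_seq_slice s i s';       (* middle part equals s' *)
  seq_upd_seq_slice_right s i s'  (* suffix is unchanged *)

Both conjuncts follow directly from the postconditions of the two quoted lemmas, so the sketch should need no additional SMT hints.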
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_upd_seq_slice_idem
val seq_upd_seq_slice_idem (#t: Type) (s: Seq.seq t) (lo hi: nat) : Lemma (requires (lo <= hi /\ hi <= Seq.length s)) (ensures (seq_upd_seq s lo (Seq.slice s lo hi) == s))
val seq_upd_seq_slice_idem (#t: Type) (s: Seq.seq t) (lo hi: nat) : Lemma (requires (lo <= hi /\ hi <= Seq.length s)) (ensures (seq_upd_seq s lo (Seq.slice s lo hi) == s))
let seq_upd_seq_slice_idem (#t: Type) (s: Seq.seq t) (lo hi: nat) : Lemma (requires (lo <= hi /\ hi <= Seq.length s)) (ensures (seq_upd_seq s lo (Seq.slice s lo hi) == s)) = assert (seq_upd_seq s lo (Seq.slice s lo hi) `Seq.equal` s)
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 61, "end_line": 1029, "start_col": 0, "start_line": 1022 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i) let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i) let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2 let seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s))) = assert (Seq.slice 
(seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) `Seq.equal` Seq.slice s (i + Seq.length s') (Seq.length s)) let seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_right s i s'; Seq.slice_slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) (j1 - (i + Seq.length s')) (j2 - (i + Seq.length s')) let seq_upd_seq_empty (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i <= Seq.length s /\ Seq.length s' == 0)) (ensures (seq_upd_seq s i s' == s)) = assert (seq_upd_seq s i s' `Seq.equal` s)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: FStar.Seq.Base.seq t -> lo: Prims.nat -> hi: Prims.nat -> FStar.Pervasives.Lemma (requires lo <= hi /\ hi <= FStar.Seq.Base.length s) (ensures LowParse.Spec.Base.seq_upd_seq s lo (FStar.Seq.Base.slice s lo hi) == s)
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "Prims.nat", "Prims._assert", "FStar.Seq.Base.equal", "LowParse.Spec.Base.seq_upd_seq", "FStar.Seq.Base.slice", "Prims.unit", "Prims.l_and", "Prims.b2t", "Prims.op_LessThanOrEqual", "FStar.Seq.Base.length", "Prims.squash", "Prims.eq2", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_upd_seq_slice_idem (#t: Type) (s: Seq.seq t) (lo hi: nat) : Lemma (requires (lo <= hi /\ hi <= Seq.length s)) (ensures (seq_upd_seq s lo (Seq.slice s lo hi) == s)) =
assert ((seq_upd_seq s lo (Seq.slice s lo hi)) `Seq.equal` s)
false
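The record above is seq_upd_seq_slice_idem: writing Seq.slice s lo hi back at offset lo reproduces s. A minimal specialization sketch, assuming only the definitions quoted above (the name seq_upd_seq_whole_idem_sketch is hypothetical), instantiates it with the whole sequence:

(* Hypothetical specialization of seq_upd_seq_slice_idem: overwriting s
   with a full copy of itself is the identity. The precondition
   lo <= hi /\ hi <= Seq.length s holds trivially for lo = 0, hi = length s. *)
let seq_upd_seq_whole_idem_sketch (#t: Type) (s: Seq.seq t)
: Lemma (seq_upd_seq s 0 (Seq.slice s 0 (Seq.length s)) == s)
= seq_upd_seq_slice_idem s 0 (Seq.length s)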
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_upd_seq_left
val seq_upd_seq_left (#t: Type) (s s': Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s 0 s' == Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s))))
val seq_upd_seq_left (#t: Type) (s s': Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s 0 s' == Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s))))
let seq_upd_seq_left (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s 0 s' == Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s)))) = assert (seq_upd_seq s 0 s' `Seq.equal` Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s)))
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 100, "end_line": 1038, "start_col": 0, "start_line": 1031 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i) let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i) let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2 let seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s))) = assert (Seq.slice 
(seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) `Seq.equal` Seq.slice s (i + Seq.length s') (Seq.length s)) let seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_right s i s'; Seq.slice_slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) (j1 - (i + Seq.length s')) (j2 - (i + Seq.length s')) let seq_upd_seq_empty (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i <= Seq.length s /\ Seq.length s' == 0)) (ensures (seq_upd_seq s i s' == s)) = assert (seq_upd_seq s i s' `Seq.equal` s) let seq_upd_seq_slice_idem (#t: Type) (s: Seq.seq t) (lo hi: nat) : Lemma (requires (lo <= hi /\ hi <= Seq.length s)) (ensures (seq_upd_seq s lo (Seq.slice s lo hi) == s)) = assert (seq_upd_seq s lo (Seq.slice s lo hi) `Seq.equal` s)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: FStar.Seq.Base.seq t -> s': FStar.Seq.Base.seq t -> FStar.Pervasives.Lemma (requires FStar.Seq.Base.length s' <= FStar.Seq.Base.length s) (ensures LowParse.Spec.Base.seq_upd_seq s 0 s' == FStar.Seq.Base.append s' (FStar.Seq.Base.slice s (FStar.Seq.Base.length s') (FStar.Seq.Base.length s)))
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "Prims._assert", "FStar.Seq.Base.equal", "LowParse.Spec.Base.seq_upd_seq", "FStar.Seq.Base.append", "FStar.Seq.Base.slice", "FStar.Seq.Base.length", "Prims.unit", "Prims.b2t", "Prims.op_LessThanOrEqual", "Prims.squash", "Prims.eq2", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_upd_seq_left (#t: Type) (s s': Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s 0 s' == Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s)))) =
assert ((seq_upd_seq s 0 s') `Seq.equal` (Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s))))
false
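Note: the record above ends with the completed definition of seq_upd_seq_left, which is proved by a single Seq.equal assertion. As an executable sanity check of that statement, here is a minimal OCaml sketch; the list-based slice and seq_upd_seq helpers are an illustrative model assumed for this note, not the FStar.Seq implementation and not part of the dataset.

(* Model LowParse.Spec.Base.seq_upd_seq over plain OCaml lists and check
   the seq_upd_seq_left statement on one concrete input. *)
let slice s lo hi = List.filteri (fun j _ -> lo <= j && j < hi) s

(* Overwrite s at offset i with s', keeping the prefix and the tail. *)
let seq_upd_seq s i s' =
  slice s 0 i @ s' @ slice s (i + List.length s') (List.length s)

let () =
  let s = [0; 1; 2; 3; 4] and s' = [9; 9] in
  (* seq_upd_seq_left: updating at offset 0 is s' followed by the
     untouched suffix of s. *)
  assert (seq_upd_seq s 0 s' = s' @ slice s (List.length s') (List.length s))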
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_upd_seq_slice_right'
val seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2))
val seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2))
let seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_right s i s'; Seq.slice_slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) (j1 - (i + Seq.length s')) (j2 - (i + Seq.length s'))
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 127, "end_line": 1010, "start_col": 0, "start_line": 1000 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i) let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i) let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2 let seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s))) = assert (Seq.slice 
(seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) `Seq.equal` Seq.slice s (i + Seq.length s') (Seq.length s))
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: FStar.Seq.Base.seq t -> i: Prims.nat -> s': FStar.Seq.Base.seq t -> j1: Prims.nat -> j2: Prims.nat -> FStar.Pervasives.Lemma (requires i + FStar.Seq.Base.length s' <= j1 /\ j1 <= j2 /\ j2 <= FStar.Seq.Base.length s) (ensures FStar.Seq.Base.slice (LowParse.Spec.Base.seq_upd_seq s i s') j1 j2 == FStar.Seq.Base.slice s j1 j2)
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "Prims.nat", "FStar.Seq.Properties.slice_slice", "LowParse.Spec.Base.seq_upd_seq", "Prims.op_Addition", "FStar.Seq.Base.length", "Prims.op_Subtraction", "Prims.unit", "LowParse.Spec.Base.seq_upd_seq_slice_right", "Prims.l_and", "Prims.b2t", "Prims.op_LessThanOrEqual", "Prims.squash", "Prims.eq2", "FStar.Seq.Base.slice", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s': Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) =
seq_upd_seq_slice_right s i s'; Seq.slice_slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) (j1 - (i + Seq.length s')) (j2 - (i + Seq.length s'))
false
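Note: the record above is for seq_upd_seq_slice_right', which states that a slice taken entirely to the right of the updated region is unchanged. Below is a small concrete check of that statement, again over an assumed list-based model of slice and seq_upd_seq rather than the FStar.Seq types used in the source.

(* Self-contained OCaml sketch: a slice starting at or after i + length s'
   is unaffected by seq_upd_seq s i s'. *)
let slice s lo hi = List.filteri (fun j _ -> lo <= j && j < hi) s
let seq_upd_seq s i s' =
  slice s 0 i @ s' @ slice s (i + List.length s') (List.length s)

let () =
  let s = [0; 1; 2; 3; 4; 5] and s' = [7; 8] in
  let i = 1 in                 (* updated region covers indices 1 and 2 *)
  let j1, j2 = 4, 6 in         (* i + length s' <= j1 <= j2 <= length s *)
  assert (slice (seq_upd_seq s i s') j1 j2 = slice s j1 j2)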
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_upd_seq_right_to_left
val seq_upd_seq_right_to_left (#t: Type) (s1: Seq.seq t) (i1: nat) (s2: Seq.seq t) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 + Seq.length s2 <= Seq.length s1 /\ i2 + Seq.length s3 <= Seq.length s2)) (ensures (seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) == seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3 ))
val seq_upd_seq_right_to_left (#t: Type) (s1: Seq.seq t) (i1: nat) (s2: Seq.seq t) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 + Seq.length s2 <= Seq.length s1 /\ i2 + Seq.length s3 <= Seq.length s2)) (ensures (seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) == seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3 ))
let seq_upd_seq_right_to_left (#t: Type) (s1: Seq.seq t) (i1: nat) (s2: Seq.seq t) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 + Seq.length s2 <= Seq.length s1 /\ i2 + Seq.length s3 <= Seq.length s2)) (ensures ( seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) == seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3 )) = assert (seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) `Seq.equal` seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3)
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 111, "end_line": 1061, "start_col": 0, "start_line": 1049 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i) let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i) let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2 let seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s))) = assert (Seq.slice 
(seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) `Seq.equal` Seq.slice s (i + Seq.length s') (Seq.length s)) let seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_right s i s'; Seq.slice_slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) (j1 - (i + Seq.length s')) (j2 - (i + Seq.length s')) let seq_upd_seq_empty (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i <= Seq.length s /\ Seq.length s' == 0)) (ensures (seq_upd_seq s i s' == s)) = assert (seq_upd_seq s i s' `Seq.equal` s) let seq_upd_seq_slice_idem (#t: Type) (s: Seq.seq t) (lo hi: nat) : Lemma (requires (lo <= hi /\ hi <= Seq.length s)) (ensures (seq_upd_seq s lo (Seq.slice s lo hi) == s)) = assert (seq_upd_seq s lo (Seq.slice s lo hi) `Seq.equal` s) let seq_upd_seq_left (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s 0 s' == Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s)))) = assert (seq_upd_seq s 0 s' `Seq.equal` Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s))) let seq_upd_seq_right (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s (Seq.length s - Seq.length s') s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s')) = assert (seq_upd_seq s (Seq.length s - Seq.length s') s' `Seq.equal` Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s')
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s1: FStar.Seq.Base.seq t -> i1: Prims.nat -> s2: FStar.Seq.Base.seq t -> i2: Prims.nat -> s3: FStar.Seq.Base.seq t -> FStar.Pervasives.Lemma (requires i1 + FStar.Seq.Base.length s2 <= FStar.Seq.Base.length s1 /\ i2 + FStar.Seq.Base.length s3 <= FStar.Seq.Base.length s2) (ensures LowParse.Spec.Base.seq_upd_seq s1 i1 (LowParse.Spec.Base.seq_upd_seq s2 i2 s3) == LowParse.Spec.Base.seq_upd_seq (LowParse.Spec.Base.seq_upd_seq s1 i1 s2) (i1 + i2) s3)
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "Prims.nat", "Prims._assert", "FStar.Seq.Base.equal", "LowParse.Spec.Base.seq_upd_seq", "Prims.op_Addition", "Prims.unit", "Prims.l_and", "Prims.b2t", "Prims.op_LessThanOrEqual", "FStar.Seq.Base.length", "Prims.squash", "Prims.eq2", "Prims.l_or", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_upd_seq_right_to_left (#t: Type) (s1: Seq.seq t) (i1: nat) (s2: Seq.seq t) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 + Seq.length s2 <= Seq.length s1 /\ i2 + Seq.length s3 <= Seq.length s2)) (ensures (seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) == seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3 )) =
assert ((seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3)) `Seq.equal` (seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3))
false
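The row above records the statement and proof of seq_upd_seq_right_to_left: nesting one seq_upd_seq inside another re-associates into a single update at the shifted index i1 + i2. As an informal aid only (not part of this dataset row or of the F* development), the following Python sketch models seq_upd_seq on plain lists and checks the identity on one concrete input; the helper name and the test values are illustrative assumptions.

# Python model of LowParse.Spec.Base.seq_upd_seq on lists (sketch, assumed helper).
# Mirrors Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + len s') (len s))).
def seq_upd_seq(s, i, s2):
    # Overwrite s[i : i + len(s2)] with s2; the overall length is preserved.
    assert i + len(s2) <= len(s)
    return s[:i] + s2 + s[i + len(s2):]

s1 = [0, 1, 2, 3, 4, 5]
s2 = [10, 11, 12]
s3 = [99]
i1, i2 = 2, 1  # i1 + len(s2) <= len(s1) and i2 + len(s3) <= len(s2), as the lemma requires

lhs = seq_upd_seq(s1, i1, seq_upd_seq(s2, i2, s3))
rhs = seq_upd_seq(seq_upd_seq(s1, i1, s2), i1 + i2, s3)
assert lhs == rhs == [0, 1, 10, 99, 12, 5]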
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_append_seq_upd_seq_r
val seq_append_seq_upd_seq_r (#t: Type) (s: Seq.seq t) (i': nat) (s' sr: Seq.seq t) : Lemma (requires (i' + Seq.length s' <= Seq.length s)) (ensures (i' <= Seq.length (s `Seq.append` sr) /\ (seq_upd_seq s i' s') `Seq.append` sr == seq_upd_seq (s `Seq.append` sr) i' s'))
val seq_append_seq_upd_seq_r (#t: Type) (s: Seq.seq t) (i': nat) (s' sr: Seq.seq t) : Lemma (requires (i' + Seq.length s' <= Seq.length s)) (ensures (i' <= Seq.length (s `Seq.append` sr) /\ (seq_upd_seq s i' s') `Seq.append` sr == seq_upd_seq (s `Seq.append` sr) i' s'))
let seq_append_seq_upd_seq_r (#t: Type) (s: Seq.seq t) (i': nat) (s' : Seq.seq t) (sr : Seq.seq t) : Lemma (requires (i' + Seq.length s' <= Seq.length s)) (ensures ( i' <= Seq.length (s `Seq.append` sr) /\ seq_upd_seq s i' s' `Seq.append` sr == seq_upd_seq (s `Seq.append` sr) i' s' )) = assert ((seq_upd_seq s i' s' `Seq.append` sr) `Seq.equal` seq_upd_seq (s `Seq.append` sr) i' s')
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 98, "end_line": 1131, "start_col": 0, "start_line": 1119 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i) let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i) let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2 let seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s))) = assert (Seq.slice 
(seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) `Seq.equal` Seq.slice s (i + Seq.length s') (Seq.length s)) let seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_right s i s'; Seq.slice_slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) (j1 - (i + Seq.length s')) (j2 - (i + Seq.length s')) let seq_upd_seq_empty (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i <= Seq.length s /\ Seq.length s' == 0)) (ensures (seq_upd_seq s i s' == s)) = assert (seq_upd_seq s i s' `Seq.equal` s) let seq_upd_seq_slice_idem (#t: Type) (s: Seq.seq t) (lo hi: nat) : Lemma (requires (lo <= hi /\ hi <= Seq.length s)) (ensures (seq_upd_seq s lo (Seq.slice s lo hi) == s)) = assert (seq_upd_seq s lo (Seq.slice s lo hi) `Seq.equal` s) let seq_upd_seq_left (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s 0 s' == Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s)))) = assert (seq_upd_seq s 0 s' `Seq.equal` Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s))) let seq_upd_seq_right (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s (Seq.length s - Seq.length s') s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s')) = assert (seq_upd_seq s (Seq.length s - Seq.length s') s' `Seq.equal` Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s') let seq_upd_seq_right_to_left (#t: Type) (s1: Seq.seq t) (i1: nat) (s2: Seq.seq t) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 + Seq.length s2 <= Seq.length s1 /\ i2 + Seq.length s3 <= Seq.length s2)) (ensures ( seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) == seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3 )) = assert (seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) `Seq.equal` seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3) let seq_upd_seq_seq_upd_seq_slice (#t: Type) (s1: Seq.seq t) (i1: nat) (hi: nat) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 <= hi /\ hi <= Seq.length s1 /\ i1 + i2 + Seq.length s3 <= hi)) (ensures ( seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) == seq_upd_seq s1 (i1 + i2) s3 )) = assert (seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) `Seq.equal` seq_upd_seq s1 (i1 + i2) s3) let seq_upd_seq_disj_comm (#t: Type) (s: Seq.seq t) (i1: nat) (s1: Seq.seq t) (i2: nat) (s2: Seq.seq t) : Lemma (requires ( i1 + Seq.length s1 <= Seq.length s /\ i2 + Seq.length s2 <= Seq.length s /\ (i1 + Seq.length s1 <= i2 \/ i2 + Seq.length s2 <= i1) )) (ensures ( seq_upd_seq (seq_upd_seq s i1 s1) i2 s2 == seq_upd_seq (seq_upd_seq s i2 s2) i1 s1 )) = assert (seq_upd_seq (seq_upd_seq s i1 s1) i2 s2 `Seq.equal` seq_upd_seq (seq_upd_seq s i2 s2) i1 s1) let seq_upd_seq_seq_upd (#t: Type) (s: Seq.seq t) (i: nat) (x: t) : Lemma (requires (i < Seq.length s)) (ensures (Seq.upd s i x == seq_upd_seq s i (Seq.create 1 x))) = assert (Seq.upd s i x `Seq.equal` seq_upd_seq s i (Seq.create 1 x)) let seq_append_seq_upd_seq_l (#t: Type) (s: Seq.seq t) (i': nat) (s' : Seq.seq t) (sl : Seq.seq t) : Lemma (requires (i' + Seq.length s' <= Seq.length s)) (ensures ( Seq.length sl + i' <= Seq.length (sl `Seq.append` s) /\ sl `Seq.append` seq_upd_seq s i' s' == seq_upd_seq (sl `Seq.append` s) (Seq.length sl + i') s' )) = assert (sl `Seq.append` seq_upd_seq s i' s' `Seq.equal` 
seq_upd_seq (sl `Seq.append` s) (Seq.length sl + i') s')
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: FStar.Seq.Base.seq t -> i': Prims.nat -> s': FStar.Seq.Base.seq t -> sr: FStar.Seq.Base.seq t -> FStar.Pervasives.Lemma (requires i' + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s) (ensures i' <= FStar.Seq.Base.length (FStar.Seq.Base.append s sr) /\ FStar.Seq.Base.append (LowParse.Spec.Base.seq_upd_seq s i' s') sr == LowParse.Spec.Base.seq_upd_seq (FStar.Seq.Base.append s sr) i' s')
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "Prims.nat", "Prims._assert", "FStar.Seq.Base.equal", "FStar.Seq.Base.append", "LowParse.Spec.Base.seq_upd_seq", "Prims.unit", "Prims.b2t", "Prims.op_LessThanOrEqual", "Prims.op_Addition", "FStar.Seq.Base.length", "Prims.squash", "Prims.l_and", "Prims.eq2", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_append_seq_upd_seq_r (#t: Type) (s: Seq.seq t) (i': nat) (s' sr: Seq.seq t) : Lemma (requires (i' + Seq.length s' <= Seq.length s)) (ensures (i' <= Seq.length (s `Seq.append` sr) /\ (seq_upd_seq s i' s') `Seq.append` sr == seq_upd_seq (s `Seq.append` sr) i' s')) =
assert (((seq_upd_seq s i' s') `Seq.append` sr) `Seq.equal` (seq_upd_seq (s `Seq.append` sr) i' s'))
false
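The row above is seq_append_seq_upd_seq_r: updating inside s and then appending sr on the right yields the same sequence as appending first and updating at the same index. A concrete check with the same Python list model of seq_upd_seq as in the previous sketch (again an illustrative assumption, not part of the dataset):

# Reuses the seq_upd_seq list model from the sketch above.
s, sr, patch, i = [0, 1, 2, 3], [7, 8], [42, 43], 1

lhs = seq_upd_seq(s, i, patch) + sr   # update, then append on the right
rhs = seq_upd_seq(s + sr, i, patch)   # append first, then update at the same index
assert lhs == rhs == [0, 42, 43, 3, 7, 8]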
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_upd_seq_seq_upd_seq_slice
val seq_upd_seq_seq_upd_seq_slice (#t: Type) (s1: Seq.seq t) (i1 hi i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 <= hi /\ hi <= Seq.length s1 /\ i1 + i2 + Seq.length s3 <= hi)) (ensures (seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) == seq_upd_seq s1 (i1 + i2) s3))
val seq_upd_seq_seq_upd_seq_slice (#t: Type) (s1: Seq.seq t) (i1 hi i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 <= hi /\ hi <= Seq.length s1 /\ i1 + i2 + Seq.length s3 <= hi)) (ensures (seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) == seq_upd_seq s1 (i1 + i2) s3))
let seq_upd_seq_seq_upd_seq_slice (#t: Type) (s1: Seq.seq t) (i1: nat) (hi: nat) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 <= hi /\ hi <= Seq.length s1 /\ i1 + i2 + Seq.length s3 <= hi)) (ensures ( seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) == seq_upd_seq s1 (i1 + i2) s3 )) = assert (seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) `Seq.equal` seq_upd_seq s1 (i1 + i2) s3)
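The definition above proves seq_upd_seq_seq_upd_seq_slice: updating inside the slice s1[i1 .. hi) and writing that slice back into s1 collapses to a single update of s1 at the shifted index i1 + i2. One concrete instance, again using the assumed Python list model of seq_upd_seq from the earlier sketch:

# Reuses the seq_upd_seq list model; here i1 <= hi <= len(s1) and i1 + i2 + len(s3) <= hi.
s1, i1, hi, i2, s3 = [0, 1, 2, 3, 4, 5], 1, 5, 1, [77, 88]

lhs = seq_upd_seq(s1, i1, seq_upd_seq(s1[i1:hi], i2, s3))  # update within the slice, write it back
rhs = seq_upd_seq(s1, i1 + i2, s3)                         # single update at the shifted index
assert lhs == rhs == [0, 1, 77, 88, 4, 5]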
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 109, "end_line": 1075, "start_col": 0, "start_line": 1063 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i) let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i) let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2 let seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s))) = assert (Seq.slice 
(seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) `Seq.equal` Seq.slice s (i + Seq.length s') (Seq.length s)) let seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_right s i s'; Seq.slice_slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) (j1 - (i + Seq.length s')) (j2 - (i + Seq.length s')) let seq_upd_seq_empty (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i <= Seq.length s /\ Seq.length s' == 0)) (ensures (seq_upd_seq s i s' == s)) = assert (seq_upd_seq s i s' `Seq.equal` s) let seq_upd_seq_slice_idem (#t: Type) (s: Seq.seq t) (lo hi: nat) : Lemma (requires (lo <= hi /\ hi <= Seq.length s)) (ensures (seq_upd_seq s lo (Seq.slice s lo hi) == s)) = assert (seq_upd_seq s lo (Seq.slice s lo hi) `Seq.equal` s) let seq_upd_seq_left (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s 0 s' == Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s)))) = assert (seq_upd_seq s 0 s' `Seq.equal` Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s))) let seq_upd_seq_right (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s (Seq.length s - Seq.length s') s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s')) = assert (seq_upd_seq s (Seq.length s - Seq.length s') s' `Seq.equal` Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s') let seq_upd_seq_right_to_left (#t: Type) (s1: Seq.seq t) (i1: nat) (s2: Seq.seq t) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 + Seq.length s2 <= Seq.length s1 /\ i2 + Seq.length s3 <= Seq.length s2)) (ensures ( seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) == seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3 )) = assert (seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) `Seq.equal` seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3)
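The file context above closes with seq_upd_seq_right_to_left, which re-associates nested updates: patching s3 into s2 and then splicing the result into s1 is the same as splicing s2 first and patching s3 at the shifted offset i1 + i2. As a purely illustrative sketch (the module name and binders below are hypothetical, not part of the dataset row, and it assumes LowParse.Spec.Base is in scope and that the simple length arithmetic is discharged by SMT), a concrete instantiation looks like this:

module SeqUpdRightToLeftExample
open LowParse.Spec.Base
module Seq = FStar.Seq

(* Patch s3 into s2 at offset 1, then splice the result into s1 at offset 4;
   equivalently, splice s2 first and patch s3 at offset 4 + 1 = 5. *)
let nested_update_example
  (s1: Seq.seq nat { Seq.length s1 == 12 })
  (s2: Seq.seq nat { Seq.length s2 == 6 })
  (s3: Seq.seq nat { Seq.length s3 == 2 })
: Lemma (seq_upd_seq s1 4 (seq_upd_seq s2 1 s3) == seq_upd_seq (seq_upd_seq s1 4 s2) 5 s3)
= seq_upd_seq_right_to_left s1 4 s2 1 s3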
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s1: FStar.Seq.Base.seq t -> i1: Prims.nat -> hi: Prims.nat -> i2: Prims.nat -> s3: FStar.Seq.Base.seq t -> FStar.Pervasives.Lemma (requires i1 <= hi /\ hi <= FStar.Seq.Base.length s1 /\ i1 + i2 + FStar.Seq.Base.length s3 <= hi) (ensures LowParse.Spec.Base.seq_upd_seq s1 i1 (LowParse.Spec.Base.seq_upd_seq (FStar.Seq.Base.slice s1 i1 hi) i2 s3) == LowParse.Spec.Base.seq_upd_seq s1 (i1 + i2) s3)
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "Prims.nat", "Prims._assert", "FStar.Seq.Base.equal", "LowParse.Spec.Base.seq_upd_seq", "FStar.Seq.Base.slice", "Prims.op_Addition", "Prims.unit", "Prims.l_and", "Prims.b2t", "Prims.op_LessThanOrEqual", "FStar.Seq.Base.length", "Prims.squash", "Prims.eq2", "Prims.l_or", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_upd_seq_seq_upd_seq_slice (#t: Type) (s1: Seq.seq t) (i1 hi i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 <= hi /\ hi <= Seq.length s1 /\ i1 + i2 + Seq.length s3 <= hi)) (ensures (seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) == seq_upd_seq s1 (i1 + i2) s3)) =
assert ((seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3)) `Seq.equal` (seq_upd_seq s1 (i1 + i2) s3))
false
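The row above targets seq_upd_seq_seq_upd_seq_slice: updating inside a slice of s1 and writing the patched slice back equals a single update of s1 at offset i1 + i2. A minimal, hypothetical client sketch (names buf and patch are illustrative only; it assumes LowParse.Spec.Base is open and the arithmetic side conditions go through automatically) instantiates it on concrete lengths:

module SeqUpdSliceExample
open LowParse.Spec.Base
module Seq = FStar.Seq

(* Take the slice [3, 8) of a 10-element buffer, patch 2 elements at offset 1
   inside that slice, and write the slice back at offset 3: this is the same
   as patching the buffer directly at offset 3 + 1 = 4. *)
let slice_update_example
  (buf: Seq.seq nat { Seq.length buf == 10 })
  (patch: Seq.seq nat { Seq.length patch == 2 })
: Lemma (seq_upd_seq buf 3 (seq_upd_seq (Seq.slice buf 3 8) 1 patch) == seq_upd_seq buf 4 patch)
= seq_upd_seq_seq_upd_seq_slice buf 3 8 1 patch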
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_upd_seq_disj_comm
val seq_upd_seq_disj_comm (#t: Type) (s: Seq.seq t) (i1: nat) (s1: Seq.seq t) (i2: nat) (s2: Seq.seq t) : Lemma (requires (i1 + Seq.length s1 <= Seq.length s /\ i2 + Seq.length s2 <= Seq.length s /\ (i1 + Seq.length s1 <= i2 \/ i2 + Seq.length s2 <= i1))) (ensures (seq_upd_seq (seq_upd_seq s i1 s1) i2 s2 == seq_upd_seq (seq_upd_seq s i2 s2) i1 s1))
val seq_upd_seq_disj_comm (#t: Type) (s: Seq.seq t) (i1: nat) (s1: Seq.seq t) (i2: nat) (s2: Seq.seq t) : Lemma (requires (i1 + Seq.length s1 <= Seq.length s /\ i2 + Seq.length s2 <= Seq.length s /\ (i1 + Seq.length s1 <= i2 \/ i2 + Seq.length s2 <= i1))) (ensures (seq_upd_seq (seq_upd_seq s i1 s1) i2 s2 == seq_upd_seq (seq_upd_seq s i2 s2) i1 s1))
let seq_upd_seq_disj_comm (#t: Type) (s: Seq.seq t) (i1: nat) (s1: Seq.seq t) (i2: nat) (s2: Seq.seq t) : Lemma (requires ( i1 + Seq.length s1 <= Seq.length s /\ i2 + Seq.length s2 <= Seq.length s /\ (i1 + Seq.length s1 <= i2 \/ i2 + Seq.length s2 <= i1) )) (ensures ( seq_upd_seq (seq_upd_seq s i1 s1) i2 s2 == seq_upd_seq (seq_upd_seq s i2 s2) i1 s1 )) = assert (seq_upd_seq (seq_upd_seq s i1 s1) i2 s2 `Seq.equal` seq_upd_seq (seq_upd_seq s i2 s2) i1 s1)
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 102, "end_line": 1093, "start_col": 0, "start_line": 1077 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i) let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i) let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2 let seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s))) = assert (Seq.slice 
(seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) `Seq.equal` Seq.slice s (i + Seq.length s') (Seq.length s)) let seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_right s i s'; Seq.slice_slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) (j1 - (i + Seq.length s')) (j2 - (i + Seq.length s')) let seq_upd_seq_empty (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i <= Seq.length s /\ Seq.length s' == 0)) (ensures (seq_upd_seq s i s' == s)) = assert (seq_upd_seq s i s' `Seq.equal` s) let seq_upd_seq_slice_idem (#t: Type) (s: Seq.seq t) (lo hi: nat) : Lemma (requires (lo <= hi /\ hi <= Seq.length s)) (ensures (seq_upd_seq s lo (Seq.slice s lo hi) == s)) = assert (seq_upd_seq s lo (Seq.slice s lo hi) `Seq.equal` s) let seq_upd_seq_left (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s 0 s' == Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s)))) = assert (seq_upd_seq s 0 s' `Seq.equal` Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s))) let seq_upd_seq_right (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s (Seq.length s - Seq.length s') s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s')) = assert (seq_upd_seq s (Seq.length s - Seq.length s') s' `Seq.equal` Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s') let seq_upd_seq_right_to_left (#t: Type) (s1: Seq.seq t) (i1: nat) (s2: Seq.seq t) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 + Seq.length s2 <= Seq.length s1 /\ i2 + Seq.length s3 <= Seq.length s2)) (ensures ( seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) == seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3 )) = assert (seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) `Seq.equal` seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3) let seq_upd_seq_seq_upd_seq_slice (#t: Type) (s1: Seq.seq t) (i1: nat) (hi: nat) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 <= hi /\ hi <= Seq.length s1 /\ i1 + i2 + Seq.length s3 <= hi)) (ensures ( seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) == seq_upd_seq s1 (i1 + i2) s3 )) = assert (seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) `Seq.equal` seq_upd_seq s1 (i1 + i2) s3)
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: FStar.Seq.Base.seq t -> i1: Prims.nat -> s1: FStar.Seq.Base.seq t -> i2: Prims.nat -> s2: FStar.Seq.Base.seq t -> FStar.Pervasives.Lemma (requires i1 + FStar.Seq.Base.length s1 <= FStar.Seq.Base.length s /\ i2 + FStar.Seq.Base.length s2 <= FStar.Seq.Base.length s /\ (i1 + FStar.Seq.Base.length s1 <= i2 \/ i2 + FStar.Seq.Base.length s2 <= i1)) (ensures LowParse.Spec.Base.seq_upd_seq (LowParse.Spec.Base.seq_upd_seq s i1 s1) i2 s2 == LowParse.Spec.Base.seq_upd_seq (LowParse.Spec.Base.seq_upd_seq s i2 s2) i1 s1)
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "Prims.nat", "Prims._assert", "FStar.Seq.Base.equal", "LowParse.Spec.Base.seq_upd_seq", "Prims.unit", "Prims.l_and", "Prims.b2t", "Prims.op_LessThanOrEqual", "Prims.op_Addition", "FStar.Seq.Base.length", "Prims.l_or", "Prims.squash", "Prims.eq2", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_upd_seq_disj_comm (#t: Type) (s: Seq.seq t) (i1: nat) (s1: Seq.seq t) (i2: nat) (s2: Seq.seq t) : Lemma (requires (i1 + Seq.length s1 <= Seq.length s /\ i2 + Seq.length s2 <= Seq.length s /\ (i1 + Seq.length s1 <= i2 \/ i2 + Seq.length s2 <= i1))) (ensures (seq_upd_seq (seq_upd_seq s i1 s1) i2 s2 == seq_upd_seq (seq_upd_seq s i2 s2) i1 s1)) =
assert ((seq_upd_seq (seq_upd_seq s i1 s1) i2 s2) `Seq.equal` (seq_upd_seq (seq_upd_seq s i2 s2) i1 s1))
false
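seq_upd_seq_disj_comm, recorded in the row above, states that two updates commute whenever the patched regions do not overlap. A hypothetical usage sketch (illustrative names, same assumptions as the previous sketch about scope and automatic arithmetic):

module SeqUpdDisjCommExample
open LowParse.Spec.Base
module Seq = FStar.Seq

(* Patches covering offsets 0..4 and 8..12 of a 16-element buffer are
   disjoint, so the order in which they are applied does not matter. *)
let disjoint_update_example
  (buf: Seq.seq nat { Seq.length buf == 16 })
  (p1: Seq.seq nat { Seq.length p1 == 4 })
  (p2: Seq.seq nat { Seq.length p2 == 4 })
: Lemma (seq_upd_seq (seq_upd_seq buf 0 p1) 8 p2 == seq_upd_seq (seq_upd_seq buf 8 p2) 0 p1)
= seq_upd_seq_disj_comm buf 0 p1 8 p2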
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_upd_bw_seq_right
val seq_upd_bw_seq_right (#t: Type) (s s': Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_bw_seq s 0 s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s'))
val seq_upd_bw_seq_right (#t: Type) (s s': Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_bw_seq s 0 s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s'))
let seq_upd_bw_seq_right (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_bw_seq s 0 s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s')) = seq_upd_seq_right s s'
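seq_upd_bw_seq_right above is proved simply by reduction to seq_upd_seq_right, which rewrites an update flush against the right end of a sequence as an append. A hypothetical instantiation of seq_upd_seq_right itself (illustrative names; assumes the length refinements below are enough for the prover) shows the shape of the rewrite:

module SeqUpdRightExample
open LowParse.Spec.Base
module Seq = FStar.Seq

(* Writing a 3-element tail at offset 8 - 3 = 5 of an 8-element buffer is the
   same as keeping the first 5 elements and appending the tail. *)
let right_update_example
  (buf: Seq.seq nat { Seq.length buf == 8 })
  (tail: Seq.seq nat { Seq.length tail == 3 })
: Lemma (seq_upd_seq buf 5 tail == Seq.append (Seq.slice buf 0 5) tail)
= seq_upd_seq_right buf tail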
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 24, "end_line": 1150, "start_col": 0, "start_line": 1143 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i) let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i) let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2 let seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s))) = assert (Seq.slice 
(seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) `Seq.equal` Seq.slice s (i + Seq.length s') (Seq.length s)) let seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_right s i s'; Seq.slice_slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) (j1 - (i + Seq.length s')) (j2 - (i + Seq.length s')) let seq_upd_seq_empty (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i <= Seq.length s /\ Seq.length s' == 0)) (ensures (seq_upd_seq s i s' == s)) = assert (seq_upd_seq s i s' `Seq.equal` s) let seq_upd_seq_slice_idem (#t: Type) (s: Seq.seq t) (lo hi: nat) : Lemma (requires (lo <= hi /\ hi <= Seq.length s)) (ensures (seq_upd_seq s lo (Seq.slice s lo hi) == s)) = assert (seq_upd_seq s lo (Seq.slice s lo hi) `Seq.equal` s) let seq_upd_seq_left (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s 0 s' == Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s)))) = assert (seq_upd_seq s 0 s' `Seq.equal` Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s))) let seq_upd_seq_right (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s (Seq.length s - Seq.length s') s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s')) = assert (seq_upd_seq s (Seq.length s - Seq.length s') s' `Seq.equal` Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s') let seq_upd_seq_right_to_left (#t: Type) (s1: Seq.seq t) (i1: nat) (s2: Seq.seq t) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 + Seq.length s2 <= Seq.length s1 /\ i2 + Seq.length s3 <= Seq.length s2)) (ensures ( seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) == seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3 )) = assert (seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) `Seq.equal` seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3) let seq_upd_seq_seq_upd_seq_slice (#t: Type) (s1: Seq.seq t) (i1: nat) (hi: nat) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 <= hi /\ hi <= Seq.length s1 /\ i1 + i2 + Seq.length s3 <= hi)) (ensures ( seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) == seq_upd_seq s1 (i1 + i2) s3 )) = assert (seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) `Seq.equal` seq_upd_seq s1 (i1 + i2) s3) let seq_upd_seq_disj_comm (#t: Type) (s: Seq.seq t) (i1: nat) (s1: Seq.seq t) (i2: nat) (s2: Seq.seq t) : Lemma (requires ( i1 + Seq.length s1 <= Seq.length s /\ i2 + Seq.length s2 <= Seq.length s /\ (i1 + Seq.length s1 <= i2 \/ i2 + Seq.length s2 <= i1) )) (ensures ( seq_upd_seq (seq_upd_seq s i1 s1) i2 s2 == seq_upd_seq (seq_upd_seq s i2 s2) i1 s1 )) = assert (seq_upd_seq (seq_upd_seq s i1 s1) i2 s2 `Seq.equal` seq_upd_seq (seq_upd_seq s i2 s2) i1 s1) let seq_upd_seq_seq_upd (#t: Type) (s: Seq.seq t) (i: nat) (x: t) : Lemma (requires (i < Seq.length s)) (ensures (Seq.upd s i x == seq_upd_seq s i (Seq.create 1 x))) = assert (Seq.upd s i x `Seq.equal` seq_upd_seq s i (Seq.create 1 x)) let seq_append_seq_upd_seq_l (#t: Type) (s: Seq.seq t) (i': nat) (s' : Seq.seq t) (sl : Seq.seq t) : Lemma (requires (i' + Seq.length s' <= Seq.length s)) (ensures ( Seq.length sl + i' <= Seq.length (sl `Seq.append` s) /\ sl `Seq.append` seq_upd_seq s i' s' == seq_upd_seq (sl `Seq.append` s) (Seq.length sl + i') s' )) = assert (sl `Seq.append` seq_upd_seq s i' s' `Seq.equal` 
seq_upd_seq (sl `Seq.append` s) (Seq.length sl + i') s') let seq_append_seq_upd_seq_r (#t: Type) (s: Seq.seq t) (i': nat) (s' : Seq.seq t) (sr : Seq.seq t) : Lemma (requires (i' + Seq.length s' <= Seq.length s)) (ensures ( i' <= Seq.length (s `Seq.append` sr) /\ seq_upd_seq s i' s' `Seq.append` sr == seq_upd_seq (s `Seq.append` sr) i' s' )) = assert ((seq_upd_seq s i' s' `Seq.append` sr) `Seq.equal` seq_upd_seq (s `Seq.append` sr) i' s') let seq_upd_bw_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = seq_upd_seq s (Seq.length s - i - Seq.length s') s'
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: FStar.Seq.Base.seq t -> s': FStar.Seq.Base.seq t -> FStar.Pervasives.Lemma (requires FStar.Seq.Base.length s' <= FStar.Seq.Base.length s) (ensures LowParse.Spec.Base.seq_upd_bw_seq s 0 s' == FStar.Seq.Base.append (FStar.Seq.Base.slice s 0 (FStar.Seq.Base.length s - FStar.Seq.Base.length s')) s')
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "LowParse.Spec.Base.seq_upd_seq_right", "Prims.unit", "Prims.b2t", "Prims.op_LessThanOrEqual", "FStar.Seq.Base.length", "Prims.squash", "Prims.eq2", "LowParse.Spec.Base.seq_upd_bw_seq", "FStar.Seq.Base.append", "FStar.Seq.Base.slice", "Prims.op_Subtraction", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_upd_bw_seq_right (#t: Type) (s s': Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_bw_seq s 0 s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s')) =
seq_upd_seq_right s s'
false
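The record above is `seq_upd_bw_seq_right`, which characterizes the backward-update helper `seq_upd_bw_seq` at offset 0. A minimal usage sketch, assuming the LowParse.Spec.Base context quoted in the record; the `_example_*` name is made up for illustration:

(* Instantiating the lemma at a concrete element type: a backward update at
   offset 0 overwrites exactly the last (Seq.length s') elements of s. *)
let _example_bw_update_right (s s' : Seq.seq int)
  : Lemma
    (requires Seq.length s' <= Seq.length s)
    (ensures  seq_upd_bw_seq s 0 s' ==
              Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s')
  = seq_upd_bw_seq_right s s'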
LowParse.Spec.Base.fsti
LowParse.Spec.Base.seq_append_seq_upd_seq_l
val seq_append_seq_upd_seq_l (#t: Type) (s: Seq.seq t) (i': nat) (s' sl: Seq.seq t) : Lemma (requires (i' + Seq.length s' <= Seq.length s)) (ensures (Seq.length sl + i' <= Seq.length (sl `Seq.append` s) /\ sl `Seq.append` (seq_upd_seq s i' s') == seq_upd_seq (sl `Seq.append` s) (Seq.length sl + i') s'))
val seq_append_seq_upd_seq_l (#t: Type) (s: Seq.seq t) (i': nat) (s' sl: Seq.seq t) : Lemma (requires (i' + Seq.length s' <= Seq.length s)) (ensures (Seq.length sl + i' <= Seq.length (sl `Seq.append` s) /\ sl `Seq.append` (seq_upd_seq s i' s') == seq_upd_seq (sl `Seq.append` s) (Seq.length sl + i') s'))
let seq_append_seq_upd_seq_l (#t: Type) (s: Seq.seq t) (i': nat) (s' : Seq.seq t) (sl : Seq.seq t) : Lemma (requires (i' + Seq.length s' <= Seq.length s)) (ensures ( Seq.length sl + i' <= Seq.length (sl `Seq.append` s) /\ sl `Seq.append` seq_upd_seq s i' s' == seq_upd_seq (sl `Seq.append` s) (Seq.length sl + i') s' )) = assert (sl `Seq.append` seq_upd_seq s i' s' `Seq.equal` seq_upd_seq (sl `Seq.append` s) (Seq.length sl + i') s')
{ "file_name": "src/lowparse/LowParse.Spec.Base.fsti", "git_rev": "00217c4a89f5ba56002ba9aa5b4a9d5903bfe9fa", "git_url": "https://github.com/project-everest/everparse.git", "project_name": "everparse" }
{ "end_col": 114, "end_line": 1117, "start_col": 0, "start_line": 1105 }
module LowParse.Spec.Base include LowParse.Bytes include LowParse.Norm module Seq = FStar.Seq module U8 = FStar.UInt8 module U32 = FStar.UInt32 /// parse a value of type t /// /// - the parser can fail (currently reporting an uninformative [None]) /// - it returns the parsed value as well as the number of bytes read /// (this is intended to be the number of bytes to advance the input pointer) /// /// note that the type now forbids lookahead; the parser cannot depend on /// values beyond the returned offset /// /// these parsers are used as specifications, and thus use unrepresentable types /// such as byte sequences and natural numbers and are always pure [@"substitute"] inline_for_extraction let consumed_length (b: bytes) : Tot Type = (n: nat { n <= Seq.length b } ) inline_for_extraction let bare_parser (t:Type) : Tot Type = (b: bytes) -> GTot (option (t * consumed_length b)) let parse (#t: Type) (p: bare_parser t) (input: bytes) : GTot (option (t * consumed_length input)) = p input (** Injectivity of parsing *) let injective_precond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in v1 == v2 ) let injective_precond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_precond p2 b1 b2 <==> injective_precond p1 b1 b2 )) = () let injective_postcond (#t: Type) (p: bare_parser t) (b1 b2: bytes) : GTot Type0 = Some? (parse p b1) /\ Some? (parse p b2) /\ ( let (Some (v1, len1)) = parse p b1 in let (Some (v2, len2)) = parse p b2 in (len1 <: nat) == (len2 <: nat) /\ Seq.slice b1 0 len1 == Seq.slice b2 0 len2 ) let injective_postcond_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( injective_postcond p2 b1 b2 <==> injective_postcond p1 b1 b2 )) = () let injective (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b1 b2: bytes) . {:pattern (injective_precond p b1 b2) \/ (injective_postcond p b1 b2)} injective_precond p b1 b2 ==> injective_postcond p b1 b2 let injective_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( injective p2 <==> injective p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_precond_ext p1 p2 b1)); Classical.forall_intro_2 (fun b1 -> Classical.move_requires (injective_postcond_ext p1 p2 b1)) let no_lookahead_on_precond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) /\ ( let (Some v) = parse f x in let (_, off) = v in off <= Seq.length x' /\ Seq.slice x' 0 off == Seq.slice x 0 off ) let no_lookahead_on_postcond (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = Some? (parse f x) ==> ( let (Some v) = parse f x in let (y, _) = v in Some? (parse f x') /\ ( let (Some v') = parse f x' in let (y', _) = v' in y == y' )) let no_lookahead_on (#t: Type) (f: bare_parser t) (x x' : bytes) : GTot Type0 = no_lookahead_on_precond f x x' ==> no_lookahead_on_postcond f x x' let no_lookahead_on_ext (#t: Type) (p1 p2: bare_parser t) (b1 b2: bytes) : Lemma (requires ( parse p2 b1 == parse p1 b1 /\ parse p2 b2 == parse p1 b2 )) (ensures ( no_lookahead_on p2 b1 b2 <==> no_lookahead_on p1 b1 b2 )) = () let no_lookahead (#t: Type) (f: bare_parser t) : GTot Type0 = forall (x x' : bytes) . 
{:pattern (no_lookahead_on_precond f x x') \/ (no_lookahead_on_postcond f x x')} no_lookahead_on f x x' let no_lookahead_ext (#t: Type) (p1 p2: bare_parser t) : Lemma (requires ( forall (b: bytes) . parse p2 b == parse p1 b )) (ensures ( no_lookahead p2 <==> no_lookahead p1 )) = Classical.forall_intro_2 (fun b1 -> Classical.move_requires (no_lookahead_on_ext p1 p2 b1)) (** A parser that always consumes all its input *) let consumes_all (#t: Type) (p: bare_parser t) : GTot Type0 = forall (b: bytes) . {:pattern (parse p b)} Some? (parse p b) ==> ( let (Some (_, len)) = parse p b in Seq.length b == len ) (** Parsing data of bounded size *) let parses_at_least (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz <= (consumed <: nat) ) let parses_at_least_0 (#t: Type) (f: bare_parser t) : Lemma (parses_at_least 0 f) = () let parses_at_least_le (sz sz': nat) (#t: Type) (f: bare_parser t) : Lemma (requires ( parses_at_least sz f /\ sz' <= sz )) (ensures ( parses_at_least sz' f )) = () (** A parser that always consumes at least one byte. A list can be serialized only if the parser for elements always consumes at least one byte. Anyway, since we require such a parser to have the prefix property, this is always true except for the parser for empty data. *) let parses_at_most (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz >= (consumed <: nat) ) let is_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s)} Some? (parse f s) ==> ( let (_, consumed) = Some?.v (parse f s) in sz == (consumed <: nat) ) let is_constant_size_parser_equiv (sz: nat) (#t: Type) (f: bare_parser t) : Lemma (is_constant_size_parser sz f <==> (parses_at_least sz f /\ parses_at_most sz f)) = () let is_total_constant_size_parser (sz: nat) (#t: Type) (f: bare_parser t) : GTot Type0 = forall (s: bytes) . {:pattern (parse f s) } (Seq.length s < sz) == (None? (parse f s)) type parser_subkind = | ParserStrong | ParserConsumesAll let parser_subkind_prop (k: parser_subkind) (#t: Type) (f: bare_parser t) : GTot Type0 = match k with | ParserStrong -> no_lookahead f | ParserConsumesAll -> consumes_all f type parser_kind_metadata_some = | ParserKindMetadataTotal | ParserKindMetadataFail type parser_kind_metadata_t = option parser_kind_metadata_some inline_for_extraction type parser_kind' = { parser_kind_low: nat; parser_kind_high: option nat; parser_kind_subkind: option parser_subkind; parser_kind_metadata: parser_kind_metadata_t; } let parser_kind = (x: parser_kind' { Some? x.parser_kind_high ==> x.parser_kind_low <= Some?.v x.parser_kind_high }) inline_for_extraction let strong_parser_kind (lo hi: nat) (md: parser_kind_metadata_t) : Pure parser_kind (requires (lo <= hi)) (ensures (fun _ -> True)) = { parser_kind_low = lo; parser_kind_high = Some hi; parser_kind_subkind = Some ParserStrong; parser_kind_metadata = md; } inline_for_extraction let total_constant_size_parser_kind (sz: nat) : Tot parser_kind = strong_parser_kind sz sz (Some ParserKindMetadataTotal) let parser_always_fails (#t: Type) (f: bare_parser t) : GTot Type0 = forall input . 
{:pattern (parse f input)} parse f input == None let parser_kind_metadata_prop (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = match k.parser_kind_metadata with | None -> True | Some ParserKindMetadataTotal -> k.parser_kind_high == Some k.parser_kind_low ==> is_total_constant_size_parser k.parser_kind_low f | Some ParserKindMetadataFail -> parser_always_fails f let parser_kind_prop' (#t: Type) (k: parser_kind) (f: bare_parser t) : GTot Type0 = injective f /\ (Some? k.parser_kind_subkind ==> parser_subkind_prop (Some?.v k.parser_kind_subkind) f) /\ parses_at_least k.parser_kind_low f /\ (Some? k.parser_kind_high ==> (parses_at_most (Some?.v k.parser_kind_high) f)) /\ parser_kind_metadata_prop k f val parser_kind_prop (#a:Type) (k:parser_kind) (f:bare_parser a) : GTot Type0 val parser_kind_prop_equiv (#t: Type) (k: parser_kind) (f: bare_parser t) : Lemma (parser_kind_prop k f <==> parser_kind_prop' k f) val parser_kind_prop_ext (#t: Type) (k: parser_kind) (f1 f2: bare_parser t) : Lemma (requires (forall (input: bytes) . parse f1 input == parse f2 input)) (ensures (parser_kind_prop k f1 <==> parser_kind_prop k f2)) [@unifier_hint_injective] inline_for_extraction let parser (k: parser_kind) (t: Type) : Tot Type = (f: bare_parser t { parser_kind_prop k f } ) inline_for_extraction let tot_bare_parser (t:Type) : Tot Type = (b: bytes) -> Tot (option (t * consumed_length b)) [@unifier_hint_injective] let tot_parser (k: parser_kind) (t: Type) : Tot Type = (f: tot_bare_parser t { parser_kind_prop k f } ) inline_for_extraction let get_parser_kind (#k: parser_kind) (#t: Type) (p: parser k t) : Tot parser_kind = k inline_for_extraction let get_parser_type (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = t let parser_kind_prop_intro (k: parser_kind) (#t: Type) (f: parser k t) : Lemma (parser_kind_prop k f) = () let is_strong (#k:parser_kind) (#t:Type) (p:parser k t) : Tot (r:bool{r ==> k.parser_kind_subkind == Some (ParserStrong)}) = k.parser_kind_subkind = Some (ParserStrong) let is_weaker_than (k1 k2: parser_kind) : GTot Type0 = (Some? k1.parser_kind_metadata ==> k1.parser_kind_metadata == k2.parser_kind_metadata) /\ ((k1.parser_kind_metadata <> Some ParserKindMetadataFail /\ k2.parser_kind_metadata <> Some ParserKindMetadataFail) ==> ( (Some? k1.parser_kind_subkind ==> k1.parser_kind_subkind == k2.parser_kind_subkind) /\ k1.parser_kind_low <= k2.parser_kind_low /\ (Some? k1.parser_kind_high ==> ( Some? 
k2.parser_kind_high /\ Some?.v k2.parser_kind_high <= Some?.v k1.parser_kind_high )))) val is_weaker_than_correct (k1: parser_kind) (k2: parser_kind) (#t: Type) (f: bare_parser t) : Lemma (requires (parser_kind_prop k2 f /\ k1 `is_weaker_than` k2)) (ensures (parser_kind_prop k1 f)) (* AR: see bug#1349 *) unfold let coerce_to_bare_parser (t:Type) (k2:parser_kind) (p:parser k2 t) :Tot (bare_parser t) = p let weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: parser k2 t) : Pure (parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: parser k1 t unfold let coerce_to_tot_bare_parser (t:Type) (k2:parser_kind) (p:tot_parser k2 t) :Tot (tot_bare_parser t) = p let tot_weaken (k1: parser_kind) (#k2: parser_kind) (#t: Type) (p2: tot_parser k2 t) : Pure (tot_parser k1 t) (requires (k1 `is_weaker_than` k2)) (ensures (fun _ -> True)) = let p = coerce_to_tot_bare_parser t k2 p2 in is_weaker_than_correct k1 k2 p; p <: tot_parser k1 t // inline_for_extraction let tot_strengthen (k: parser_kind) (#t: Type) (f: tot_bare_parser t) : Pure (tot_parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f // inline_for_extraction let strengthen (k: parser_kind) (#t: Type) (f: bare_parser t) : Pure (parser k t) (requires (parser_kind_prop k f)) (ensures (fun _ -> True)) = f #push-options "--z3rlimit 16" [@"opaque_to_smt"] inline_for_extraction let is_some (#t: Type) (x: option t) : Tot (y: bool { y == Some? x }) = match x with | Some _ -> true | _ -> false [@"opaque_to_smt"] inline_for_extraction let some_v (#t: Type) (x: option t { Some? x }) : Tot (y: t { y == Some?.v x }) = match x with | Some y -> y [@"opaque_to_smt"] inline_for_extraction let bool_and (b1 b2: bool) : Tot (y: bool { y == (b1 && b2) }) = if b1 then b2 else false [@"opaque_to_smt"] inline_for_extraction let bool_or (b1 b2: bool) : Tot (y: bool { y == (b1 || b2) }) = if b1 then true else b2 inline_for_extraction let glb (k1 k2: parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> k `is_weaker_than` k1 /\ k `is_weaker_than` k2 // (forall k' . (k' `is_weaker_than` k1 /\ k' `is_weaker_than` k2) ==> k' `is_weaker_than` k) )) = match k1.parser_kind_metadata, k2.parser_kind_metadata with | _, Some ParserKindMetadataFail -> { parser_kind_low = k1.parser_kind_low; parser_kind_high = k1.parser_kind_high; parser_kind_subkind = k1.parser_kind_subkind; parser_kind_metadata = (match k1.parser_kind_metadata with Some ParserKindMetadataFail -> Some ParserKindMetadataFail | _ -> None); } | Some ParserKindMetadataFail, _ -> { parser_kind_low = k2.parser_kind_low; parser_kind_high = k2.parser_kind_high; parser_kind_subkind = k2.parser_kind_subkind; parser_kind_metadata = None; } | _ -> { parser_kind_low = (if k1.parser_kind_low < k2.parser_kind_low then k1.parser_kind_low else k2.parser_kind_low); parser_kind_high = ( if is_some k1.parser_kind_high `bool_and` is_some k2.parser_kind_high then if some_v k2.parser_kind_high < some_v k1.parser_kind_high then k1.parser_kind_high else k2.parser_kind_high else None ); parser_kind_metadata = if k1.parser_kind_metadata = k2.parser_kind_metadata then k1.parser_kind_metadata else None; parser_kind_subkind = if k1.parser_kind_subkind = k2.parser_kind_subkind then k1.parser_kind_subkind else None } #pop-options #push-options "--warn_error -271" let default_parser_kind : (x: parser_kind { forall (t: Type) (p: bare_parser t) . 
{:pattern (parser_kind_prop x p)} injective p ==> parser_kind_prop x p }) = let aux (t:Type) (k:parser_kind) (p:bare_parser t) : Lemma (parser_kind_prop k p <==> parser_kind_prop' k p) [SMTPat ()] = parser_kind_prop_equiv k p in let x = { parser_kind_low = 0; parser_kind_high = None; parser_kind_metadata = None; parser_kind_subkind = None; } in x #pop-options // #set-options "--max_fuel 8 --max_ifuel 8" module L = FStar.List.Tot let rec glb_list_of (#t: eqtype) (f: (t -> Tot parser_kind)) (l: list t) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . L.mem kl l ==> k `is_weaker_than` (f kl)) /\ (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` (f kl))) ==> k' `is_weaker_than` k) )) = match l with | [] -> default_parser_kind | [k] -> f k | k1 :: q -> let k' = glb_list_of f q in glb (f k1) k' #reset-options let glb_list (l: list parser_kind) : Pure parser_kind (requires True) (ensures (fun k -> (forall kl . {:pattern (L.mem kl l)} L.mem kl l ==> k `is_weaker_than` kl) // (forall k' . (Cons? l /\ (forall kl . L.mem kl l ==> k' `is_weaker_than` kl)) ==> k' `is_weaker_than` k) )) = glb_list_of id l (* Coercions *) unfold inline_for_extraction let coerce (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) let coerce' (t2: Type) (#t1: Type) (x: t1) : Pure t2 (requires (t1 == t2)) (ensures (fun _ -> True)) = (x <: t2) unfold let coerce_parser (t2: Type) (#k: parser_kind) (#t1: Type) (p: parser k t1) : Pure (parser k t2) (requires (t2 == t1)) (ensures (fun _ -> True)) = p val parse_injective (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( injective_precond p input1 input2 )) (ensures ( injective_postcond p input1 input2 )) val parse_strong_prefix (#k: parser_kind) (#t: Type) (p: parser k t) (input1: bytes) (input2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ Seq.slice input1 0 consumed `Seq.equal` Seq.slice input2 0 consumed | _ -> False ))) (ensures ( match parse p input1 with | Some (x, consumed) -> consumed <= Seq.length input2 /\ parse p input2 == Some (x, consumed) | _ -> False )) (* Pure serializers *) inline_for_extraction let bare_serializer (t: Type) : Tot Type = t -> GTot bytes let serializer_correct (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (x: t) .{:pattern (parse p (f x))} parse p (f x) == Some (x, Seq.length (f x)) let serializer_correct_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (f: bare_serializer t1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Lemma (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (serializer_correct p1 f <==> serializer_correct p2 f)) = () let serializer_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : GTot Type0 = forall (s: bytes) . {:pattern (parse p s)} Some? 
(parse p s) ==> ( let (Some (x, len)) = parse p s in f x == Seq.slice s 0 len ) val serializer_correct_implies_complete (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) : Lemma (requires (serializer_correct p f)) (ensures (serializer_complete p f)) [@unifier_hint_injective] inline_for_extraction let serializer (#k: parser_kind) (#t: Type) (p: parser k t) : Tot Type = (f: bare_serializer t { serializer_correct p f } ) let mk_serializer (#k: parser_kind) (#t: Type) (p: parser k t) (f: bare_serializer t) (prf: ( (x: t) -> Lemma (parse p (f x) == Some (x, Seq.length (f x))) )) : Tot (serializer p) = Classical.forall_intro prf; f unfold let coerce_serializer (t2: Type) (#k: parser_kind) (#t1: Type) (#p: parser k t1) (s: serializer p) (u: unit { t2 == t1 } ) : Tot (serializer (coerce_parser t2 p)) = s let serialize_ext (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ (forall (input: bytes) . parse p1 input == parse p2 input))) (ensures (fun _ -> True)) = serializer_correct_ext p1 s1 p2; (s1 <: bare_serializer t2) let serialize_ext' (#k1: parser_kind) (#t1: Type) (p1: parser k1 t1) (s1: serializer p1) (#k2: parser_kind) (#t2: Type) (p2: parser k2 t2) : Pure (serializer p2) (requires (t1 == t2 /\ k1 == k2 /\ p1 == p2)) (ensures (fun _ -> True)) = serialize_ext p1 s1 p2 let serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : GTot bytes = s x let parse_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (parse p (serialize s x) == Some (x, Seq.length (serialize s x))) = () let parsed_data_is_serialize (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: bytes) : Lemma (requires (Some? (parse p x))) (ensures ( let Some (y, consumed) = parse p x in (serialize s y `Seq.append` Seq.slice x consumed (Seq.length x)) `Seq.equal` x )) = let Some (y, consumed) = parse p x in parse_injective p (serialize s y) x let serializer_unique (#k: parser_kind) (#t: Type) (p: parser k t) (s1 s2: serializer p) (x: t) : Lemma (s1 x == s2 x) = (* need these because of patterns *) let _ = parse p (s1 x) in let _ = parse p (s2 x) in serializer_correct_implies_complete p s2 let serializer_injective (#k: parser_kind) (#t: Type) (p: parser k t) (s: serializer p) (x1 x2: t) : Lemma (requires (s x1 == s x2)) (ensures (x1 == x2)) = (* patterns, again *) assert (parse p (s x1) == parse p (s x2)) val serializer_parser_unique' (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s /\ Some? (parse p1 x) )) (ensures ( parse p1 x == parse p2 x )) let serializer_parser_unique (#k1: parser_kind) (#t: Type) (p1: parser k1 t) (#k2: parser_kind) (p2: parser k2 t) (s: bare_serializer t) (x: bytes) : Lemma (requires ( is_strong p1 /\ is_strong p2 /\ serializer_correct p1 s /\ serializer_correct p2 s )) (ensures ( p1 x == p2 x )) = if Some? (p1 x) then serializer_parser_unique' p1 p2 s x else if Some? 
(p2 x) then serializer_parser_unique' p2 p1 s x else () val serialize_length (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (let x = Seq.length (serialize s x) in k.parser_kind_low <= x /\ ( match k.parser_kind_high with | None -> True | Some y -> x <= y )) [SMTPat (Seq.length (serialize s x))] val serialize_not_fail (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x: t) : Lemma (k.parser_kind_metadata <> Some ParserKindMetadataFail) [SMTPat (serialize s x)] let serialize_strong_prefix (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p) (x1 x2: t) (q1 q2: bytes) : Lemma (requires ( k.parser_kind_subkind == Some ParserStrong /\ serialize s x1 `Seq.append` q1 == serialize s x2 `Seq.append` q2 )) (ensures (x1 == x2 /\ q1 == q2)) = parse_strong_prefix p (serialize s x1) (serialize s x1 `Seq.append` q1); parse_strong_prefix p (serialize s x2) (serialize s x2 `Seq.append` q2); Seq.lemma_append_inj (serialize s x1) q1 (serialize s x2) q2 let parse_truncate (#k: parser_kind) (#t: Type) (#p: parser k t) (s: serializer p { k.parser_kind_subkind == Some ParserStrong }) (x: t) (n: nat) : Lemma (requires ( n < Seq.length (serialize s x) )) (ensures ( parse p (Seq.slice (serialize s x) 0 n) == None )) = let sq0 = serialize s x in let sq1 = Seq.slice sq0 0 n in match parse p sq1 with | None -> () | Some (x', consumed) -> parse_strong_prefix p sq1 sq0 let seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Pure (s_ : Seq.seq t { Seq.length s_ == Seq.length s } ) (requires (i + Seq.length s' <= Seq.length s)) (ensures (fun _ -> True)) = Seq.append (Seq.slice s 0 i) (Seq.append s' (Seq.slice s (i + Seq.length s') (Seq.length s))) let index_seq_upd_seq (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j < Seq.length s)) (ensures ( Seq.index (seq_upd_seq s i s') j == (if i <= j && j < i + Seq.length s' then Seq.index s' (j - i) else Seq.index s j))) = () let seq_upd_seq_slice (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') == s')) = assert (Seq.slice (seq_upd_seq s i s') i (i + Seq.length s') `Seq.equal` s') let seq_upd_seq_slice' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ i <= j1 /\ j1 <= j2 /\ j2 <= i + Seq.length s')) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s' (j1 - i) (j2 - i))) = seq_upd_seq_slice s i s'; Seq.slice_slice (seq_upd_seq s i s') i (i + Seq.length s') (j1 - i) (j2 - i) let seq_upd_seq_slice_left (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') 0 i == Seq.slice s 0 i)) = assert (Seq.slice (seq_upd_seq s i s') 0 i `Seq.equal` Seq.slice s 0 i) let seq_upd_seq_slice_left' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= Seq.length s /\ j1 <= j2 /\ j2 <= i)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_left s i s'; Seq.slice_slice (seq_upd_seq s i s') 0 i j1 j2 let seq_upd_seq_slice_right (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i + Seq.length s' <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) == Seq.slice s (i + Seq.length s') (Seq.length s))) = assert (Seq.slice 
(seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) `Seq.equal` Seq.slice s (i + Seq.length s') (Seq.length s)) let seq_upd_seq_slice_right' (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) (j1 j2: nat) : Lemma (requires (i + Seq.length s' <= j1 /\ j1 <= j2 /\ j2 <= Seq.length s)) (ensures (Seq.slice (seq_upd_seq s i s') j1 j2 == Seq.slice s j1 j2)) = seq_upd_seq_slice_right s i s'; Seq.slice_slice (seq_upd_seq s i s') (i + Seq.length s') (Seq.length s) (j1 - (i + Seq.length s')) (j2 - (i + Seq.length s')) let seq_upd_seq_empty (#t: Type) (s: Seq.seq t) (i: nat) (s' : Seq.seq t) : Lemma (requires (i <= Seq.length s /\ Seq.length s' == 0)) (ensures (seq_upd_seq s i s' == s)) = assert (seq_upd_seq s i s' `Seq.equal` s) let seq_upd_seq_slice_idem (#t: Type) (s: Seq.seq t) (lo hi: nat) : Lemma (requires (lo <= hi /\ hi <= Seq.length s)) (ensures (seq_upd_seq s lo (Seq.slice s lo hi) == s)) = assert (seq_upd_seq s lo (Seq.slice s lo hi) `Seq.equal` s) let seq_upd_seq_left (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s 0 s' == Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s)))) = assert (seq_upd_seq s 0 s' `Seq.equal` Seq.append s' (Seq.slice s (Seq.length s') (Seq.length s))) let seq_upd_seq_right (#t: Type) (s: Seq.seq t) (s' : Seq.seq t) : Lemma (requires (Seq.length s' <= Seq.length s)) (ensures (seq_upd_seq s (Seq.length s - Seq.length s') s' == Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s')) = assert (seq_upd_seq s (Seq.length s - Seq.length s') s' `Seq.equal` Seq.append (Seq.slice s 0 (Seq.length s - Seq.length s')) s') let seq_upd_seq_right_to_left (#t: Type) (s1: Seq.seq t) (i1: nat) (s2: Seq.seq t) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 + Seq.length s2 <= Seq.length s1 /\ i2 + Seq.length s3 <= Seq.length s2)) (ensures ( seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) == seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3 )) = assert (seq_upd_seq s1 i1 (seq_upd_seq s2 i2 s3) `Seq.equal` seq_upd_seq (seq_upd_seq s1 i1 s2) (i1 + i2) s3) let seq_upd_seq_seq_upd_seq_slice (#t: Type) (s1: Seq.seq t) (i1: nat) (hi: nat) (i2: nat) (s3: Seq.seq t) : Lemma (requires (i1 <= hi /\ hi <= Seq.length s1 /\ i1 + i2 + Seq.length s3 <= hi)) (ensures ( seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) == seq_upd_seq s1 (i1 + i2) s3 )) = assert (seq_upd_seq s1 i1 (seq_upd_seq (Seq.slice s1 i1 hi) i2 s3) `Seq.equal` seq_upd_seq s1 (i1 + i2) s3) let seq_upd_seq_disj_comm (#t: Type) (s: Seq.seq t) (i1: nat) (s1: Seq.seq t) (i2: nat) (s2: Seq.seq t) : Lemma (requires ( i1 + Seq.length s1 <= Seq.length s /\ i2 + Seq.length s2 <= Seq.length s /\ (i1 + Seq.length s1 <= i2 \/ i2 + Seq.length s2 <= i1) )) (ensures ( seq_upd_seq (seq_upd_seq s i1 s1) i2 s2 == seq_upd_seq (seq_upd_seq s i2 s2) i1 s1 )) = assert (seq_upd_seq (seq_upd_seq s i1 s1) i2 s2 `Seq.equal` seq_upd_seq (seq_upd_seq s i2 s2) i1 s1) let seq_upd_seq_seq_upd (#t: Type) (s: Seq.seq t) (i: nat) (x: t) : Lemma (requires (i < Seq.length s)) (ensures (Seq.upd s i x == seq_upd_seq s i (Seq.create 1 x))) = assert (Seq.upd s i x `Seq.equal` seq_upd_seq s i (Seq.create 1 x))
{ "checked_file": "/", "dependencies": [ "prims.fst.checked", "LowParse.Norm.fst.checked", "LowParse.Bytes.fsti.checked", "FStar.UInt8.fsti.checked", "FStar.UInt32.fsti.checked", "FStar.Seq.fst.checked", "FStar.Pervasives.Native.fst.checked", "FStar.Pervasives.fsti.checked", "FStar.List.Tot.fst.checked", "FStar.Classical.fsti.checked" ], "interface_file": false, "source_file": "LowParse.Spec.Base.fsti" }
[ { "abbrev": true, "full_module": "FStar.List.Tot", "short_module": "L" }, { "abbrev": true, "full_module": "FStar.UInt32", "short_module": "U32" }, { "abbrev": true, "full_module": "FStar.UInt8", "short_module": "U8" }, { "abbrev": true, "full_module": "FStar.Seq", "short_module": "Seq" }, { "abbrev": false, "full_module": "LowParse.Norm", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Bytes", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "LowParse.Spec", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: FStar.Seq.Base.seq t -> i': Prims.nat -> s': FStar.Seq.Base.seq t -> sl: FStar.Seq.Base.seq t -> FStar.Pervasives.Lemma (requires i' + FStar.Seq.Base.length s' <= FStar.Seq.Base.length s) (ensures FStar.Seq.Base.length sl + i' <= FStar.Seq.Base.length (FStar.Seq.Base.append sl s) /\ FStar.Seq.Base.append sl (LowParse.Spec.Base.seq_upd_seq s i' s') == LowParse.Spec.Base.seq_upd_seq (FStar.Seq.Base.append sl s) (FStar.Seq.Base.length sl + i') s')
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "FStar.Seq.Base.seq", "Prims.nat", "Prims._assert", "FStar.Seq.Base.equal", "FStar.Seq.Base.append", "LowParse.Spec.Base.seq_upd_seq", "Prims.op_Addition", "FStar.Seq.Base.length", "Prims.unit", "Prims.b2t", "Prims.op_LessThanOrEqual", "Prims.squash", "Prims.l_and", "Prims.eq2", "Prims.Nil", "FStar.Pervasives.pattern" ]
[]
true
false
true
false
false
let seq_append_seq_upd_seq_l (#t: Type) (s: Seq.seq t) (i': nat) (s' sl: Seq.seq t) : Lemma (requires (i' + Seq.length s' <= Seq.length s)) (ensures (Seq.length sl + i' <= Seq.length (sl `Seq.append` s) /\ sl `Seq.append` (seq_upd_seq s i' s') == seq_upd_seq (sl `Seq.append` s) (Seq.length sl + i') s')) =
assert ((sl `Seq.append` (seq_upd_seq s i' s')) `Seq.equal` (seq_upd_seq (sl `Seq.append` s) (Seq.length sl + i') s'))
false
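The record above is `seq_append_seq_upd_seq_l`. A short usage sketch, again assuming the LowParse.Spec.Base context quoted in the record; the helper name and the choice of `int` elements are illustrative only:

(* Prepending a fixed prefix `sl` commutes with an in-place update of `s`:
   the update index simply shifts by the length of the prefix. *)
let _example_prefix_shift (sl s s' : Seq.seq int) (i' : nat)
  : Lemma
    (requires i' + Seq.length s' <= Seq.length s)
    (ensures  sl `Seq.append` seq_upd_seq s i' s' ==
              seq_upd_seq (sl `Seq.append` s) (Seq.length sl + i') s')
  = seq_append_seq_upd_seq_l s i' s' sl

This is the shape in which such a lemma would typically be invoked when a fixed header is laid down in front of a byte range that is then patched in place.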
Pulse.Checker.AssertWithBinders.fsti
Pulse.Checker.AssertWithBinders.head_show_proof_state
val head_show_proof_state : st: Pulse.Syntax.Base.st_term -> Prims.bool
let head_show_proof_state (st:st_term) = match st.term with | Tm_ProofHintWithBinders { hint_type = SHOW_PROOF_STATE _ } -> true | _ -> false
{ "file_name": "lib/steel/pulse/Pulse.Checker.AssertWithBinders.fsti", "git_rev": "f984200f79bdc452374ae994a5ca837496476c41", "git_url": "https://github.com/FStarLang/steel.git", "project_name": "steel" }
{ "end_col": 14, "end_line": 33, "start_col": 0, "start_line": 30 }
(* Copyright 2023 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module Pulse.Checker.AssertWithBinders module T = FStar.Tactics.V2 open Pulse.Syntax open Pulse.Typing open Pulse.Checker.Base let head_wild (st:st_term) = match st.term with | Tm_ProofHintWithBinders { hint_type = WILD } -> true | _ -> false
{ "checked_file": "/", "dependencies": [ "Pulse.Typing.fst.checked", "Pulse.Syntax.fst.checked", "Pulse.Checker.Base.fsti.checked", "prims.fst.checked", "FStar.Tactics.V2.fst.checked", "FStar.Pervasives.fsti.checked" ], "interface_file": false, "source_file": "Pulse.Checker.AssertWithBinders.fsti" }
[ { "abbrev": false, "full_module": "Pulse.Checker.Base", "short_module": null }, { "abbrev": false, "full_module": "Pulse.Typing", "short_module": null }, { "abbrev": false, "full_module": "Pulse.Syntax", "short_module": null }, { "abbrev": true, "full_module": "FStar.Tactics.V2", "short_module": "T" }, { "abbrev": false, "full_module": "Pulse.Checker", "short_module": null }, { "abbrev": false, "full_module": "Pulse.Checker", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
st: Pulse.Syntax.Base.st_term -> Prims.bool
Prims.Tot
[ "total" ]
[]
[ "Pulse.Syntax.Base.st_term", "Pulse.Syntax.Base.__proj__Mkst_term__item__term", "Pulse.Syntax.Base.range", "Prims.list", "Pulse.Syntax.Base.binder", "Pulse.Syntax.Base.st_term'", "Prims.bool" ]
[]
false
false
false
true
false
let head_show_proof_state (st: st_term) =
match st.term with | Tm_ProofHintWithBinders { hint_type = SHOW_PROOF_STATE _ } -> true | _ -> false
false
Pulse.Checker.AssertWithBinders.fsti
Pulse.Checker.AssertWithBinders.handle_head_immediately
val handle_head_immediately : st: Pulse.Syntax.Base.st_term -> Prims.bool
let handle_head_immediately st = head_wild st || head_show_proof_state st
{ "file_name": "lib/steel/pulse/Pulse.Checker.AssertWithBinders.fsti", "git_rev": "f984200f79bdc452374ae994a5ca837496476c41", "git_url": "https://github.com/FStarLang/steel.git", "project_name": "steel" }
{ "end_col": 73, "end_line": 35, "start_col": 0, "start_line": 35 }
(* Copyright 2023 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module Pulse.Checker.AssertWithBinders module T = FStar.Tactics.V2 open Pulse.Syntax open Pulse.Typing open Pulse.Checker.Base let head_wild (st:st_term) = match st.term with | Tm_ProofHintWithBinders { hint_type = WILD } -> true | _ -> false let head_show_proof_state (st:st_term) = match st.term with | Tm_ProofHintWithBinders { hint_type = SHOW_PROOF_STATE _ } -> true | _ -> false
{ "checked_file": "/", "dependencies": [ "Pulse.Typing.fst.checked", "Pulse.Syntax.fst.checked", "Pulse.Checker.Base.fsti.checked", "prims.fst.checked", "FStar.Tactics.V2.fst.checked", "FStar.Pervasives.fsti.checked" ], "interface_file": false, "source_file": "Pulse.Checker.AssertWithBinders.fsti" }
[ { "abbrev": false, "full_module": "Pulse.Checker.Base", "short_module": null }, { "abbrev": false, "full_module": "Pulse.Typing", "short_module": null }, { "abbrev": false, "full_module": "Pulse.Syntax", "short_module": null }, { "abbrev": true, "full_module": "FStar.Tactics.V2", "short_module": "T" }, { "abbrev": false, "full_module": "Pulse.Checker", "short_module": null }, { "abbrev": false, "full_module": "Pulse.Checker", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
st: Pulse.Syntax.Base.st_term -> Prims.bool
Prims.Tot
[ "total" ]
[]
[ "Pulse.Syntax.Base.st_term", "Prims.op_BarBar", "Pulse.Checker.AssertWithBinders.head_wild", "Pulse.Checker.AssertWithBinders.head_show_proof_state", "Prims.bool" ]
[]
false
false
false
true
false
let handle_head_immediately st =
head_wild st || head_show_proof_state st
false
Pulse.Checker.AssertWithBinders.fsti
Pulse.Checker.AssertWithBinders.head_wild
val head_wild : st: Pulse.Syntax.Base.st_term -> Prims.bool
let head_wild (st:st_term) = match st.term with | Tm_ProofHintWithBinders { hint_type = WILD } -> true | _ -> false
{ "file_name": "lib/steel/pulse/Pulse.Checker.AssertWithBinders.fsti", "git_rev": "f984200f79bdc452374ae994a5ca837496476c41", "git_url": "https://github.com/FStarLang/steel.git", "project_name": "steel" }
{ "end_col": 14, "end_line": 28, "start_col": 0, "start_line": 25 }
(* Copyright 2023 Microsoft Research Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) module Pulse.Checker.AssertWithBinders module T = FStar.Tactics.V2 open Pulse.Syntax open Pulse.Typing open Pulse.Checker.Base
{ "checked_file": "/", "dependencies": [ "Pulse.Typing.fst.checked", "Pulse.Syntax.fst.checked", "Pulse.Checker.Base.fsti.checked", "prims.fst.checked", "FStar.Tactics.V2.fst.checked", "FStar.Pervasives.fsti.checked" ], "interface_file": false, "source_file": "Pulse.Checker.AssertWithBinders.fsti" }
[ { "abbrev": false, "full_module": "Pulse.Checker.Base", "short_module": null }, { "abbrev": false, "full_module": "Pulse.Typing", "short_module": null }, { "abbrev": false, "full_module": "Pulse.Syntax", "short_module": null }, { "abbrev": true, "full_module": "FStar.Tactics.V2", "short_module": "T" }, { "abbrev": false, "full_module": "Pulse.Checker", "short_module": null }, { "abbrev": false, "full_module": "Pulse.Checker", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 8, "max_ifuel": 2, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": true, "z3cliopt": [], "z3refresh": false, "z3rlimit": 5, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
st: Pulse.Syntax.Base.st_term -> Prims.bool
Prims.Tot
[ "total" ]
[]
[ "Pulse.Syntax.Base.st_term", "Pulse.Syntax.Base.__proj__Mkst_term__item__term", "Prims.list", "Pulse.Syntax.Base.binder", "Pulse.Syntax.Base.st_term'", "Prims.bool" ]
[]
false
false
false
true
false
let head_wild (st: st_term) =
match st.term with | Tm_ProofHintWithBinders { hint_type = WILD } -> true | _ -> false
false
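The three Pulse records above (`head_show_proof_state`, `handle_head_immediately`, `head_wild`) are simple syntactic tests on the head hint of a term, and `handle_head_immediately` is by definition the disjunction of the other two. A small sketch relating them, assuming the Pulse.Checker.AssertWithBinders context quoted in the records; the lemma name is illustrative:

(* `handle_head_immediately` is literally `head_wild st || head_show_proof_state st`,
   so this equation should be discharged by unfolding the definition alone. *)
let _example_dispatch_eq (st: st_term)
  : Lemma (handle_head_immediately st == (head_wild st || head_show_proof_state st))
  = ()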
EverCrypt.DRBG.fst
EverCrypt.DRBG.invert_state_s
val invert_state_s (a: supported_alg) : Lemma (requires True) (ensures inversion (state_s a)) [SMTPat (state_s a)]
val invert_state_s (a: supported_alg) : Lemma (requires True) (ensures inversion (state_s a)) [SMTPat (state_s a)]
let invert_state_s (a:supported_alg): Lemma (requires True) (ensures inversion (state_s a)) [ SMTPat (state_s a) ] = allow_inversion (state_s a)
{ "file_name": "providers/evercrypt/fst/EverCrypt.DRBG.fst", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 29, "end_line": 38, "start_col": 0, "start_line": 33 }
module EverCrypt.DRBG open FStar.HyperStack.ST open Lib.IntTypes open Spec.Hash.Definitions module HS = FStar.HyperStack module B = LowStar.Buffer module S = Spec.HMAC_DRBG open Hacl.HMAC_DRBG open Lib.RandomBuffer.System open LowStar.BufferOps friend Hacl.HMAC_DRBG friend EverCrypt.HMAC #set-options "--max_ifuel 0 --max_fuel 0 --z3rlimit 50" /// Some duplication from Hacl.HMAC_DRBG because we don't want clients to depend on it /// /// Respects EverCrypt convention and reverses order of buf_len, buf arguments [@CAbstractStruct] noeq type state_s: supported_alg -> Type0 = | SHA1_s : state SHA1 -> state_s SHA1 | SHA2_256_s: state SHA2_256 -> state_s SHA2_256 | SHA2_384_s: state SHA2_384 -> state_s SHA2_384 | SHA2_512_s: state SHA2_512 -> state_s SHA2_512
{ "checked_file": "/", "dependencies": [ "Spec.HMAC_DRBG.fsti.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "LowStar.BufferOps.fst.checked", "LowStar.Buffer.fst.checked", "Lib.RandomBuffer.System.fsti.checked", "Lib.Memzero0.fsti.checked", "Lib.IntTypes.fsti.checked", "Hacl.HMAC_DRBG.fst.checked", "Hacl.HMAC_DRBG.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.HyperStack.ST.fsti.checked", "FStar.HyperStack.fst.checked", "EverCrypt.HMAC.fst.checked" ], "interface_file": true, "source_file": "EverCrypt.DRBG.fst" }
[ { "abbrev": false, "full_module": "LowStar.BufferOps", "short_module": null }, { "abbrev": false, "full_module": "Lib.RandomBuffer.System", "short_module": null }, { "abbrev": false, "full_module": "Hacl.HMAC_DRBG", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 0, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
a: Hacl.HMAC_DRBG.supported_alg -> FStar.Pervasives.Lemma (ensures FStar.Pervasives.inversion (EverCrypt.DRBG.state_s a)) [SMTPat (EverCrypt.DRBG.state_s a)]
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "Hacl.HMAC_DRBG.supported_alg", "FStar.Pervasives.allow_inversion", "EverCrypt.DRBG.state_s", "Prims.unit", "Prims.l_True", "Prims.squash", "FStar.Pervasives.inversion", "Prims.Cons", "FStar.Pervasives.pattern", "FStar.Pervasives.smt_pat", "Prims.Nil" ]
[]
true
false
true
false
false
let invert_state_s (a: supported_alg) : Lemma (requires True) (ensures inversion (state_s a)) [SMTPat (state_s a)] =
allow_inversion (state_s a)
false
EverCrypt.DRBG.fst
EverCrypt.DRBG.footprint_s
val footprint_s: #a:supported_alg -> state_s a -> GTot B.loc
val footprint_s: #a:supported_alg -> state_s a -> GTot B.loc
let footprint_s #a st = footprint (p st)
{ "file_name": "providers/evercrypt/fst/EverCrypt.DRBG.fst", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 40, "end_line": 51, "start_col": 0, "start_line": 51 }
module EverCrypt.DRBG open FStar.HyperStack.ST open Lib.IntTypes open Spec.Hash.Definitions module HS = FStar.HyperStack module B = LowStar.Buffer module S = Spec.HMAC_DRBG open Hacl.HMAC_DRBG open Lib.RandomBuffer.System open LowStar.BufferOps friend Hacl.HMAC_DRBG friend EverCrypt.HMAC #set-options "--max_ifuel 0 --max_fuel 0 --z3rlimit 50" /// Some duplication from Hacl.HMAC_DRBG because we don't want clients to depend on it /// /// Respects EverCrypt convention and reverses order of buf_len, buf arguments [@CAbstractStruct] noeq type state_s: supported_alg -> Type0 = | SHA1_s : state SHA1 -> state_s SHA1 | SHA2_256_s: state SHA2_256 -> state_s SHA2_256 | SHA2_384_s: state SHA2_384 -> state_s SHA2_384 | SHA2_512_s: state SHA2_512 -> state_s SHA2_512 let invert_state_s (a:supported_alg): Lemma (requires True) (ensures inversion (state_s a)) [ SMTPat (state_s a) ] = allow_inversion (state_s a) /// Only call this function in extracted code with a known `a` inline_for_extraction noextract let p #a (s:state_s a) : Hacl.HMAC_DRBG.state a = match a with | SHA1 -> let SHA1_s p = s in p | SHA2_256 -> let SHA2_256_s p = s in p | SHA2_384 -> let SHA2_384_s p = s in p | SHA2_512 -> let SHA2_512_s p = s in p let freeable_s #a st = freeable (p st)
{ "checked_file": "/", "dependencies": [ "Spec.HMAC_DRBG.fsti.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "LowStar.BufferOps.fst.checked", "LowStar.Buffer.fst.checked", "Lib.RandomBuffer.System.fsti.checked", "Lib.Memzero0.fsti.checked", "Lib.IntTypes.fsti.checked", "Hacl.HMAC_DRBG.fst.checked", "Hacl.HMAC_DRBG.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.HyperStack.ST.fsti.checked", "FStar.HyperStack.fst.checked", "EverCrypt.HMAC.fst.checked" ], "interface_file": true, "source_file": "EverCrypt.DRBG.fst" }
[ { "abbrev": false, "full_module": "LowStar.BufferOps", "short_module": null }, { "abbrev": false, "full_module": "Lib.RandomBuffer.System", "short_module": null }, { "abbrev": false, "full_module": "Hacl.HMAC_DRBG", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 0, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
st: EverCrypt.DRBG.state_s a -> Prims.GTot LowStar.Monotonic.Buffer.loc
Prims.GTot
[ "sometrivial" ]
[]
[ "Hacl.HMAC_DRBG.supported_alg", "EverCrypt.DRBG.state_s", "Hacl.HMAC_DRBG.footprint", "EverCrypt.DRBG.p", "LowStar.Monotonic.Buffer.loc" ]
[]
false
false
false
false
false
let footprint_s #a st =
footprint (p st)
false
EverCrypt.DRBG.fst
EverCrypt.DRBG.freeable_s
val freeable_s: #a:supported_alg -> st:state_s a -> Type0
val freeable_s: #a:supported_alg -> st:state_s a -> Type0
let freeable_s #a st = freeable (p st)
{ "file_name": "providers/evercrypt/fst/EverCrypt.DRBG.fst", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 38, "end_line": 49, "start_col": 0, "start_line": 49 }
module EverCrypt.DRBG open FStar.HyperStack.ST open Lib.IntTypes open Spec.Hash.Definitions module HS = FStar.HyperStack module B = LowStar.Buffer module S = Spec.HMAC_DRBG open Hacl.HMAC_DRBG open Lib.RandomBuffer.System open LowStar.BufferOps friend Hacl.HMAC_DRBG friend EverCrypt.HMAC #set-options "--max_ifuel 0 --max_fuel 0 --z3rlimit 50" /// Some duplication from Hacl.HMAC_DRBG because we don't want clients to depend on it /// /// Respects EverCrypt convention and reverses order of buf_len, buf arguments [@CAbstractStruct] noeq type state_s: supported_alg -> Type0 = | SHA1_s : state SHA1 -> state_s SHA1 | SHA2_256_s: state SHA2_256 -> state_s SHA2_256 | SHA2_384_s: state SHA2_384 -> state_s SHA2_384 | SHA2_512_s: state SHA2_512 -> state_s SHA2_512 let invert_state_s (a:supported_alg): Lemma (requires True) (ensures inversion (state_s a)) [ SMTPat (state_s a) ] = allow_inversion (state_s a) /// Only call this function in extracted code with a known `a` inline_for_extraction noextract let p #a (s:state_s a) : Hacl.HMAC_DRBG.state a = match a with | SHA1 -> let SHA1_s p = s in p | SHA2_256 -> let SHA2_256_s p = s in p | SHA2_384 -> let SHA2_384_s p = s in p | SHA2_512 -> let SHA2_512_s p = s in p
{ "checked_file": "/", "dependencies": [ "Spec.HMAC_DRBG.fsti.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "LowStar.BufferOps.fst.checked", "LowStar.Buffer.fst.checked", "Lib.RandomBuffer.System.fsti.checked", "Lib.Memzero0.fsti.checked", "Lib.IntTypes.fsti.checked", "Hacl.HMAC_DRBG.fst.checked", "Hacl.HMAC_DRBG.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.HyperStack.ST.fsti.checked", "FStar.HyperStack.fst.checked", "EverCrypt.HMAC.fst.checked" ], "interface_file": true, "source_file": "EverCrypt.DRBG.fst" }
[ { "abbrev": false, "full_module": "LowStar.BufferOps", "short_module": null }, { "abbrev": false, "full_module": "Lib.RandomBuffer.System", "short_module": null }, { "abbrev": false, "full_module": "Hacl.HMAC_DRBG", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 0, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
st: EverCrypt.DRBG.state_s a -> Type0
Prims.Tot
[ "total" ]
[]
[ "Hacl.HMAC_DRBG.supported_alg", "EverCrypt.DRBG.state_s", "Hacl.HMAC_DRBG.freeable", "EverCrypt.DRBG.p" ]
[]
false
false
false
false
true
let freeable_s #a st =
freeable (p st)
false
EverCrypt.DRBG.fst
EverCrypt.DRBG.invariant_s
val invariant_s: #a:supported_alg -> state_s a -> HS.mem -> Type0
val invariant_s: #a:supported_alg -> state_s a -> HS.mem -> Type0
let invariant_s #a st h = invariant (p st) h
{ "file_name": "providers/evercrypt/fst/EverCrypt.DRBG.fst", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 44, "end_line": 53, "start_col": 0, "start_line": 53 }
module EverCrypt.DRBG open FStar.HyperStack.ST open Lib.IntTypes open Spec.Hash.Definitions module HS = FStar.HyperStack module B = LowStar.Buffer module S = Spec.HMAC_DRBG open Hacl.HMAC_DRBG open Lib.RandomBuffer.System open LowStar.BufferOps friend Hacl.HMAC_DRBG friend EverCrypt.HMAC #set-options "--max_ifuel 0 --max_fuel 0 --z3rlimit 50" /// Some duplication from Hacl.HMAC_DRBG because we don't want clients to depend on it /// /// Respects EverCrypt convention and reverses order of buf_len, buf arguments [@CAbstractStruct] noeq type state_s: supported_alg -> Type0 = | SHA1_s : state SHA1 -> state_s SHA1 | SHA2_256_s: state SHA2_256 -> state_s SHA2_256 | SHA2_384_s: state SHA2_384 -> state_s SHA2_384 | SHA2_512_s: state SHA2_512 -> state_s SHA2_512 let invert_state_s (a:supported_alg): Lemma (requires True) (ensures inversion (state_s a)) [ SMTPat (state_s a) ] = allow_inversion (state_s a) /// Only call this function in extracted code with a known `a` inline_for_extraction noextract let p #a (s:state_s a) : Hacl.HMAC_DRBG.state a = match a with | SHA1 -> let SHA1_s p = s in p | SHA2_256 -> let SHA2_256_s p = s in p | SHA2_384 -> let SHA2_384_s p = s in p | SHA2_512 -> let SHA2_512_s p = s in p let freeable_s #a st = freeable (p st) let footprint_s #a st = footprint (p st)
{ "checked_file": "/", "dependencies": [ "Spec.HMAC_DRBG.fsti.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "LowStar.BufferOps.fst.checked", "LowStar.Buffer.fst.checked", "Lib.RandomBuffer.System.fsti.checked", "Lib.Memzero0.fsti.checked", "Lib.IntTypes.fsti.checked", "Hacl.HMAC_DRBG.fst.checked", "Hacl.HMAC_DRBG.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.HyperStack.ST.fsti.checked", "FStar.HyperStack.fst.checked", "EverCrypt.HMAC.fst.checked" ], "interface_file": true, "source_file": "EverCrypt.DRBG.fst" }
[ { "abbrev": false, "full_module": "LowStar.BufferOps", "short_module": null }, { "abbrev": false, "full_module": "Lib.RandomBuffer.System", "short_module": null }, { "abbrev": false, "full_module": "Hacl.HMAC_DRBG", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 0, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
st: EverCrypt.DRBG.state_s a -> h: FStar.Monotonic.HyperStack.mem -> Type0
Prims.Tot
[ "total" ]
[]
[ "Hacl.HMAC_DRBG.supported_alg", "EverCrypt.DRBG.state_s", "FStar.Monotonic.HyperStack.mem", "Hacl.HMAC_DRBG.invariant", "EverCrypt.DRBG.p" ]
[]
false
false
false
false
true
let invariant_s #a st h =
invariant (p st) h
false
EverCrypt.DRBG.fst
EverCrypt.DRBG.loc_includes_union_l_footprint_s
val loc_includes_union_l_footprint_s: #a:supported_alg -> l1:B.loc -> l2:B.loc -> st:state_s a -> Lemma (requires B.loc_includes l1 (footprint_s st) \/ B.loc_includes l2 (footprint_s st)) (ensures B.loc_includes (B.loc_union l1 l2) (footprint_s st)) [SMTPat (B.loc_includes (B.loc_union l1 l2) (footprint_s st))]
val loc_includes_union_l_footprint_s: #a:supported_alg -> l1:B.loc -> l2:B.loc -> st:state_s a -> Lemma (requires B.loc_includes l1 (footprint_s st) \/ B.loc_includes l2 (footprint_s st)) (ensures B.loc_includes (B.loc_union l1 l2) (footprint_s st)) [SMTPat (B.loc_includes (B.loc_union l1 l2) (footprint_s st))]
let loc_includes_union_l_footprint_s #a l1 l2 st = B.loc_includes_union_l l1 l2 (footprint_s st)
{ "file_name": "providers/evercrypt/fst/EverCrypt.DRBG.fst", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 47, "end_line": 60, "start_col": 0, "start_line": 59 }
module EverCrypt.DRBG open FStar.HyperStack.ST open Lib.IntTypes open Spec.Hash.Definitions module HS = FStar.HyperStack module B = LowStar.Buffer module S = Spec.HMAC_DRBG open Hacl.HMAC_DRBG open Lib.RandomBuffer.System open LowStar.BufferOps friend Hacl.HMAC_DRBG friend EverCrypt.HMAC #set-options "--max_ifuel 0 --max_fuel 0 --z3rlimit 50" /// Some duplication from Hacl.HMAC_DRBG because we don't want clients to depend on it /// /// Respects EverCrypt convention and reverses order of buf_len, buf arguments [@CAbstractStruct] noeq type state_s: supported_alg -> Type0 = | SHA1_s : state SHA1 -> state_s SHA1 | SHA2_256_s: state SHA2_256 -> state_s SHA2_256 | SHA2_384_s: state SHA2_384 -> state_s SHA2_384 | SHA2_512_s: state SHA2_512 -> state_s SHA2_512 let invert_state_s (a:supported_alg): Lemma (requires True) (ensures inversion (state_s a)) [ SMTPat (state_s a) ] = allow_inversion (state_s a) /// Only call this function in extracted code with a known `a` inline_for_extraction noextract let p #a (s:state_s a) : Hacl.HMAC_DRBG.state a = match a with | SHA1 -> let SHA1_s p = s in p | SHA2_256 -> let SHA2_256_s p = s in p | SHA2_384 -> let SHA2_384_s p = s in p | SHA2_512 -> let SHA2_512_s p = s in p let freeable_s #a st = freeable (p st) let footprint_s #a st = footprint (p st) let invariant_s #a st h = invariant (p st) h let repr #a st h = let st = B.get h st 0 in repr (p st) h
{ "checked_file": "/", "dependencies": [ "Spec.HMAC_DRBG.fsti.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "LowStar.BufferOps.fst.checked", "LowStar.Buffer.fst.checked", "Lib.RandomBuffer.System.fsti.checked", "Lib.Memzero0.fsti.checked", "Lib.IntTypes.fsti.checked", "Hacl.HMAC_DRBG.fst.checked", "Hacl.HMAC_DRBG.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.HyperStack.ST.fsti.checked", "FStar.HyperStack.fst.checked", "EverCrypt.HMAC.fst.checked" ], "interface_file": true, "source_file": "EverCrypt.DRBG.fst" }
[ { "abbrev": false, "full_module": "LowStar.BufferOps", "short_module": null }, { "abbrev": false, "full_module": "Lib.RandomBuffer.System", "short_module": null }, { "abbrev": false, "full_module": "Hacl.HMAC_DRBG", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 0, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
l1: LowStar.Monotonic.Buffer.loc -> l2: LowStar.Monotonic.Buffer.loc -> st: EverCrypt.DRBG.state_s a -> FStar.Pervasives.Lemma (requires LowStar.Monotonic.Buffer.loc_includes l1 (EverCrypt.DRBG.footprint_s st) \/ LowStar.Monotonic.Buffer.loc_includes l2 (EverCrypt.DRBG.footprint_s st)) (ensures LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union l1 l2) (EverCrypt.DRBG.footprint_s st)) [ SMTPat (LowStar.Monotonic.Buffer.loc_includes (LowStar.Monotonic.Buffer.loc_union l1 l2) (EverCrypt.DRBG.footprint_s st)) ]
FStar.Pervasives.Lemma
[ "lemma" ]
[]
[ "Hacl.HMAC_DRBG.supported_alg", "LowStar.Monotonic.Buffer.loc", "EverCrypt.DRBG.state_s", "LowStar.Monotonic.Buffer.loc_includes_union_l", "EverCrypt.DRBG.footprint_s", "Prims.unit" ]
[]
true
false
true
false
false
let loc_includes_union_l_footprint_s #a l1 l2 st =
B.loc_includes_union_l l1 l2 (footprint_s st)
false
EverCrypt.DRBG.fst
EverCrypt.DRBG.repr
val repr: #a:supported_alg -> st:state a -> h:HS.mem -> GTot (S.state a)
val repr: #a:supported_alg -> st:state a -> h:HS.mem -> GTot (S.state a)
let repr #a st h = let st = B.get h st 0 in repr (p st) h
{ "file_name": "providers/evercrypt/fst/EverCrypt.DRBG.fst", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 15, "end_line": 57, "start_col": 0, "start_line": 55 }
module EverCrypt.DRBG open FStar.HyperStack.ST open Lib.IntTypes open Spec.Hash.Definitions module HS = FStar.HyperStack module B = LowStar.Buffer module S = Spec.HMAC_DRBG open Hacl.HMAC_DRBG open Lib.RandomBuffer.System open LowStar.BufferOps friend Hacl.HMAC_DRBG friend EverCrypt.HMAC #set-options "--max_ifuel 0 --max_fuel 0 --z3rlimit 50" /// Some duplication from Hacl.HMAC_DRBG because we don't want clients to depend on it /// /// Respects EverCrypt convention and reverses order of buf_len, buf arguments [@CAbstractStruct] noeq type state_s: supported_alg -> Type0 = | SHA1_s : state SHA1 -> state_s SHA1 | SHA2_256_s: state SHA2_256 -> state_s SHA2_256 | SHA2_384_s: state SHA2_384 -> state_s SHA2_384 | SHA2_512_s: state SHA2_512 -> state_s SHA2_512 let invert_state_s (a:supported_alg): Lemma (requires True) (ensures inversion (state_s a)) [ SMTPat (state_s a) ] = allow_inversion (state_s a) /// Only call this function in extracted code with a known `a` inline_for_extraction noextract let p #a (s:state_s a) : Hacl.HMAC_DRBG.state a = match a with | SHA1 -> let SHA1_s p = s in p | SHA2_256 -> let SHA2_256_s p = s in p | SHA2_384 -> let SHA2_384_s p = s in p | SHA2_512 -> let SHA2_512_s p = s in p let freeable_s #a st = freeable (p st) let footprint_s #a st = footprint (p st) let invariant_s #a st h = invariant (p st) h
{ "checked_file": "/", "dependencies": [ "Spec.HMAC_DRBG.fsti.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "LowStar.BufferOps.fst.checked", "LowStar.Buffer.fst.checked", "Lib.RandomBuffer.System.fsti.checked", "Lib.Memzero0.fsti.checked", "Lib.IntTypes.fsti.checked", "Hacl.HMAC_DRBG.fst.checked", "Hacl.HMAC_DRBG.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.HyperStack.ST.fsti.checked", "FStar.HyperStack.fst.checked", "EverCrypt.HMAC.fst.checked" ], "interface_file": true, "source_file": "EverCrypt.DRBG.fst" }
[ { "abbrev": false, "full_module": "LowStar.BufferOps", "short_module": null }, { "abbrev": false, "full_module": "Lib.RandomBuffer.System", "short_module": null }, { "abbrev": false, "full_module": "Hacl.HMAC_DRBG", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 0, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
st: EverCrypt.DRBG.state a -> h: FStar.Monotonic.HyperStack.mem -> Prims.GTot (Spec.HMAC_DRBG.state a)
Prims.GTot
[ "sometrivial" ]
[]
[ "Hacl.HMAC_DRBG.supported_alg", "EverCrypt.DRBG.state", "FStar.Monotonic.HyperStack.mem", "Hacl.HMAC_DRBG.repr", "EverCrypt.DRBG.p", "EverCrypt.DRBG.state_s", "LowStar.Monotonic.Buffer.get", "LowStar.Buffer.trivial_preorder", "Spec.HMAC_DRBG.state" ]
[]
false
false
false
false
false
let repr #a st h =
let st = B.get h st 0 in repr (p st) h
false
EverCrypt.DRBG.fst
EverCrypt.DRBG.p
val p (#a: _) (s: state_s a) : Hacl.HMAC_DRBG.state a
val p (#a: _) (s: state_s a) : Hacl.HMAC_DRBG.state a
let p #a (s:state_s a) : Hacl.HMAC_DRBG.state a = match a with | SHA1 -> let SHA1_s p = s in p | SHA2_256 -> let SHA2_256_s p = s in p | SHA2_384 -> let SHA2_384_s p = s in p | SHA2_512 -> let SHA2_512_s p = s in p
{ "file_name": "providers/evercrypt/fst/EverCrypt.DRBG.fst", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 41, "end_line": 47, "start_col": 0, "start_line": 42 }
module EverCrypt.DRBG open FStar.HyperStack.ST open Lib.IntTypes open Spec.Hash.Definitions module HS = FStar.HyperStack module B = LowStar.Buffer module S = Spec.HMAC_DRBG open Hacl.HMAC_DRBG open Lib.RandomBuffer.System open LowStar.BufferOps friend Hacl.HMAC_DRBG friend EverCrypt.HMAC #set-options "--max_ifuel 0 --max_fuel 0 --z3rlimit 50" /// Some duplication from Hacl.HMAC_DRBG because we don't want clients to depend on it /// /// Respects EverCrypt convention and reverses order of buf_len, buf arguments [@CAbstractStruct] noeq type state_s: supported_alg -> Type0 = | SHA1_s : state SHA1 -> state_s SHA1 | SHA2_256_s: state SHA2_256 -> state_s SHA2_256 | SHA2_384_s: state SHA2_384 -> state_s SHA2_384 | SHA2_512_s: state SHA2_512 -> state_s SHA2_512 let invert_state_s (a:supported_alg): Lemma (requires True) (ensures inversion (state_s a)) [ SMTPat (state_s a) ] = allow_inversion (state_s a) /// Only call this function in extracted code with a known `a`
{ "checked_file": "/", "dependencies": [ "Spec.HMAC_DRBG.fsti.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "LowStar.BufferOps.fst.checked", "LowStar.Buffer.fst.checked", "Lib.RandomBuffer.System.fsti.checked", "Lib.Memzero0.fsti.checked", "Lib.IntTypes.fsti.checked", "Hacl.HMAC_DRBG.fst.checked", "Hacl.HMAC_DRBG.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.HyperStack.ST.fsti.checked", "FStar.HyperStack.fst.checked", "EverCrypt.HMAC.fst.checked" ], "interface_file": true, "source_file": "EverCrypt.DRBG.fst" }
[ { "abbrev": false, "full_module": "LowStar.BufferOps", "short_module": null }, { "abbrev": false, "full_module": "Lib.RandomBuffer.System", "short_module": null }, { "abbrev": false, "full_module": "Hacl.HMAC_DRBG", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 0, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
s: EverCrypt.DRBG.state_s a -> Hacl.HMAC_DRBG.state a
Prims.Tot
[ "total" ]
[]
[ "Spec.HMAC_DRBG.supported_alg", "EverCrypt.DRBG.state_s", "Hacl.HMAC_DRBG.state", "Spec.Hash.Definitions.SHA1", "Spec.Hash.Definitions.SHA2_256", "Spec.Hash.Definitions.SHA2_384", "Spec.Hash.Definitions.SHA2_512" ]
[]
false
false
false
false
false
let p #a (s: state_s a) : Hacl.HMAC_DRBG.state a =
match a with | SHA1 -> let SHA1_s p = s in p | SHA2_256 -> let SHA2_256_s p = s in p | SHA2_384 -> let SHA2_384_s p = s in p | SHA2_512 -> let SHA2_512_s p = s in p
false
EverCrypt.DRBG.fst
EverCrypt.DRBG.generate_sha2_384
val generate_sha2_384: generate_st SHA2_384
val generate_sha2_384: generate_st SHA2_384
let generate_sha2_384 = mk_generate EverCrypt.HMAC.compute_sha2_384
{ "file_name": "providers/evercrypt/fst/EverCrypt.DRBG.fst", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 67, "end_line": 242, "start_col": 0, "start_line": 242 }
module EverCrypt.DRBG open FStar.HyperStack.ST open Lib.IntTypes open Spec.Hash.Definitions module HS = FStar.HyperStack module B = LowStar.Buffer module S = Spec.HMAC_DRBG open Hacl.HMAC_DRBG open Lib.RandomBuffer.System open LowStar.BufferOps friend Hacl.HMAC_DRBG friend EverCrypt.HMAC #set-options "--max_ifuel 0 --max_fuel 0 --z3rlimit 50" /// Some duplication from Hacl.HMAC_DRBG because we don't want clients to depend on it /// /// Respects EverCrypt convention and reverses order of buf_len, buf arguments [@CAbstractStruct] noeq type state_s: supported_alg -> Type0 = | SHA1_s : state SHA1 -> state_s SHA1 | SHA2_256_s: state SHA2_256 -> state_s SHA2_256 | SHA2_384_s: state SHA2_384 -> state_s SHA2_384 | SHA2_512_s: state SHA2_512 -> state_s SHA2_512 let invert_state_s (a:supported_alg): Lemma (requires True) (ensures inversion (state_s a)) [ SMTPat (state_s a) ] = allow_inversion (state_s a) /// Only call this function in extracted code with a known `a` inline_for_extraction noextract let p #a (s:state_s a) : Hacl.HMAC_DRBG.state a = match a with | SHA1 -> let SHA1_s p = s in p | SHA2_256 -> let SHA2_256_s p = s in p | SHA2_384 -> let SHA2_384_s p = s in p | SHA2_512 -> let SHA2_512_s p = s in p let freeable_s #a st = freeable (p st) let footprint_s #a st = footprint (p st) let invariant_s #a st h = invariant (p st) h let repr #a st h = let st = B.get h st 0 in repr (p st) h let loc_includes_union_l_footprint_s #a l1 l2 st = B.loc_includes_union_l l1 l2 (footprint_s st) let invariant_loc_in_footprint #a st m = () let frame_invariant #a l st h0 h1 = () /// State allocation // Would like to specialize alloca in each branch, but two calls to StackInline // functions in the same block lead to variable redefinitions at extraction. let alloca a = let st = match a with | SHA1 -> SHA1_s (alloca a) | SHA2_256 -> SHA2_256_s (alloca a) | SHA2_384 -> SHA2_384_s (alloca a) | SHA2_512 -> SHA2_512_s (alloca a) in B.alloca st 1ul let create_in a r = let st = match a with | SHA1 -> SHA1_s (create_in SHA1 r) | SHA2_256 -> SHA2_256_s (create_in SHA2_256 r) | SHA2_384 -> SHA2_384_s (create_in SHA2_384 r) | SHA2_512 -> SHA2_512_s (create_in SHA2_512 r) in B.malloc r st 1ul let create a = create_in a HS.root /// Instantiate function inline_for_extraction noextract val mk_instantiate: #a:supported_alg -> EverCrypt.HMAC.compute_st a -> instantiate_st a let mk_instantiate #a hmac st personalization_string personalization_string_len = if personalization_string_len >. max_personalization_string_length then false else let entropy_input_len = min_length a in let nonce_len = min_length a /. 2ul in let min_entropy = entropy_input_len +! nonce_len in push_frame(); assert_norm (range (v min_entropy) U32); let entropy = B.alloca (u8 0) min_entropy in let ok = randombytes entropy min_entropy in let result = if not ok then false else begin let entropy_input = B.sub entropy 0ul entropy_input_len in let nonce = B.sub entropy entropy_input_len nonce_len in S.hmac_input_bound a; let st_s = !*st in mk_instantiate hmac (p st_s) entropy_input_len entropy_input nonce_len nonce personalization_string_len personalization_string; true end in pop_frame(); result (** @type: true *) val instantiate_sha1 : instantiate_st SHA1 (** @type: true *) val instantiate_sha2_256: instantiate_st SHA2_256 (** @type: true *) val instantiate_sha2_384: instantiate_st SHA2_384 (** @type: true *) val instantiate_sha2_512: instantiate_st SHA2_512 let instantiate_sha1 = mk_instantiate EverCrypt.HMAC.compute_sha1 let instantiate_sha2_256 = mk_instantiate EverCrypt.HMAC.compute_sha2_256 let instantiate_sha2_384 = mk_instantiate EverCrypt.HMAC.compute_sha2_384 let instantiate_sha2_512 = mk_instantiate EverCrypt.HMAC.compute_sha2_512 /// Reseed function inline_for_extraction noextract val mk_reseed: #a:supported_alg -> EverCrypt.HMAC.compute_st a -> reseed_st a let mk_reseed #a hmac st additional_input additional_input_len = if additional_input_len >. max_additional_input_length then false else let entropy_input_len = min_length a in push_frame(); let entropy_input = B.alloca (u8 0) entropy_input_len in let ok = randombytes entropy_input entropy_input_len in let result = if not ok then false else begin S.hmac_input_bound a; let st_s = !*st in mk_reseed hmac (p st_s) entropy_input_len entropy_input additional_input_len additional_input; true end in pop_frame(); result (** @type: true *) val reseed_sha1 : reseed_st SHA1 (** @type: true *) val reseed_sha2_256: reseed_st SHA2_256 (** @type: true *) val reseed_sha2_384: reseed_st SHA2_384 (** @type: true *) val reseed_sha2_512: reseed_st SHA2_512 let reseed_sha1 = mk_reseed EverCrypt.HMAC.compute_sha1 let reseed_sha2_256 = mk_reseed EverCrypt.HMAC.compute_sha2_256 let reseed_sha2_384 = mk_reseed EverCrypt.HMAC.compute_sha2_384 let reseed_sha2_512 = mk_reseed EverCrypt.HMAC.compute_sha2_512 /// Generate function inline_for_extraction noextract val mk_generate: #a:supported_alg -> EverCrypt.HMAC.compute_st a -> generate_st a let mk_generate #a hmac output st n additional_input additional_input_len = if additional_input_len >. max_additional_input_length || n >. max_output_length then false else ( push_frame(); let ok = mk_reseed hmac st additional_input additional_input_len in let result = if not ok then false else begin let st_s = !*st in let b = mk_generate hmac output (p st_s) n additional_input_len additional_input in b (* This used to be true, which is fishy *) end in let h1 = get () in pop_frame(); let h2 = get () in frame_invariant (B.loc_all_regions_from false (HS.get_tip h1)) st h1 h2; result ) (** @type: true *) val generate_sha1 : generate_st SHA1 (** @type: true *) val generate_sha2_256: generate_st SHA2_256 (** @type: true *) val generate_sha2_384: generate_st SHA2_384 (** @type: true *) val generate_sha2_512: generate_st SHA2_512 let generate_sha1 = mk_generate EverCrypt.HMAC.compute_sha1
{ "checked_file": "/", "dependencies": [ "Spec.HMAC_DRBG.fsti.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "LowStar.BufferOps.fst.checked", "LowStar.Buffer.fst.checked", "Lib.RandomBuffer.System.fsti.checked", "Lib.Memzero0.fsti.checked", "Lib.IntTypes.fsti.checked", "Hacl.HMAC_DRBG.fst.checked", "Hacl.HMAC_DRBG.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.HyperStack.ST.fsti.checked", "FStar.HyperStack.fst.checked", "EverCrypt.HMAC.fst.checked" ], "interface_file": true, "source_file": "EverCrypt.DRBG.fst" }
[ { "abbrev": false, "full_module": "LowStar.BufferOps", "short_module": null }, { "abbrev": false, "full_module": "Lib.RandomBuffer.System", "short_module": null }, { "abbrev": false, "full_module": "Hacl.HMAC_DRBG", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 0, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
EverCrypt.DRBG.generate_st Spec.Hash.Definitions.SHA2_384
Prims.Tot
[ "total" ]
[]
[ "EverCrypt.DRBG.mk_generate", "Spec.Hash.Definitions.SHA2_384", "EverCrypt.HMAC.compute_sha2_384" ]
[]
false
false
false
true
false
let generate_sha2_384 =
mk_generate EverCrypt.HMAC.compute_sha2_384
false
EverCrypt.DRBG.fst
EverCrypt.DRBG.instantiate_sha2_256
val instantiate_sha2_256: instantiate_st SHA2_256
val instantiate_sha2_256: instantiate_st SHA2_256
let instantiate_sha2_256 = mk_instantiate EverCrypt.HMAC.compute_sha2_256
{ "file_name": "providers/evercrypt/fst/EverCrypt.DRBG.fst", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 73, "end_line": 143, "start_col": 0, "start_line": 143 }
module EverCrypt.DRBG open FStar.HyperStack.ST open Lib.IntTypes open Spec.Hash.Definitions module HS = FStar.HyperStack module B = LowStar.Buffer module S = Spec.HMAC_DRBG open Hacl.HMAC_DRBG open Lib.RandomBuffer.System open LowStar.BufferOps friend Hacl.HMAC_DRBG friend EverCrypt.HMAC #set-options "--max_ifuel 0 --max_fuel 0 --z3rlimit 50" /// Some duplication from Hacl.HMAC_DRBG because we don't want clients to depend on it /// /// Respects EverCrypt convention and reverses order of buf_len, buf arguments [@CAbstractStruct] noeq type state_s: supported_alg -> Type0 = | SHA1_s : state SHA1 -> state_s SHA1 | SHA2_256_s: state SHA2_256 -> state_s SHA2_256 | SHA2_384_s: state SHA2_384 -> state_s SHA2_384 | SHA2_512_s: state SHA2_512 -> state_s SHA2_512 let invert_state_s (a:supported_alg): Lemma (requires True) (ensures inversion (state_s a)) [ SMTPat (state_s a) ] = allow_inversion (state_s a) /// Only call this function in extracted code with a known `a` inline_for_extraction noextract let p #a (s:state_s a) : Hacl.HMAC_DRBG.state a = match a with | SHA1 -> let SHA1_s p = s in p | SHA2_256 -> let SHA2_256_s p = s in p | SHA2_384 -> let SHA2_384_s p = s in p | SHA2_512 -> let SHA2_512_s p = s in p let freeable_s #a st = freeable (p st) let footprint_s #a st = footprint (p st) let invariant_s #a st h = invariant (p st) h let repr #a st h = let st = B.get h st 0 in repr (p st) h let loc_includes_union_l_footprint_s #a l1 l2 st = B.loc_includes_union_l l1 l2 (footprint_s st) let invariant_loc_in_footprint #a st m = () let frame_invariant #a l st h0 h1 = () /// State allocation // Would like to specialize alloca in each branch, but two calls to StackInline // functions in the same block lead to variable redefinitions at extraction. let alloca a = let st = match a with | SHA1 -> SHA1_s (alloca a) | SHA2_256 -> SHA2_256_s (alloca a) | SHA2_384 -> SHA2_384_s (alloca a) | SHA2_512 -> SHA2_512_s (alloca a) in B.alloca st 1ul let create_in a r = let st = match a with | SHA1 -> SHA1_s (create_in SHA1 r) | SHA2_256 -> SHA2_256_s (create_in SHA2_256 r) | SHA2_384 -> SHA2_384_s (create_in SHA2_384 r) | SHA2_512 -> SHA2_512_s (create_in SHA2_512 r) in B.malloc r st 1ul let create a = create_in a HS.root /// Instantiate function inline_for_extraction noextract val mk_instantiate: #a:supported_alg -> EverCrypt.HMAC.compute_st a -> instantiate_st a let mk_instantiate #a hmac st personalization_string personalization_string_len = if personalization_string_len >. max_personalization_string_length then false else let entropy_input_len = min_length a in let nonce_len = min_length a /. 2ul in let min_entropy = entropy_input_len +! nonce_len in push_frame(); assert_norm (range (v min_entropy) U32); let entropy = B.alloca (u8 0) min_entropy in let ok = randombytes entropy min_entropy in let result = if not ok then false else begin let entropy_input = B.sub entropy 0ul entropy_input_len in let nonce = B.sub entropy entropy_input_len nonce_len in S.hmac_input_bound a; let st_s = !*st in mk_instantiate hmac (p st_s) entropy_input_len entropy_input nonce_len nonce personalization_string_len personalization_string; true end in pop_frame(); result (** @type: true *) val instantiate_sha1 : instantiate_st SHA1 (** @type: true *) val instantiate_sha2_256: instantiate_st SHA2_256 (** @type: true *) val instantiate_sha2_384: instantiate_st SHA2_384 (** @type: true *) val instantiate_sha2_512: instantiate_st SHA2_512
{ "checked_file": "/", "dependencies": [ "Spec.HMAC_DRBG.fsti.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "LowStar.BufferOps.fst.checked", "LowStar.Buffer.fst.checked", "Lib.RandomBuffer.System.fsti.checked", "Lib.Memzero0.fsti.checked", "Lib.IntTypes.fsti.checked", "Hacl.HMAC_DRBG.fst.checked", "Hacl.HMAC_DRBG.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.HyperStack.ST.fsti.checked", "FStar.HyperStack.fst.checked", "EverCrypt.HMAC.fst.checked" ], "interface_file": true, "source_file": "EverCrypt.DRBG.fst" }
[ { "abbrev": false, "full_module": "LowStar.BufferOps", "short_module": null }, { "abbrev": false, "full_module": "Lib.RandomBuffer.System", "short_module": null }, { "abbrev": false, "full_module": "Hacl.HMAC_DRBG", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 0, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
EverCrypt.DRBG.instantiate_st Spec.Hash.Definitions.SHA2_256
Prims.Tot
[ "total" ]
[]
[ "EverCrypt.DRBG.mk_instantiate", "Spec.Hash.Definitions.SHA2_256", "EverCrypt.HMAC.compute_sha2_256" ]
[]
false
false
false
true
false
let instantiate_sha2_256 =
mk_instantiate EverCrypt.HMAC.compute_sha2_256
false
EverCrypt.DRBG.fst
EverCrypt.DRBG.reseed_sha2_256
val reseed_sha2_256: reseed_st SHA2_256
val reseed_sha2_256: reseed_st SHA2_256
let reseed_sha2_256 = mk_reseed EverCrypt.HMAC.compute_sha2_256
{ "file_name": "providers/evercrypt/fst/EverCrypt.DRBG.fst", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 63, "end_line": 193, "start_col": 0, "start_line": 193 }
module EverCrypt.DRBG open FStar.HyperStack.ST open Lib.IntTypes open Spec.Hash.Definitions module HS = FStar.HyperStack module B = LowStar.Buffer module S = Spec.HMAC_DRBG open Hacl.HMAC_DRBG open Lib.RandomBuffer.System open LowStar.BufferOps friend Hacl.HMAC_DRBG friend EverCrypt.HMAC #set-options "--max_ifuel 0 --max_fuel 0 --z3rlimit 50" /// Some duplication from Hacl.HMAC_DRBG because we don't want clients to depend on it /// /// Respects EverCrypt convention and reverses order of buf_len, buf arguments [@CAbstractStruct] noeq type state_s: supported_alg -> Type0 = | SHA1_s : state SHA1 -> state_s SHA1 | SHA2_256_s: state SHA2_256 -> state_s SHA2_256 | SHA2_384_s: state SHA2_384 -> state_s SHA2_384 | SHA2_512_s: state SHA2_512 -> state_s SHA2_512 let invert_state_s (a:supported_alg): Lemma (requires True) (ensures inversion (state_s a)) [ SMTPat (state_s a) ] = allow_inversion (state_s a) /// Only call this function in extracted code with a known `a` inline_for_extraction noextract let p #a (s:state_s a) : Hacl.HMAC_DRBG.state a = match a with | SHA1 -> let SHA1_s p = s in p | SHA2_256 -> let SHA2_256_s p = s in p | SHA2_384 -> let SHA2_384_s p = s in p | SHA2_512 -> let SHA2_512_s p = s in p let freeable_s #a st = freeable (p st) let footprint_s #a st = footprint (p st) let invariant_s #a st h = invariant (p st) h let repr #a st h = let st = B.get h st 0 in repr (p st) h let loc_includes_union_l_footprint_s #a l1 l2 st = B.loc_includes_union_l l1 l2 (footprint_s st) let invariant_loc_in_footprint #a st m = () let frame_invariant #a l st h0 h1 = () /// State allocation // Would like to specialize alloca in each branch, but two calls to StackInline // functions in the same block lead to variable redefinitions at extraction. let alloca a = let st = match a with | SHA1 -> SHA1_s (alloca a) | SHA2_256 -> SHA2_256_s (alloca a) | SHA2_384 -> SHA2_384_s (alloca a) | SHA2_512 -> SHA2_512_s (alloca a) in B.alloca st 1ul let create_in a r = let st = match a with | SHA1 -> SHA1_s (create_in SHA1 r) | SHA2_256 -> SHA2_256_s (create_in SHA2_256 r) | SHA2_384 -> SHA2_384_s (create_in SHA2_384 r) | SHA2_512 -> SHA2_512_s (create_in SHA2_512 r) in B.malloc r st 1ul let create a = create_in a HS.root /// Instantiate function inline_for_extraction noextract val mk_instantiate: #a:supported_alg -> EverCrypt.HMAC.compute_st a -> instantiate_st a let mk_instantiate #a hmac st personalization_string personalization_string_len = if personalization_string_len >. max_personalization_string_length then false else let entropy_input_len = min_length a in let nonce_len = min_length a /. 2ul in let min_entropy = entropy_input_len +! nonce_len in push_frame(); assert_norm (range (v min_entropy) U32); let entropy = B.alloca (u8 0) min_entropy in let ok = randombytes entropy min_entropy in let result = if not ok then false else begin let entropy_input = B.sub entropy 0ul entropy_input_len in let nonce = B.sub entropy entropy_input_len nonce_len in S.hmac_input_bound a; let st_s = !*st in mk_instantiate hmac (p st_s) entropy_input_len entropy_input nonce_len nonce personalization_string_len personalization_string; true end in pop_frame(); result (** @type: true *) val instantiate_sha1 : instantiate_st SHA1 (** @type: true *) val instantiate_sha2_256: instantiate_st SHA2_256 (** @type: true *) val instantiate_sha2_384: instantiate_st SHA2_384 (** @type: true *) val instantiate_sha2_512: instantiate_st SHA2_512 let instantiate_sha1 = mk_instantiate EverCrypt.HMAC.compute_sha1 let instantiate_sha2_256 = mk_instantiate EverCrypt.HMAC.compute_sha2_256 let instantiate_sha2_384 = mk_instantiate EverCrypt.HMAC.compute_sha2_384 let instantiate_sha2_512 = mk_instantiate EverCrypt.HMAC.compute_sha2_512 /// Reseed function inline_for_extraction noextract val mk_reseed: #a:supported_alg -> EverCrypt.HMAC.compute_st a -> reseed_st a let mk_reseed #a hmac st additional_input additional_input_len = if additional_input_len >. max_additional_input_length then false else let entropy_input_len = min_length a in push_frame(); let entropy_input = B.alloca (u8 0) entropy_input_len in let ok = randombytes entropy_input entropy_input_len in let result = if not ok then false else begin S.hmac_input_bound a; let st_s = !*st in mk_reseed hmac (p st_s) entropy_input_len entropy_input additional_input_len additional_input; true end in pop_frame(); result (** @type: true *) val reseed_sha1 : reseed_st SHA1 (** @type: true *) val reseed_sha2_256: reseed_st SHA2_256 (** @type: true *) val reseed_sha2_384: reseed_st SHA2_384 (** @type: true *) val reseed_sha2_512: reseed_st SHA2_512
{ "checked_file": "/", "dependencies": [ "Spec.HMAC_DRBG.fsti.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "LowStar.BufferOps.fst.checked", "LowStar.Buffer.fst.checked", "Lib.RandomBuffer.System.fsti.checked", "Lib.Memzero0.fsti.checked", "Lib.IntTypes.fsti.checked", "Hacl.HMAC_DRBG.fst.checked", "Hacl.HMAC_DRBG.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.HyperStack.ST.fsti.checked", "FStar.HyperStack.fst.checked", "EverCrypt.HMAC.fst.checked" ], "interface_file": true, "source_file": "EverCrypt.DRBG.fst" }
[ { "abbrev": false, "full_module": "LowStar.BufferOps", "short_module": null }, { "abbrev": false, "full_module": "Lib.RandomBuffer.System", "short_module": null }, { "abbrev": false, "full_module": "Hacl.HMAC_DRBG", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 0, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
EverCrypt.DRBG.reseed_st Spec.Hash.Definitions.SHA2_256
Prims.Tot
[ "total" ]
[]
[ "EverCrypt.DRBG.mk_reseed", "Spec.Hash.Definitions.SHA2_256", "EverCrypt.HMAC.compute_sha2_256" ]
[]
false
false
false
true
false
let reseed_sha2_256 =
mk_reseed EverCrypt.HMAC.compute_sha2_256
false
EverCrypt.DRBG.fst
EverCrypt.DRBG.generate_sha1
val generate_sha1 : generate_st SHA1
val generate_sha1 : generate_st SHA1
let generate_sha1 = mk_generate EverCrypt.HMAC.compute_sha1
{ "file_name": "providers/evercrypt/fst/EverCrypt.DRBG.fst", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 63, "end_line": 240, "start_col": 0, "start_line": 240 }
module EverCrypt.DRBG open FStar.HyperStack.ST open Lib.IntTypes open Spec.Hash.Definitions module HS = FStar.HyperStack module B = LowStar.Buffer module S = Spec.HMAC_DRBG open Hacl.HMAC_DRBG open Lib.RandomBuffer.System open LowStar.BufferOps friend Hacl.HMAC_DRBG friend EverCrypt.HMAC #set-options "--max_ifuel 0 --max_fuel 0 --z3rlimit 50" /// Some duplication from Hacl.HMAC_DRBG because we don't want clients to depend on it /// /// Respects EverCrypt convention and reverses order of buf_len, buf arguments [@CAbstractStruct] noeq type state_s: supported_alg -> Type0 = | SHA1_s : state SHA1 -> state_s SHA1 | SHA2_256_s: state SHA2_256 -> state_s SHA2_256 | SHA2_384_s: state SHA2_384 -> state_s SHA2_384 | SHA2_512_s: state SHA2_512 -> state_s SHA2_512 let invert_state_s (a:supported_alg): Lemma (requires True) (ensures inversion (state_s a)) [ SMTPat (state_s a) ] = allow_inversion (state_s a) /// Only call this function in extracted code with a known `a` inline_for_extraction noextract let p #a (s:state_s a) : Hacl.HMAC_DRBG.state a = match a with | SHA1 -> let SHA1_s p = s in p | SHA2_256 -> let SHA2_256_s p = s in p | SHA2_384 -> let SHA2_384_s p = s in p | SHA2_512 -> let SHA2_512_s p = s in p let freeable_s #a st = freeable (p st) let footprint_s #a st = footprint (p st) let invariant_s #a st h = invariant (p st) h let repr #a st h = let st = B.get h st 0 in repr (p st) h let loc_includes_union_l_footprint_s #a l1 l2 st = B.loc_includes_union_l l1 l2 (footprint_s st) let invariant_loc_in_footprint #a st m = () let frame_invariant #a l st h0 h1 = () /// State allocation // Would like to specialize alloca in each branch, but two calls to StackInline // functions in the same block lead to variable redefinitions at extraction. let alloca a = let st = match a with | SHA1 -> SHA1_s (alloca a) | SHA2_256 -> SHA2_256_s (alloca a) | SHA2_384 -> SHA2_384_s (alloca a) | SHA2_512 -> SHA2_512_s (alloca a) in B.alloca st 1ul let create_in a r = let st = match a with | SHA1 -> SHA1_s (create_in SHA1 r) | SHA2_256 -> SHA2_256_s (create_in SHA2_256 r) | SHA2_384 -> SHA2_384_s (create_in SHA2_384 r) | SHA2_512 -> SHA2_512_s (create_in SHA2_512 r) in B.malloc r st 1ul let create a = create_in a HS.root /// Instantiate function inline_for_extraction noextract val mk_instantiate: #a:supported_alg -> EverCrypt.HMAC.compute_st a -> instantiate_st a let mk_instantiate #a hmac st personalization_string personalization_string_len = if personalization_string_len >. max_personalization_string_length then false else let entropy_input_len = min_length a in let nonce_len = min_length a /. 2ul in let min_entropy = entropy_input_len +! nonce_len in push_frame(); assert_norm (range (v min_entropy) U32); let entropy = B.alloca (u8 0) min_entropy in let ok = randombytes entropy min_entropy in let result = if not ok then false else begin let entropy_input = B.sub entropy 0ul entropy_input_len in let nonce = B.sub entropy entropy_input_len nonce_len in S.hmac_input_bound a; let st_s = !*st in mk_instantiate hmac (p st_s) entropy_input_len entropy_input nonce_len nonce personalization_string_len personalization_string; true end in pop_frame(); result (** @type: true *) val instantiate_sha1 : instantiate_st SHA1 (** @type: true *) val instantiate_sha2_256: instantiate_st SHA2_256 (** @type: true *) val instantiate_sha2_384: instantiate_st SHA2_384 (** @type: true *) val instantiate_sha2_512: instantiate_st SHA2_512 let instantiate_sha1 = mk_instantiate EverCrypt.HMAC.compute_sha1 let instantiate_sha2_256 = mk_instantiate EverCrypt.HMAC.compute_sha2_256 let instantiate_sha2_384 = mk_instantiate EverCrypt.HMAC.compute_sha2_384 let instantiate_sha2_512 = mk_instantiate EverCrypt.HMAC.compute_sha2_512 /// Reseed function inline_for_extraction noextract val mk_reseed: #a:supported_alg -> EverCrypt.HMAC.compute_st a -> reseed_st a let mk_reseed #a hmac st additional_input additional_input_len = if additional_input_len >. max_additional_input_length then false else let entropy_input_len = min_length a in push_frame(); let entropy_input = B.alloca (u8 0) entropy_input_len in let ok = randombytes entropy_input entropy_input_len in let result = if not ok then false else begin S.hmac_input_bound a; let st_s = !*st in mk_reseed hmac (p st_s) entropy_input_len entropy_input additional_input_len additional_input; true end in pop_frame(); result (** @type: true *) val reseed_sha1 : reseed_st SHA1 (** @type: true *) val reseed_sha2_256: reseed_st SHA2_256 (** @type: true *) val reseed_sha2_384: reseed_st SHA2_384 (** @type: true *) val reseed_sha2_512: reseed_st SHA2_512 let reseed_sha1 = mk_reseed EverCrypt.HMAC.compute_sha1 let reseed_sha2_256 = mk_reseed EverCrypt.HMAC.compute_sha2_256 let reseed_sha2_384 = mk_reseed EverCrypt.HMAC.compute_sha2_384 let reseed_sha2_512 = mk_reseed EverCrypt.HMAC.compute_sha2_512 /// Generate function inline_for_extraction noextract val mk_generate: #a:supported_alg -> EverCrypt.HMAC.compute_st a -> generate_st a let mk_generate #a hmac output st n additional_input additional_input_len = if additional_input_len >. max_additional_input_length || n >. max_output_length then false else ( push_frame(); let ok = mk_reseed hmac st additional_input additional_input_len in let result = if not ok then false else begin let st_s = !*st in let b = mk_generate hmac output (p st_s) n additional_input_len additional_input in b (* This used to be true, which is fishy *) end in let h1 = get () in pop_frame(); let h2 = get () in frame_invariant (B.loc_all_regions_from false (HS.get_tip h1)) st h1 h2; result ) (** @type: true *) val generate_sha1 : generate_st SHA1 (** @type: true *) val generate_sha2_256: generate_st SHA2_256 (** @type: true *) val generate_sha2_384: generate_st SHA2_384 (** @type: true *) val generate_sha2_512: generate_st SHA2_512
{ "checked_file": "/", "dependencies": [ "Spec.HMAC_DRBG.fsti.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "LowStar.BufferOps.fst.checked", "LowStar.Buffer.fst.checked", "Lib.RandomBuffer.System.fsti.checked", "Lib.Memzero0.fsti.checked", "Lib.IntTypes.fsti.checked", "Hacl.HMAC_DRBG.fst.checked", "Hacl.HMAC_DRBG.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.HyperStack.ST.fsti.checked", "FStar.HyperStack.fst.checked", "EverCrypt.HMAC.fst.checked" ], "interface_file": true, "source_file": "EverCrypt.DRBG.fst" }
[ { "abbrev": false, "full_module": "LowStar.BufferOps", "short_module": null }, { "abbrev": false, "full_module": "Lib.RandomBuffer.System", "short_module": null }, { "abbrev": false, "full_module": "Hacl.HMAC_DRBG", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 0, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
EverCrypt.DRBG.generate_st Spec.Hash.Definitions.SHA1
Prims.Tot
[ "total" ]
[]
[ "EverCrypt.DRBG.mk_generate", "Spec.Hash.Definitions.SHA1", "EverCrypt.HMAC.compute_sha1" ]
[]
false
false
false
true
false
let generate_sha1 =
mk_generate EverCrypt.HMAC.compute_sha1
false
EverCrypt.DRBG.fst
EverCrypt.DRBG.reseed_sha1
val reseed_sha1 : reseed_st SHA1
val reseed_sha1 : reseed_st SHA1
let reseed_sha1 = mk_reseed EverCrypt.HMAC.compute_sha1
{ "file_name": "providers/evercrypt/fst/EverCrypt.DRBG.fst", "git_rev": "eb1badfa34c70b0bbe0fe24fe0f49fb1295c7872", "git_url": "https://github.com/project-everest/hacl-star.git", "project_name": "hacl-star" }
{ "end_col": 59, "end_line": 192, "start_col": 0, "start_line": 192 }
module EverCrypt.DRBG open FStar.HyperStack.ST open Lib.IntTypes open Spec.Hash.Definitions module HS = FStar.HyperStack module B = LowStar.Buffer module S = Spec.HMAC_DRBG open Hacl.HMAC_DRBG open Lib.RandomBuffer.System open LowStar.BufferOps friend Hacl.HMAC_DRBG friend EverCrypt.HMAC #set-options "--max_ifuel 0 --max_fuel 0 --z3rlimit 50" /// Some duplication from Hacl.HMAC_DRBG because we don't want clients to depend on it /// /// Respects EverCrypt convention and reverses order of buf_len, buf arguments [@CAbstractStruct] noeq type state_s: supported_alg -> Type0 = | SHA1_s : state SHA1 -> state_s SHA1 | SHA2_256_s: state SHA2_256 -> state_s SHA2_256 | SHA2_384_s: state SHA2_384 -> state_s SHA2_384 | SHA2_512_s: state SHA2_512 -> state_s SHA2_512 let invert_state_s (a:supported_alg): Lemma (requires True) (ensures inversion (state_s a)) [ SMTPat (state_s a) ] = allow_inversion (state_s a) /// Only call this function in extracted code with a known `a` inline_for_extraction noextract let p #a (s:state_s a) : Hacl.HMAC_DRBG.state a = match a with | SHA1 -> let SHA1_s p = s in p | SHA2_256 -> let SHA2_256_s p = s in p | SHA2_384 -> let SHA2_384_s p = s in p | SHA2_512 -> let SHA2_512_s p = s in p let freeable_s #a st = freeable (p st) let footprint_s #a st = footprint (p st) let invariant_s #a st h = invariant (p st) h let repr #a st h = let st = B.get h st 0 in repr (p st) h let loc_includes_union_l_footprint_s #a l1 l2 st = B.loc_includes_union_l l1 l2 (footprint_s st) let invariant_loc_in_footprint #a st m = () let frame_invariant #a l st h0 h1 = () /// State allocation // Would like to specialize alloca in each branch, but two calls to StackInline // functions in the same block lead to variable redefinitions at extraction. let alloca a = let st = match a with | SHA1 -> SHA1_s (alloca a) | SHA2_256 -> SHA2_256_s (alloca a) | SHA2_384 -> SHA2_384_s (alloca a) | SHA2_512 -> SHA2_512_s (alloca a) in B.alloca st 1ul let create_in a r = let st = match a with | SHA1 -> SHA1_s (create_in SHA1 r) | SHA2_256 -> SHA2_256_s (create_in SHA2_256 r) | SHA2_384 -> SHA2_384_s (create_in SHA2_384 r) | SHA2_512 -> SHA2_512_s (create_in SHA2_512 r) in B.malloc r st 1ul let create a = create_in a HS.root /// Instantiate function inline_for_extraction noextract val mk_instantiate: #a:supported_alg -> EverCrypt.HMAC.compute_st a -> instantiate_st a let mk_instantiate #a hmac st personalization_string personalization_string_len = if personalization_string_len >. max_personalization_string_length then false else let entropy_input_len = min_length a in let nonce_len = min_length a /. 2ul in let min_entropy = entropy_input_len +! 
nonce_len in push_frame(); assert_norm (range (v min_entropy) U32); let entropy = B.alloca (u8 0) min_entropy in let ok = randombytes entropy min_entropy in let result = if not ok then false else begin let entropy_input = B.sub entropy 0ul entropy_input_len in let nonce = B.sub entropy entropy_input_len nonce_len in S.hmac_input_bound a; let st_s = !*st in mk_instantiate hmac (p st_s) entropy_input_len entropy_input nonce_len nonce personalization_string_len personalization_string; true end in pop_frame(); result (** @type: true *) val instantiate_sha1 : instantiate_st SHA1 (** @type: true *) val instantiate_sha2_256: instantiate_st SHA2_256 (** @type: true *) val instantiate_sha2_384: instantiate_st SHA2_384 (** @type: true *) val instantiate_sha2_512: instantiate_st SHA2_512 let instantiate_sha1 = mk_instantiate EverCrypt.HMAC.compute_sha1 let instantiate_sha2_256 = mk_instantiate EverCrypt.HMAC.compute_sha2_256 let instantiate_sha2_384 = mk_instantiate EverCrypt.HMAC.compute_sha2_384 let instantiate_sha2_512 = mk_instantiate EverCrypt.HMAC.compute_sha2_512 /// Reseed function inline_for_extraction noextract val mk_reseed: #a:supported_alg -> EverCrypt.HMAC.compute_st a -> reseed_st a let mk_reseed #a hmac st additional_input additional_input_len = if additional_input_len >. max_additional_input_length then false else let entropy_input_len = min_length a in push_frame(); let entropy_input = B.alloca (u8 0) entropy_input_len in let ok = randombytes entropy_input entropy_input_len in let result = if not ok then false else begin S.hmac_input_bound a; let st_s = !*st in mk_reseed hmac (p st_s) entropy_input_len entropy_input additional_input_len additional_input; true end in pop_frame(); result (** @type: true *) val reseed_sha1 : reseed_st SHA1 (** @type: true *) val reseed_sha2_256: reseed_st SHA2_256 (** @type: true *) val reseed_sha2_384: reseed_st SHA2_384 (** @type: true *) val reseed_sha2_512: reseed_st SHA2_512
{ "checked_file": "/", "dependencies": [ "Spec.HMAC_DRBG.fsti.checked", "Spec.Hash.Definitions.fst.checked", "prims.fst.checked", "LowStar.BufferOps.fst.checked", "LowStar.Buffer.fst.checked", "Lib.RandomBuffer.System.fsti.checked", "Lib.Memzero0.fsti.checked", "Lib.IntTypes.fsti.checked", "Hacl.HMAC_DRBG.fst.checked", "Hacl.HMAC_DRBG.fst.checked", "FStar.UInt32.fsti.checked", "FStar.Pervasives.fsti.checked", "FStar.HyperStack.ST.fsti.checked", "FStar.HyperStack.fst.checked", "EverCrypt.HMAC.fst.checked" ], "interface_file": true, "source_file": "EverCrypt.DRBG.fst" }
[ { "abbrev": false, "full_module": "LowStar.BufferOps", "short_module": null }, { "abbrev": false, "full_module": "Lib.RandomBuffer.System", "short_module": null }, { "abbrev": false, "full_module": "Hacl.HMAC_DRBG", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": true, "full_module": "Spec.HMAC_DRBG", "short_module": "S" }, { "abbrev": true, "full_module": "LowStar.Buffer", "short_module": "B" }, { "abbrev": true, "full_module": "FStar.HyperStack", "short_module": "HS" }, { "abbrev": false, "full_module": "Spec.Hash.Definitions", "short_module": null }, { "abbrev": false, "full_module": "Lib.IntTypes", "short_module": null }, { "abbrev": false, "full_module": "FStar.HyperStack.ST", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "EverCrypt", "short_module": null }, { "abbrev": false, "full_module": "FStar.Pervasives", "short_module": null }, { "abbrev": false, "full_module": "Prims", "short_module": null }, { "abbrev": false, "full_module": "FStar", "short_module": null } ]
{ "detail_errors": false, "detail_hint_replay": false, "initial_fuel": 2, "initial_ifuel": 1, "max_fuel": 0, "max_ifuel": 0, "no_plugins": false, "no_smt": false, "no_tactics": false, "quake_hi": 1, "quake_keep": false, "quake_lo": 1, "retry": false, "reuse_hint_for": null, "smtencoding_elim_box": false, "smtencoding_l_arith_repr": "boxwrap", "smtencoding_nl_arith_repr": "boxwrap", "smtencoding_valid_elim": false, "smtencoding_valid_intro": true, "tcnorm": true, "trivial_pre_for_unannotated_effectful_fns": false, "z3cliopt": [], "z3refresh": false, "z3rlimit": 50, "z3rlimit_factor": 1, "z3seed": 0, "z3smtopt": [], "z3version": "4.8.5" }
false
EverCrypt.DRBG.reseed_st Spec.Hash.Definitions.SHA1
Prims.Tot
[ "total" ]
[]
[ "EverCrypt.DRBG.mk_reseed", "Spec.Hash.Definitions.SHA1", "EverCrypt.HMAC.compute_sha1" ]
[]
false
false
false
true
false
let reseed_sha1 =
mk_reseed EverCrypt.HMAC.compute_sha1
false
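
All three definitions recorded above (reseed_sha2_256, generate_sha1, reseed_sha1) follow the same pattern from EverCrypt.DRBG.fst: an algorithm-generic combinator (mk_reseed or mk_generate, marked inline_for_extraction noextract in the file context) is partially applied to a concrete EverCrypt.HMAC compute function, yielding a monomorphic instance whose ideal premises are exactly the combinator, the algorithm, and the compute function. Below is a minimal F* sketch of that specialization pattern, not part of EverCrypt: alg, op_st, mk_op, compute_alg1, compute_alg2, op_alg1, and op_alg2 are hypothetical stand-ins introduced only for illustration.

module Sketch

(* A hypothetical two-constructor algorithm index, standing in for
   Spec.Hash.Definitions.supported_alg. *)
type alg = | Alg1 | Alg2

(* An algorithm-indexed operation type, analogous to reseed_st / generate_st.
   The real types are stateful; a Tot bool keeps the sketch self-contained. *)
let op_st (a:alg) : Type0 = unit -> Tot bool

(* A generic combinator parameterized by a per-algorithm "compute" function,
   analogous to mk_reseed #a hmac / mk_generate #a hmac. *)
inline_for_extraction noextract
let mk_op (#a:alg) (compute:unit -> Tot bool) : op_st a =
  fun () -> compute ()

(* Hypothetical per-algorithm compute functions, standing in for
   EverCrypt.HMAC.compute_sha1 / compute_sha2_256. *)
let compute_alg1 () : Tot bool = true
let compute_alg2 () : Tot bool = true

(* Monomorphic instances, mirroring
   `let reseed_sha1 = mk_reseed EverCrypt.HMAC.compute_sha1`. *)
let op_alg1 : op_st Alg1 = mk_op compute_alg1
let op_alg2 : op_st Alg2 = mk_op compute_alg2

As the file context notes ("Only call this function in extracted code with a known `a`"), specializing the combinator at a known algorithm lets the extracted C code call a fixed HMAC implementation rather than dispatching on the algorithm at run time.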