merge with master
jcp19 committed Jul 12, 2024
2 parents 81ca970 + 01387ec commit 689222a
Showing 9 changed files with 245 additions and 144 deletions.
7 changes: 6 additions & 1 deletion .github/workflows/gobra.yml
@@ -415,7 +415,12 @@ jobs:
includePaths: ${{ env.includePaths }}
assumeInjectivityOnInhale: ${{ env.assumeInjectivityOnInhale }}
checkConsistency: ${{ env.checkConsistency }}
chop: 10
# Due to a bug introduced in https://github.com/viperproject/gobra/pull/776,
# we must currently disable the chopper; otherwise, well-founded orders
# for termination checking are not available in the chopped Viper parts.
# We should re-enable it whenever possible, as it reduces verification time
# by ~8 min (20%).
# chop: 10
parallelizeBranches: '1'
conditionalizePermissions: '1'
moreJoins: 'impure'
64 changes: 26 additions & 38 deletions pkg/slayers/path/scion/base_spec.gobra
@@ -57,8 +57,7 @@ decreases
pure func (b Base) ValidCurrFieldsSpec() bool {
return 0 <= b.NumINF && b.NumINF <= MaxINFs &&
0 <= b.NumHops && b.NumHops <= MaxHops &&
b.ValidCurrInfSpec() &&
b.ValidCurrHfSpec()
b.ValidCurrInfSpec() && b.ValidCurrHfSpec()
}

// A `Base` is weakly valid when the fields `NumINF` and `NumHops` are,
@@ -71,29 +70,29 @@ pure func (b Base) WeaklyValid() bool {
return 0 <= b.NumINF && b.NumINF <= MaxINFs &&
0 <= b.NumHops && b.NumHops <= MaxHops &&
(0 < b.NumINF ==> 0 < b.NumHops) &&
b.PathMeta.InBounds() &&
b.NumsCompatibleWithSegLen()
b.PathMeta.InBounds() && b.NumsCompatibleWithSegLen()
}

// A `Base` is strongly valid iff it is weakly valid and its `CurrHF`
// and `CurrINF` are within bounds.
// A `Base` is valid (a.k.a. fully valid) iff it is weakly valid,
// its `CurrHF` and `CurrINF` are within bounds, its `CurrHF`
// is compatible with its `CurrINF`, and there are no singleton
// segments. There used to be another validity criterion,
// stronger than WeaklyValid and weaker than FullyValid;
// it was known as StronglyValid and has been deprecated.
ghost
decreases
pure func (b Base) StronglyValid() bool {
return b.WeaklyValid() && b.ValidCurrFieldsSpec()
pure func (b Base) Valid() bool {
return b.WeaklyValid() &&
b.ValidCurrFieldsSpec() &&
b.CurrInfMatchesCurrHFSpec() &&
b.PathMeta.SegLen[0] != 1 &&
b.PathMeta.SegLen[1] != 1 &&
b.PathMeta.SegLen[2] != 1
}
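
To make the new predicate concrete, here is a minimal, hypothetical sketch (not part of this commit; the concrete field values are assumptions chosen for illustration) of a Base value that can satisfy WeaklyValid but is rejected by Valid because its only segment is a singleton:

// Hypothetical illustration only: bounds and SegLen compatibility can hold,
// so WeaklyValid() may be satisfied, but Valid() rejects this value because
// PathMeta.SegLen[0] == 1, i.e. the first segment is a singleton.
b := Base{
	PathMeta: MetaHdr{CurrINF: 0, CurrHF: 0, SegLen: [3]uint8{1, 0, 0}},
	NumINF:   1,
	NumHops:  1,
}
// Ghost-level expectations (WeaklyValid and Valid are ghost pure functions,
// so they can only be referred to in specifications, not executed):
//   b.WeaklyValid()  -- may hold
//   b.Valid()        -- does not hold, since b.PathMeta.SegLen[0] == 1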

// A `Base` is fully valid iff it is strongly valid and its `CurrHF` is
// compatible with its `CurrINF`.
ghost
decreases
pure func (b Base) FullyValid() bool {
return b.StronglyValid() && b.CurrInfMatchesCurrHF()
}

ghost
decreases
pure func (b Base) CurrInfMatchesCurrHF() bool {
pure func (b Base) CurrInfMatchesCurrHFSpec() bool {
return b.PathMeta.CurrINF == b.InfForHfSpec(b.PathMeta.CurrHF)
}

@@ -167,7 +166,7 @@ ghost
requires s.NumINF != 0
requires int(s.PathMeta.CurrHF) < s.NumHops-1
ensures s.WeaklyValid() ==> res.WeaklyValid()
ensures s.StronglyValid() ==> res.StronglyValid()
ensures s.Valid() ==> res.Valid()
decreases
pure func (s Base) IncPathSpec() (res Base) {
return Base {
Expand All @@ -177,6 +176,12 @@ pure func (s Base) IncPathSpec() (res Base) {
}
}

ghost
requires s.Valid()
ensures s.IsXoverSpec() ==> !s.IncPathSpec().IsXoverSpec()
decreases
func (s Base) NotIsXoverAfterIncPath() {}

ghost
decreases
pure func (b Base) ReverseSpec() Base {
@@ -208,12 +213,10 @@ pure func (b Base) ReverseSegLen() [3]uint8 {
}

ghost
requires b.StronglyValid()
ensures b.ReverseSpec().StronglyValid()
requires b.Valid()
ensures b.ReverseSpec().Valid()
decreases
pure func (b Base) ReversingBaseStronglyValidSegLenHasValidSegLen() Lemma {
return Lemma{}
}
func (b Base) ReversingBaseValidSegLenHasValidSegLen() { }

ghost
requires b.Mem()
@@ -321,21 +324,6 @@ pure func (s MetaHdr) EqAbsHeader(ub []byte) bool {
s == DecodedFrom(binary.BigEndian.Uint32(ub[:MetaLen]))
}

ghost
opaque
requires MetaLen <= idx && idx <= len(ub)
requires acc(sl.Bytes(ub, 0, len(ub)), R55)
requires acc(sl.Bytes(ub[:idx], 0, idx), R55)
ensures s.EqAbsHeader(ub) == s.EqAbsHeader(ub[:idx])
decreases
pure func (s MetaHdr) EqAbsHeaderForSublice(ub []byte, idx int) Lemma {
return let _ := Asserting(ub[:MetaLen] === ub[:idx][:MetaLen]) in
unfolding acc(sl.Bytes(ub, 0, len(ub)), R56) in
unfolding acc(sl.Bytes(ub[:idx], 0, idx), R56) in
let _ := Asserting(s.EqAbsHeader(ub) == (s == DecodedFrom(binary.BigEndian.Uint32(ub[:MetaLen])))) in
Lemma{}
}

/** Lemma proven in /VerifiedSCION/verification/utils/bitwise/proofs.dfy **/
ghost
requires m.InBounds()
8 changes: 4 additions & 4 deletions pkg/slayers/path/scion/decoded.go
@@ -213,11 +213,11 @@ func (s *Decoded) SerializeTo(b []byte /*@, ghost ubuf []byte @*/) (r error) {
// @ p.Mem(ubuf) &&
// @ p == s &&
// @ typeOf(p) == type[*Decoded] &&
// @ (old(s.GetBase(ubuf).StronglyValid()) ==> s.GetBase(ubuf).StronglyValid()))
// @ (old(s.GetBase(ubuf).Valid()) ==> s.GetBase(ubuf).Valid()))
// @ ensures r != nil ==> r.ErrorMem() && s.Mem(ubuf)
// @ decreases
func (s *Decoded) Reverse( /*@ ghost ubuf []byte @*/ ) (p path.Path, r error) {
//@ ghost isValid := s.GetBase(ubuf).StronglyValid()
//@ ghost isValid := s.GetBase(ubuf).Valid()
//@ ghost base := s.GetBase(ubuf)
//@ ghost metaHdrAferReversingSegLen := MetaHdr {
//@ CurrINF: base.PathMeta.CurrINF,
@@ -282,8 +282,8 @@ func (s *Decoded) Reverse( /*@ ghost ubuf []byte @*/ ) (p path.Path, r error) {
s.PathMeta.CurrINF = uint8(s.NumINF) - s.PathMeta.CurrINF - 1
s.PathMeta.CurrHF = uint8(s.NumHops) - s.PathMeta.CurrHF - 1
//@ assert s.Base == base.ReverseSpec()
//@ ghost if isValid { base.ReversingBaseStronglyValidSegLenHasValidSegLen() }
//@ assert isValid ==> s.Base.StronglyValid()
//@ ghost if isValid { base.ReversingBaseValidSegLenHasValidSegLen() }
//@ assert isValid ==> s.Base.Valid()
//@ fold s.Base.Mem()
//@ fold s.Mem(ubuf)
return s, nil
2 changes: 1 addition & 1 deletion pkg/slayers/path/scion/decoded_spec.gobra
@@ -113,7 +113,7 @@ requires d.Mem(ubuf)
ensures e == nil ==> (
d.Mem(ubuf) &&
d.LenSpec(ubuf) == old(d.LenSpec(ubuf)) &&
(old(d.GetBase(ubuf).StronglyValid()) ==> d.GetBase(ubuf).StronglyValid()))
(old(d.GetBase(ubuf).Valid()) ==> d.GetBase(ubuf).Valid()))
ensures e != nil ==> d.NonInitMem() && e.ErrorMem()
decreases
func (d *Decoded) IncPath(ghost ubuf []byte) (e error) {
40 changes: 26 additions & 14 deletions pkg/slayers/path/scion/raw.go
@@ -139,17 +139,17 @@ func (s *Raw) Reverse( /*@ ghost ubuf []byte @*/ ) (p path.Path, err error) {
// @ ensures err == nil ==> (
// @ let newUb := s.RawBufferMem(ubuf) in
// @ d.Mem(newUb) &&
// @ (old(s.GetBase(ubuf).StronglyValid()) ==> d.GetBase(newUb).StronglyValid()))
// @ (old(s.GetBase(ubuf).Valid()) ==> d.GetBase(newUb).Valid()))
// @ ensures err != nil ==> err.ErrorMem()
// @ decreases
func (s *Raw) ToDecoded( /*@ ghost ubuf []byte @*/ ) (d *Decoded, err error) {
//@ unfold acc(s.Mem(ubuf), R6)
//@ unfold acc(s.Base.Mem(), R6)
//@ ghost var base Base = s.Base
//@ ghost var pathMeta MetaHdr = s.Base.PathMeta
//@ ghost validIdxs := s.GetBase(ubuf).StronglyValid()
//@ ghost validIdxs := s.GetBase(ubuf).Valid()
//@ assert validIdxs ==> s.Base.PathMeta.InBounds()
//@ assert validIdxs ==> base.StronglyValid()
//@ assert validIdxs ==> base.Valid()
//@ assert s.Raw[:MetaLen] === ubuf[:MetaLen]

// (VerifiedSCION) In this method, many slice operations are done in two
@@ -207,7 +207,7 @@ func (s *Raw) ToDecoded( /*@ ghost ubuf []byte @*/ ) (d *Decoded, err error) {
//@ ghost if validIdxs {
//@ s.PathMeta.SerializeAndDeserializeLemma(b0, b1, b2, b3)
//@ assert pathMeta == decoded.GetMetaHdr(s.Raw)
//@ assert decoded.GetBase(s.Raw).StronglyValid()
//@ assert decoded.GetBase(s.Raw).Valid()
//@ }
//@ sl.Unslice_Bytes(ubuf, 0, len(s.Raw), HalfPerm)
//@ sl.Unslice_Bytes(ubuf, 0, len(s.Raw), HalfPerm)
@@ -265,17 +265,16 @@ func (s *Raw) IncPath( /*@ ghost ubuf []byte @*/ ) (r error) {
//@ tail := ubuf[MetaLen:]
//@ unfold acc(sl.Bytes(tail, 0, len(tail)), R50)
//@ oldHfIdxSeg := oldCurrHfIdx-oldPrevSegLen
//@ WidenCurrSeg(ubuf, oldOffset + MetaLen, oldCurrInfIdx, oldHfIdxSeg,
//@ oldSegLen, MetaLen, MetaLen, len(ubuf))
//@ WidenLeftSeg(ubuf, oldCurrInfIdx + 1, oldSegs, MetaLen, MetaLen, len(ubuf))
//@ WidenMidSeg(ubuf, oldCurrInfIdx + 2, oldSegs, MetaLen, MetaLen, len(ubuf))
//@ WidenRightSeg(ubuf, oldCurrInfIdx - 1, oldSegs, MetaLen, MetaLen, len(ubuf))
//@ LenCurrSeg(tail, oldOffset, oldCurrInfIdx, oldHfIdxSeg, oldSegLen)
//@ WidenCurrSeg(ubuf, oldOffset + MetaLen, oldCurrInfIdx, oldHfIdxSeg, oldSegLen, MetaLen, MetaLen, len(ubuf))
//@ WidenLeftSeg(ubuf, oldCurrInfIdx + 1, oldSegs, MetaLen, MetaLen, len(ubuf))
//@ WidenMidSeg(ubuf, oldCurrInfIdx + 2, oldSegs, MetaLen, MetaLen, len(ubuf))
//@ WidenRightSeg(ubuf, oldCurrInfIdx - 1, oldSegs, MetaLen, MetaLen, len(ubuf))
//@ LenCurrSeg(tail, oldOffset, oldCurrInfIdx, oldHfIdxSeg, oldSegLen)
//@ oldAbsPkt := reveal s.absPkt(ubuf)
//@ sl.SplitRange_Bytes(ubuf, 0, MetaLen, HalfPerm)
//@ unfold acc(s.Base.Mem(), R2)
err := s.PathMeta.SerializeTo(s.Raw[:MetaLen])
//@ assert s.Base.StronglyValid()
//@ assert s.Base.Valid()
//@ assert s.PathMeta.InBounds()
//@ v := s.Raw[:MetaLen]
//@ b0 := sl.GetByte(v, 0, MetaLen, 0)
Expand All @@ -284,7 +283,7 @@ func (s *Raw) IncPath( /*@ ghost ubuf []byte @*/ ) (r error) {
//@ b3 := sl.GetByte(v, 0, MetaLen, 3)
//@ s.PathMeta.SerializeAndDeserializeLemma(b0, b1, b2, b3)
//@ assert s.PathMeta.EqAbsHeader(v)
//@ assert RawBytesToBase(v).StronglyValid()
//@ assert RawBytesToBase(v).Valid()
//@ sl.CombineRange_Bytes(ubuf, 0, MetaLen, HalfPerm)
//@ ValidPktMetaHdrSublice(ubuf, MetaLen)
//@ assert s.EqAbsHeader(ubuf) == s.PathMeta.EqAbsHeader(ubuf)
@@ -441,7 +440,7 @@ func (s *Raw) SetInfoField(info path.InfoField, idx int /*@, ghost ubuf []byte @
//@ ghost if idx == currInfIdx {
//@ CurrSegEquality(ubuf, offset, currInfIdx, hfIdxSeg, segLen)
//@ LeftSegEquality(ubuf, currInfIdx+1, segLens)
//@ MidSegEquality(ubuf, currInfIdx+2, segLens)
//@ MidSegEquality(ubuf, currInfIdx+2, segLens)
//@ RightSegEquality(ubuf, currInfIdx-1, segLens)
//@ }
//@ reveal s.absPkt(ubuf)
@@ -582,7 +581,7 @@ func (s *Raw) IsPenultimateHop( /*@ ghost ubuf []byte @*/ ) bool {

// IsLastHop returns whether the current hop is the last hop on the path.
// @ preserves acc(s.Mem(ubuf), R40)
// @ ensures res == s.IsLastHopSpec(ubuf)
// @ ensures res == s.IsLastHopSpec(ubuf)
// @ decreases
func (s *Raw) IsLastHop( /*@ ghost ubuf []byte @*/ ) (res bool) {
//@ unfold acc(s.Mem(ubuf), R40)
Expand All @@ -591,3 +590,16 @@ func (s *Raw) IsLastHop( /*@ ghost ubuf []byte @*/ ) (res bool) {
//@ defer fold acc(s.Base.Mem(), R40)
return int(s.PathMeta.CurrHF) == (s.NumHops - 1)
}

// CurrINFMatchesCurrHF returns whether the path's current hop field
// is in the path's current segment.
// @ preserves acc(s.Mem(ub), R40)
// @ ensures res == s.GetBase(ub).CurrInfMatchesCurrHFSpec()
// @ decreases
func (s *Raw) CurrINFMatchesCurrHF( /*@ ghost ub []byte @*/ ) (res bool) {
// @ unfold acc(s.Mem(ub), R40)
// @ defer fold acc(s.Mem(ub), R40)
// @ unfold acc(s.Base.Mem(), R40)
// @ defer fold acc(s.Base.Mem(), R40)
return s.PathMeta.CurrINF == s.infIndexForHF(s.PathMeta.CurrHF)
}
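
As a hedged usage sketch (not taken from this commit; the wrapper name currHFInCurrSegment and the ghost buffer ubuf are assumptions for illustration), a caller holding R40 access to the path's memory predicate could use the new method to check that the meta header is self-consistent before acting on the current hop field:

// Hypothetical wrapper, for illustration only; it simply forwards to the
// method introduced above, and its contract mirrors that method's contract.
// @ preserves acc(s.Mem(ubuf), R40)
// @ ensures   res == s.GetBase(ubuf).CurrInfMatchesCurrHFSpec()
// @ decreases
func currHFInCurrSegment(s *Raw /*@, ghost ubuf []byte @*/) (res bool) {
	return s.CurrINFMatchesCurrHF( /*@ ubuf @*/ )
}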
7 changes: 4 additions & 3 deletions pkg/slayers/path/scion/raw_spec.gobra
@@ -181,7 +181,8 @@ ghost
requires acc(r.Mem(ub), _)
decreases
pure func (r *Raw) GetCurrHF(ghost ub []byte) uint8 {
return unfolding acc(r.Mem(ub), _) in (unfolding acc(r.Base.Mem(), _) in r.PathMeta.CurrHF)
return unfolding acc(r.Mem(ub), _) in
(unfolding acc(r.Base.Mem(), _) in r.PathMeta.CurrHF)
}

ghost
@@ -437,7 +438,7 @@ pure func validPktMetaHdr(raw []byte) bool {
let segs := io.CombineSegLens(seg1, seg2, seg3) in
let base := RawBytesToBase(raw) in
0 < metaHdr.SegLen[0] &&
base.FullyValid() &&
base.Valid() &&
PktLen(segs, MetaLen) <= len(raw)
}

@@ -462,7 +463,7 @@ func ValidPktMetaHdrSublice(raw []byte, idx int) {
ghost
requires acc(s.Mem(ub), R54)
requires acc(sl.Bytes(ub, 0, len(ub)), R55)
requires s.GetBase(ub).FullyValid()
requires s.GetBase(ub).Valid()
requires s.GetBase(ub).EqAbsHeader(ub)
ensures acc(sl.Bytes(ub, 0, len(ub)), R55)
ensures acc(s.Mem(ub), R54)
21 changes: 18 additions & 3 deletions pkg/slayers/scion_spec.gobra
@@ -189,9 +189,9 @@ pure func (s *SCION) ValidPathMetaData(ghost ub []byte) bool {
return unfolding acc(s.Mem(ub), _) in
let ubPath := s.UBPath(ub) in
(typeOf(s.Path) == type[*scion.Raw] ==>
s.Path.(*scion.Raw).GetBase(ubPath).StronglyValid()) &&
s.Path.(*scion.Raw).GetBase(ubPath).Valid()) &&
(typeOf(s.Path) == type[*epic.Path] ==>
s.Path.(*epic.Path).GetBase(ubPath).StronglyValid())
s.Path.(*epic.Path).GetBase(ubPath).Valid())
}

// TODO: simplify the body of the predicate when let expressions
Expand Down Expand Up @@ -470,7 +470,7 @@ pure func ValidPktMetaHdr(raw []byte) bool {
let segs := io.CombineSegLens(seg1, seg2, seg3) in
let base := scion.Base{metaHdr, segs.NumInfoFields(), segs.TotalHops()} in
0 < metaHdr.SegLen[0] &&
base.FullyValid() &&
base.Valid() &&
scion.PktLen(segs, start + scion.MetaLen) <= length
}

Expand All @@ -486,6 +486,21 @@ pure func IsSupportedPkt(raw []byte) bool {
nextHdr != L4SCMP
}

ghost
requires CmnHdrLen <= idx && idx <= len(raw)
preserves acc(sl.Bytes(raw, 0, len(raw)), R55)
preserves acc(sl.Bytes(raw[:idx], 0, idx), R55)
ensures IsSupportedPkt(raw) == IsSupportedPkt(raw[:idx])
decreases
func IsSupportedPktSubslice(raw []byte, idx int) {
unfold acc(sl.Bytes(raw, 0, len(raw)), R56)
unfold acc(sl.Bytes(raw[:idx], 0, idx), R56)
reveal IsSupportedPkt(raw)
reveal IsSupportedPkt(raw[:idx])
fold acc(sl.Bytes(raw, 0, len(raw)), R56)
fold acc(sl.Bytes(raw[:idx], 0, idx), R56)
}

ghost
requires acc(sl.Bytes(ub, 0, len(ub)), _)
requires CmnHdrLen <= len(ub)