test_ID | test_file | ground_truth | hints_removed
---|---|---|---|
600 | dafny-synthesis_task_id_606.dfy | method DegreesToRadians(degrees: real) returns (radians: real)
ensures radians == degrees * 3.14159265358979323846 / 180.0
{
radians := degrees * 3.14159265358979323846 / 180.0;
} | method DegreesToRadians(degrees: real) returns (radians: real)
ensures radians == degrees * 3.14159265358979323846 / 180.0
{
radians := degrees * 3.14159265358979323846 / 180.0;
} |
601 | dafny-synthesis_task_id_61.dfy | predicate IsDigit(c: char)
{
48 <= c as int <= 57
}
method CountSubstringsWithSumOfDigitsEqualToLength(s: string) returns (count: int)
ensures count >= 0
{
count := 0;
for i := 0 to |s|
invariant 0 <= i <= |s|
{
var sum := 0;
for j := i to |s|
invariant i <= j <= |s|
invariant sum >= 0
invariant sum <= j - i
{
if j == |s| || !IsDigit(s[j]) {
if sum == j - i {
count := count + 1;
}
break;
}
sum := sum + (s[j] as int - 48);
if sum > j - i + 1 {
break;
}
}
}
}
| predicate IsDigit(c: char)
{
48 <= c as int <= 57
}
method CountSubstringsWithSumOfDigitsEqualToLength(s: string) returns (count: int)
ensures count >= 0
{
count := 0;
for i := 0 to |s|
{
var sum := 0;
for j := i to |s|
{
if j == |s| || !IsDigit(s[j]) {
if sum == j - i {
count := count + 1;
}
break;
}
sum := sum + (s[j] as int - 48);
if sum > j - i + 1 {
break;
}
}
}
}
|
602 | dafny-synthesis_task_id_610.dfy | method RemoveElement(s: array<int>, k: int) returns (v: array<int>)
requires 0 <= k < s.Length
ensures v.Length == s.Length - 1
ensures forall i :: 0 <= i < k ==> v[i] == s[i]
ensures forall i :: k <= i < v.Length ==> v[i] == s[i + 1]
{
v := new int[s.Length - 1];
var i := 0;
while i < k
invariant 0 <= i <= k
invariant forall j :: 0 <= j < i ==> v[j] == s[j]
{
v[i] := s[i];
i := i + 1;
}
assert forall i :: 0 <= i < k ==> v[i] == s[i];
while i < v.Length
invariant k <= i <= v.Length
invariant forall j :: k <= j < i ==> v[j] == s[j + 1]
invariant forall i :: 0 <= i < k ==> v[i] == s[i]
{
v[i] := s[i + 1];
i := i + 1;
}
} | method RemoveElement(s: array<int>, k: int) returns (v: array<int>)
requires 0 <= k < s.Length
ensures v.Length == s.Length - 1
ensures forall i :: 0 <= i < k ==> v[i] == s[i]
ensures forall i :: k <= i < v.Length ==> v[i] == s[i + 1]
{
v := new int[s.Length - 1];
var i := 0;
while i < k
{
v[i] := s[i];
i := i + 1;
}
while i < v.Length
{
v[i] := s[i + 1];
i := i + 1;
}
} |
603 | dafny-synthesis_task_id_616.dfy | method ElementWiseModulo(a: array<int>, b: array<int>) returns (result: array<int>)
requires a != null && b != null
requires a.Length == b.Length
requires forall i :: 0 <= i < b.Length ==> b[i] != 0
ensures result != null
ensures result.Length == a.Length
ensures forall i :: 0 <= i < result.Length ==> result[i] == a[i] % b[i]
{
result := new int[a.Length];
var i := 0;
while i < a.Length
invariant 0 <= i <= a.Length
invariant result.Length == a.Length
invariant forall k :: 0 <= k < i ==> result[k] == a[k] % b[k]
{
result[i] := a[i] % b[i];
i := i + 1;
}
} | method ElementWiseModulo(a: array<int>, b: array<int>) returns (result: array<int>)
requires a != null && b != null
requires a.Length == b.Length
requires forall i :: 0 <= i < b.Length ==> b[i] != 0
ensures result != null
ensures result.Length == a.Length
ensures forall i :: 0 <= i < result.Length ==> result[i] == a[i] % b[i]
{
result := new int[a.Length];
var i := 0;
while i < a.Length
{
result[i] := a[i] % b[i];
i := i + 1;
}
} |
604 | dafny-synthesis_task_id_618.dfy | method ElementWiseDivide(a: seq<int>, b: seq<int>) returns (result: seq<int>)
requires |a| == |b|
requires forall i :: 0 <= i < |b| ==> b[i] != 0
ensures |result| == |a|
ensures forall i :: 0 <= i < |result| ==> result[i] == a[i] / b[i]
{
result := [];
for i := 0 to |a|
invariant 0 <= i <= |a|
invariant |result| == i
invariant forall k :: 0 <= k < i ==> result[k] == a[k] / b[k]
{
result := result + [a[i] / b[i]];
}
} | method ElementWiseDivide(a: seq<int>, b: seq<int>) returns (result: seq<int>)
requires |a| == |b|
requires forall i :: 0 <= i < |b| ==> b[i] != 0
ensures |result| == |a|
ensures forall i :: 0 <= i < |result| ==> result[i] == a[i] / b[i]
{
result := [];
for i := 0 to |a|
{
result := result + [a[i] / b[i]];
}
} |
605 | dafny-synthesis_task_id_62.dfy | method FindSmallest(s: array<int>) returns (min: int)
requires s.Length > 0
ensures forall i :: 0 <= i < s.Length ==> min <= s[i]
ensures exists i :: 0 <= i < s.Length && min == s[i]
{
min := s[0];
for i := 1 to s.Length
invariant 0 <= i <= s.Length
invariant forall k :: 0 <= k < i ==> min <= s[k]
invariant exists k :: 0 <= k < i && min == s[k]
{
if s[i] < min
{
min := s[i];
}
}
} | method FindSmallest(s: array<int>) returns (min: int)
requires s.Length > 0
ensures forall i :: 0 <= i < s.Length ==> min <= s[i]
ensures exists i :: 0 <= i < s.Length && min == s[i]
{
min := s[0];
for i := 1 to s.Length
{
if s[i] < min
{
min := s[i];
}
}
} |
606 | dafny-synthesis_task_id_622.dfy | method FindMedian(a: array<int>, b: array<int>) returns (median: int)
requires a != null && b != null
requires a.Length == b.Length
requires a.Length > 0
requires forall i :: 0 <= i < a.Length - 1 ==> a[i] <= a[i + 1]
requires forall i :: 0 <= i < b.Length - 1 ==> b[i] <= b[i + 1]
ensures median == if (a.Length % 2 == 0) then (a[a.Length / 2 - 1] + b[0]) / 2 else a[a.Length / 2]
{
if (a.Length % 2 == 0) {
median := (a[a.Length / 2 - 1] + b[0]) / 2;
} else {
median := a[a.Length / 2];
}
} | method FindMedian(a: array<int>, b: array<int>) returns (median: int)
requires a != null && b != null
requires a.Length == b.Length
requires a.Length > 0
requires forall i :: 0 <= i < a.Length - 1 ==> a[i] <= a[i + 1]
requires forall i :: 0 <= i < b.Length - 1 ==> b[i] <= b[i + 1]
ensures median == if (a.Length % 2 == 0) then (a[a.Length / 2 - 1] + b[0]) / 2 else a[a.Length / 2]
{
if (a.Length % 2 == 0) {
median := (a[a.Length / 2 - 1] + b[0]) / 2;
} else {
median := a[a.Length / 2];
}
} |
607 | dafny-synthesis_task_id_623.dfy | method PowerOfListElements(l: seq<int>, n: int) returns (result: seq<int>)
requires n >= 0
ensures |result| == |l|
ensures forall i :: 0 <= i < |l| ==> result[i] == Power(l[i], n)
{
result := [];
for i := 0 to |l|
invariant 0 <= i <= |l|
invariant |result| == i
invariant forall k :: 0 <= k < i ==> result[k] == Power(l[k], n)
{
result := result + [Power(l[i], n)];
}
}
function Power(base: int, exponent: int): int
requires exponent >= 0
{
if exponent == 0 then 1
else base * Power(base, exponent-1)
} | method PowerOfListElements(l: seq<int>, n: int) returns (result: seq<int>)
requires n >= 0
ensures |result| == |l|
ensures forall i :: 0 <= i < |l| ==> result[i] == Power(l[i], n)
{
result := [];
for i := 0 to |l|
{
result := result + [Power(l[i], n)];
}
}
function Power(base: int, exponent: int): int
requires exponent >= 0
{
if exponent == 0 then 1
else base * Power(base, exponent-1)
} |
608 | dafny-synthesis_task_id_624.dfy | predicate IsLowerCase(c : char)
{
97 <= c as int <= 122
}
predicate IsLowerUpperPair(c : char, C : char)
{
(c as int) == (C as int) + 32
}
function ShiftMinus32(c : char) : char
{
((c as int - 32) % 128) as char
}
method ToUppercase(s: string) returns (v: string)
ensures |v| == |s|
ensures forall i :: 0 <= i < |s| ==> if IsLowerCase(s[i]) then IsLowerUpperPair(s[i], v[i]) else v[i] == s[i]
{
var s' : string := [];
for i := 0 to |s|
invariant 0 <= i <= |s|
invariant |s'| == i
invariant forall k :: 0 <= k < i && IsLowerCase(s[k]) ==> IsLowerUpperPair(s[k], s'[k])
invariant forall k :: 0 <= k < i && !IsLowerCase(s[k]) ==> s[k] == s'[k]
{
if IsLowerCase(s[i])
{
s' := s' + [ShiftMinus32(s[i])];
}
else
{
s' := s' + [s[i]];
}
}
return s';
}
| predicate IsLowerCase(c : char)
{
97 <= c as int <= 122
}
predicate IsLowerUpperPair(c : char, C : char)
{
(c as int) == (C as int) + 32
}
function ShiftMinus32(c : char) : char
{
((c as int - 32) % 128) as char
}
method ToUppercase(s: string) returns (v: string)
ensures |v| == |s|
ensures forall i :: 0 <= i < |s| ==> if IsLowerCase(s[i]) then IsLowerUpperPair(s[i], v[i]) else v[i] == s[i]
{
var s' : string := [];
for i := 0 to |s|
{
if IsLowerCase(s[i])
{
s' := s' + [ShiftMinus32(s[i])];
}
else
{
s' := s' + [s[i]];
}
}
return s';
}
|
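The character arithmetic above leans on the fact that, in ASCII, a lowercase letter sits exactly 32 code points above its uppercase partner: 'a' as int is 97, and (97 - 32) % 128 == 65, which is 'A'. ShiftMinus32 applies that offset in one direction, and IsLowerUpperPair checks the same offset in the other.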
609 | dafny-synthesis_task_id_625.dfy | method SwapFirstAndLast(a: array<int>)
requires a.Length > 0
modifies a
ensures a[0] == old(a[a.Length - 1])
ensures a[a.Length - 1] == old(a[0])
ensures forall k :: 1 <= k < a.Length - 1 ==> a[k] == old(a[k])
{
var tmp := a[0];
a[0] := a[a.Length - 1];
a[a.Length - 1] := tmp;
} | method SwapFirstAndLast(a: array<int>)
requires a.Length > 0
modifies a
ensures a[0] == old(a[a.Length - 1])
ensures a[a.Length - 1] == old(a[0])
ensures forall k :: 1 <= k < a.Length - 1 ==> a[k] == old(a[k])
{
var tmp := a[0];
a[0] := a[a.Length - 1];
a[a.Length - 1] := tmp;
} |
610 | dafny-synthesis_task_id_626.dfy | method AreaOfLargestTriangleInSemicircle(radius: int) returns (area: int)
requires radius > 0
ensures area == radius * radius
{
area := radius * radius;
} | method AreaOfLargestTriangleInSemicircle(radius: int) returns (area: int)
requires radius > 0
ensures area == radius * radius
{
area := radius * radius;
} |
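For context on the specification: the largest triangle that fits in a semicircle of radius r takes the diameter as its base (length 2r) and the topmost point of the arc as its apex (height r), so its area is (1/2) * 2r * r = r^2, which is why the method simply returns radius * radius (here with integer arithmetic).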
611 | dafny-synthesis_task_id_627.dfy | method SmallestMissingNumber(s: seq<int>) returns (v: int)
requires forall i, j :: 0 <= i < j < |s| ==> s[i] <= s[j]
requires forall i :: 0 <= i < |s| ==> s[i] >= 0
ensures 0 <= v
ensures v !in s
ensures forall k :: 0 <= k < v ==> k in s
{
v := 0;
for i := 0 to |s|
invariant 0 <= i <= |s|
invariant 0 <= v <= i
invariant v !in s[..i]
invariant forall k :: 0 <= k < v && s[k] != v ==> k in s[..i]
{
if s[i] > v
{
break;
}
else
{
if s[i] == v
{
v := v + 1;
}
}
}
assert forall k :: 0 <= k < v && s[k] != v ==> k in s;
} | method SmallestMissingNumber(s: seq<int>) returns (v: int)
requires forall i, j :: 0 <= i < j < |s| ==> s[i] <= s[j]
requires forall i :: 0 <= i < |s| ==> s[i] >= 0
ensures 0 <= v
ensures v !in s
ensures forall k :: 0 <= k < v ==> k in s
{
v := 0;
for i := 0 to |s|
{
if s[i] > v
{
break;
}
else
{
if s[i] == v
{
v := v + 1;
}
}
}
} |
612 | dafny-synthesis_task_id_629.dfy | predicate IsEven(n: int)
{
n % 2 == 0
}
method FindEvenNumbers(arr: array<int>) returns (evenList: seq<int>)
// All numbers in the output are even and exist in the input
ensures forall i :: 0 <= i < |evenList| ==> IsEven(evenList[i]) && evenList[i] in arr[..]
// All even numbers in the input are in the output
ensures forall i :: 0 <= i < arr.Length && IsEven(arr[i]) ==> arr[i] in evenList
{
evenList := [];
for i := 0 to arr.Length
invariant 0 <= i <= arr.Length
invariant 0 <= |evenList| <= i
invariant forall k :: 0 <= k < |evenList| ==> IsEven(evenList[k]) && evenList[k] in arr[..]
invariant forall k :: 0 <= k < i && IsEven(arr[k]) ==> arr[k] in evenList
{
if IsEven(arr[i])
{
evenList := evenList + [arr[i]];
}
}
}
| predicate IsEven(n: int)
{
n % 2 == 0
}
method FindEvenNumbers(arr: array<int>) returns (evenList: seq<int>)
// All numbers in the output are even and exist in the input
ensures forall i :: 0 <= i < |evenList| ==> IsEven(evenList[i]) && evenList[i] in arr[..]
// All even numbers in the input are in the output
ensures forall i :: 0 <= i < arr.Length && IsEven(arr[i]) ==> arr[i] in evenList
{
evenList := [];
for i := 0 to arr.Length
{
if IsEven(arr[i])
{
evenList := evenList + [arr[i]];
}
}
}
|
613 | dafny-synthesis_task_id_632.dfy | method MoveZeroesToEnd(arr: array<int>)
requires arr.Length >= 2
modifies arr
// Same size
ensures arr.Length == old(arr.Length)
// Zeros to the right of the first zero
ensures forall i, j :: 0 <= i < j < arr.Length && arr[i] == 0 ==> arr[j] == 0
// The final array is a permutation of the original one
ensures multiset(arr[..]) == multiset(old(arr[..]))
// Relative order of non-zero elements is preserved
ensures forall n, m /* on old array */:: 0 <= n < m < arr.Length && old(arr[n]) != 0 && old(arr[m]) != 0 ==>
exists k, l /* on new array */:: 0 <= k < l < arr.Length && arr[k] == old(arr[n]) && arr[l] == old(arr[m])
//ensures IsOrderPreserved(arr[..], old(arr[..]))
// Number of zeros is preserved
{
var i := 0;
var j := 0;
assert 0 <= i <= arr.Length;
assert forall k :: 0 <= k < arr.Length ==> arr[k] == old(arr[k]);
//assert(forall n, m :: 0 <= n < m < arr.Length ==> arr[n] == old(arr[n]) && arr[m] == old(arr[m]));
while j < arr.Length
invariant 0 <= i <= j <= arr.Length
// Elements to the right of j are unchanged
invariant forall k :: j <= k < arr.Length ==> old(arr[k]) == arr[k]
// Everything to the left of i is non-zero
invariant forall k :: 0 <= k < i ==> arr[k] != 0
// Everything between i and j, but excluding j, is zero
invariant forall k :: i <= k < j ==> arr[k] == 0
// If there are zeros, they are to the right of i
invariant forall k :: 0 <= k < j && arr[k] == 0 ==> k >= i
// No new numbers are added, up to j
invariant forall k :: 0 <= k < j && arr[k] != old(arr[k]) ==> exists l :: 0 <= l < j && arr[k] == old(arr[l])
// The new array up to j is always a permutation of the original one
invariant multiset(arr[..]) == multiset(old(arr[..]))
// Relative order of non-zero elements is always preserved
//invariant IsOrderPreserved(arr[..], old(arr[..]))
invariant forall n, m /* on old */:: 0 <= n < m < j && old(arr[n]) != 0 && old(arr[m]) != 0 ==>
exists k, l /* on new */:: 0 <= k < l < i && arr[k] == old(arr[n]) && arr[l] == old(arr[m])
{
if arr[j] != 0
{
if i != j
{
assert(arr[j] != 0);
swap(arr, i, j);
assert(forall k :: 0 <= k <= j ==> exists l :: 0 <= l <= j && arr[k] == old(arr[l]));
}
i := i + 1;
}
j := j + 1;
}
assert j == arr.Length;
}
method swap(arr: array<int>, i: int, j: int)
requires arr.Length > 0
requires 0 <= i < arr.Length && 0 <= j < arr.Length
modifies arr
ensures arr[i] == old(arr[j]) && arr[j] == old(arr[i])
ensures forall k :: 0 <= k < arr.Length && k != i && k != j ==> arr[k] == old(arr[k])
ensures multiset(arr[..]) == multiset(old(arr[..]))
{
var tmp := arr[i];
arr[i] := arr[j];
arr[j] := tmp;
}
function count(arr: seq<int>, value: int) : (c: nat)
ensures c <= |arr|
{
if |arr| == 0 then 0 else (if arr[0] == value then 1 else 0) + count(arr[1..], value)
}
| method MoveZeroesToEnd(arr: array<int>)
requires arr.Length >= 2
modifies arr
// Same size
ensures arr.Length == old(arr.Length)
// Zeros to the right of the first zero
ensures forall i, j :: 0 <= i < j < arr.Length && arr[i] == 0 ==> arr[j] == 0
// The final array is a permutation of the original one
ensures multiset(arr[..]) == multiset(old(arr[..]))
// Relative order of non-zero elements is preserved
ensures forall n, m /* on old array */:: 0 <= n < m < arr.Length && old(arr[n]) != 0 && old(arr[m]) != 0 ==>
exists k, l /* on new array */:: 0 <= k < l < arr.Length && arr[k] == old(arr[n]) && arr[l] == old(arr[m])
//ensures IsOrderPreserved(arr[..], old(arr[..]))
// Number of zeros is preserved
{
var i := 0;
var j := 0;
while j < arr.Length
// Elements to the right of j are unchanged
// Everything to the left of i is non-zero
// Everything between i and j, but excluding j, is zero
// If there are zeros, they are to the right of i
// No new numbers are added, up to j
// The new array up to j is always a permutation of the original one
// Relative order of non-zero elements is always preserved
{
if arr[j] != 0
{
if i != j
{
swap(arr, i, j);
}
i := i + 1;
}
j := j + 1;
}
}
method swap(arr: array<int>, i: int, j: int)
requires arr.Length > 0
requires 0 <= i < arr.Length && 0 <= j < arr.Length
modifies arr
ensures arr[i] == old(arr[j]) && arr[j] == old(arr[i])
ensures forall k :: 0 <= k < arr.Length && k != i && k != j ==> arr[k] == old(arr[k])
ensures multiset(arr[..]) == multiset(old(arr[..]))
{
var tmp := arr[i];
arr[i] := arr[j];
arr[j] := tmp;
}
function count(arr: seq<int>, value: int) : (c: nat)
ensures c <= |arr|
{
if |arr| == 0 then 0 else (if arr[0] == value then 1 else 0) + count(arr[1..], value)
}
|
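A small driver makes the intended behaviour of MoveZeroesToEnd concrete; the following is a hypothetical Main sketch, assuming it sits in the same file as the MoveZeroesToEnd and swap methods above.
method Main() {
var a := new int[5];
a[0], a[1], a[2], a[3], a[4] := 0, 1, 0, 3, 12;
MoveZeroesToEnd(a);
// Non-zero elements keep their relative order and all zeros move to the end:
print a[..], "\n"; // expected output: [1, 3, 12, 0, 0]
}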
614 | dafny-synthesis_task_id_637.dfy | method IsBreakEven(costPrice: int, sellingPrice: int) returns (result: bool)
requires costPrice >= 0 && sellingPrice >= 0
ensures result <==> costPrice == sellingPrice
{
result := costPrice == sellingPrice;
} | method IsBreakEven(costPrice: int, sellingPrice: int) returns (result: bool)
requires costPrice >= 0 && sellingPrice >= 0
ensures result <==> costPrice == sellingPrice
{
result := costPrice == sellingPrice;
} |
615 | dafny-synthesis_task_id_641.dfy | method NthNonagonalNumber(n: int) returns (number: int)
requires n >= 0
ensures number == n * (7 * n - 5) / 2
{
number := n * (7 * n - 5) / 2;
} | method NthNonagonalNumber(n: int) returns (number: int)
requires n >= 0
ensures number == n * (7 * n - 5) / 2
{
number := n * (7 * n - 5) / 2;
} |
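As a quick check of the closed form n * (7 * n - 5) / 2: for n = 1, 2, 3, 4 it yields 1, 9, 24, 46, the first four nonagonal numbers. Since n and 7n - 5 always have opposite parity, their product is even and the integer division by 2 is exact.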
616 | dafny-synthesis_task_id_644.dfy | method Reverse(a: array<int>)
modifies a;
ensures forall k :: 0 <= k < a.Length ==> a[k] == old(a[(a.Length-1) - k]);
{
var l := a.Length - 1;
var i := 0;
while (i < l-i)
invariant 0 <= i <= (l+1)/2;
invariant forall k :: 0 <= k < i || l-i < k <= l ==> a[k] == old(a[l-k]);
invariant forall k :: i <= k <= l-i ==> a[k] == old(a[k]);
{
a[i], a[l-i] := a[l-i], a[i];
i := i + 1;
}
}
method ReverseUptoK(s: array<int>, k: int)
modifies s
requires 2 <= k <= s.Length
ensures forall i :: 0 <= i < k ==> s[i] == old(s[k - 1 - i])
ensures forall i :: k <= i < s.Length ==> s[i] == old(s[i])
{
var l := k - 1;
var i := 0;
while (i < l-i)
invariant 0 <= i <= (l+1)/2;
invariant forall p :: 0 <= p < i || l-i < p <= l ==> s[p] == old(s[l-p]);
invariant forall p :: i <= p <= l-i ==> s[p] == old(s[p]);
invariant forall p :: k <= p < s.Length ==> s[p] == old(s[p])
{
s[i], s[l-i] := s[l-i], s[i];
i := i + 1;
}
} | method Reverse(a: array<int>)
modifies a;
ensures forall k :: 0 <= k < a.Length ==> a[k] == old(a[(a.Length-1) - k]);
{
var l := a.Length - 1;
var i := 0;
while (i < l-i)
{
a[i], a[l-i] := a[l-i], a[i];
i := i + 1;
}
}
method ReverseUptoK(s: array<int>, k: int)
modifies s
requires 2 <= k <= s.Length
ensures forall i :: 0 <= i < k ==> s[i] == old(s[k - 1 - i])
ensures forall i :: k <= i < s.Length ==> s[i] == old(s[i])
{
var l := k - 1;
var i := 0;
while (i < l-i)
{
s[i], s[l-i] := s[l-i], s[i];
i := i + 1;
}
} |
617 | dafny-synthesis_task_id_69.dfy | method ContainsSequence(list: seq<seq<int>>, sub: seq<int>) returns (result: bool)
ensures result <==> (exists i :: 0 <= i < |list| && sub == list[i])
{
result := false;
for i := 0 to |list|
invariant 0 <= i <= |list|
invariant result <==> (exists k :: 0 <= k < i && sub == list[k])
{
if sub == list[i] {
result := true;
break;
}
}
} | method ContainsSequence(list: seq<seq<int>>, sub: seq<int>) returns (result: bool)
ensures result <==> (exists i :: 0 <= i < |list| && sub == list[i])
{
result := false;
for i := 0 to |list|
{
if sub == list[i] {
result := true;
break;
}
}
} |
618 | dafny-synthesis_task_id_70.dfy | method AllSequencesEqualLength(sequences: seq<seq<int>>) returns (result: bool)
ensures result <==> forall i, j :: 0 <= i < |sequences| && 0 <= j < |sequences| ==> |sequences[i]| == |sequences[j]|
{
if |sequences| == 0 {
return true;
}
var firstLength := |sequences[0]|;
result := true;
for i := 1 to |sequences|
invariant 1 <= i <= |sequences|
invariant result <==> forall k :: 0 <= k < i ==> |sequences[k]| == firstLength
{
if |sequences[i]| != firstLength {
result := false;
break;
}
}
} | method AllSequencesEqualLength(sequences: seq<seq<int>>) returns (result: bool)
ensures result <==> forall i, j :: 0 <= i < |sequences| && 0 <= j < |sequences| ==> |sequences[i]| == |sequences[j]|
{
if |sequences| == 0 {
return true;
}
var firstLength := |sequences[0]|;
result := true;
for i := 1 to |sequences|
{
if |sequences[i]| != firstLength {
result := false;
break;
}
}
} |
619 | dafny-synthesis_task_id_728.dfy | method AddLists(a: seq<int>, b: seq<int>) returns (result: seq<int>)
requires |a| == |b|
ensures |result| == |a|
ensures forall i :: 0 <= i < |result| ==> result[i] == a[i] + b[i]
{
result := [];
for i := 0 to |a|
invariant 0 <= i <= |a|
invariant |result| == i
invariant forall k :: 0 <= k < i ==> result[k] == a[k] + b[k]
{
result := result + [a[i] + b[i]];
}
} | method AddLists(a: seq<int>, b: seq<int>) returns (result: seq<int>)
requires |a| == |b|
ensures |result| == |a|
ensures forall i :: 0 <= i < |result| ==> result[i] == a[i] + b[i]
{
result := [];
for i := 0 to |a|
{
result := result + [a[i] + b[i]];
}
} |
620 | dafny-synthesis_task_id_732.dfy | predicate IsSpaceCommaDot(c: char)
{
c == ' ' || c == ',' || c == '.'
}
method ReplaceWithColon(s: string) returns (v: string)
ensures |v| == |s|
ensures forall i :: 0 <= i < |s| ==> (IsSpaceCommaDot(s[i]) ==> v[i] == ':') && (!IsSpaceCommaDot(s[i]) ==> v[i] == s[i])
{
var s' : string := [];
for i := 0 to |s|
invariant 0 <= i <= |s|
invariant |s'| == i
invariant forall k :: 0 <= k < i ==> (IsSpaceCommaDot(s[k]) ==> s'[k] == ':') && (!IsSpaceCommaDot(s[k]) ==> s'[k] == s[k])
{
if IsSpaceCommaDot(s[i])
{
s' := s' + [':'];
}
else
{
s' := s' + [s[i]];
}
}
return s';
} | predicate IsSpaceCommaDot(c: char)
{
c == ' ' || c == ',' || c == '.'
}
method ReplaceWithColon(s: string) returns (v: string)
ensures |v| == |s|
ensures forall i :: 0 <= i < |s| ==> (IsSpaceCommaDot(s[i]) ==> v[i] == ':') && (!IsSpaceCommaDot(s[i]) ==> v[i] == s[i])
{
var s' : string := [];
for i := 0 to |s|
{
if IsSpaceCommaDot(s[i])
{
s' := s' + [':'];
}
else
{
s' := s' + [s[i]];
}
}
return s';
} |
621 | dafny-synthesis_task_id_733.dfy | method FindFirstOccurrence(arr: array<int>, target: int) returns (index: int)
requires arr != null
requires forall i, j :: 0 <= i < j < arr.Length ==> arr[i] <= arr[j]
ensures 0 <= index < arr.Length ==> arr[index] == target
ensures index == -1 ==> forall i :: 0 <= i < arr.Length ==> arr[i] != target
ensures forall i :: 0 <= i < arr.Length ==> arr[i] == old(arr[i])
{
index := -1;
for i := 0 to arr.Length
invariant 0 <= i <= arr.Length
invariant index == -1 ==> forall k :: 0 <= k < i ==> arr[k] != target
invariant 0 <= index < i ==> arr[index] == target
invariant forall k :: 0 <= k < arr.Length ==> arr[k] == old(arr[k])
{
if arr[i] == target
{
index := i;
break;
}
if arr[i] > target
{
break;
}
}
} | method FindFirstOccurrence(arr: array<int>, target: int) returns (index: int)
requires arr != null
requires forall i, j :: 0 <= i < j < arr.Length ==> arr[i] <= arr[j]
ensures 0 <= index < arr.Length ==> arr[index] == target
ensures index == -1 ==> forall i :: 0 <= i < arr.Length ==> arr[i] != target
ensures forall i :: 0 <= i < arr.Length ==> arr[i] == old(arr[i])
{
index := -1;
for i := 0 to arr.Length
{
if arr[i] == target
{
index := i;
break;
}
if arr[i] > target
{
break;
}
}
} |
622 | dafny-synthesis_task_id_741.dfy | method AllCharactersSame(s: string) returns (result: bool)
ensures result ==> forall i, j :: 0 <= i < |s| && 0 <= j < |s| ==> s[i] == s[j]
ensures !result ==> (|s| > 1) && (exists i, j :: 0 <= i < |s| && 0 <= j < |s| && i != j && s[i] != s[j])
{
if |s| <= 1 {
return true;
}
var firstChar := s[0];
result := true;
for i := 1 to |s|
invariant 0 <= i <= |s|
invariant result ==> forall k :: 0 <= k < i ==> s[k] == firstChar
{
if s[i] != firstChar {
result := false;
break;
}
}
} | method AllCharactersSame(s: string) returns (result: bool)
ensures result ==> forall i, j :: 0 <= i < |s| && 0 <= j < |s| ==> s[i] == s[j]
ensures !result ==> (|s| > 1) && (exists i, j :: 0 <= i < |s| && 0 <= j < |s| && i != j && s[i] != s[j])
{
if |s| <= 1 {
return true;
}
var firstChar := s[0];
result := true;
for i := 1 to |s|
{
if s[i] != firstChar {
result := false;
break;
}
}
} |
623 | dafny-synthesis_task_id_743.dfy | method RotateRight(l: seq<int>, n: int) returns (r: seq<int>)
requires n >= 0
ensures |r| == |l|
ensures forall i :: 0 <= i < |l| ==> r[i] == l[(i - n + |l|) % |l|]
{
var rotated: seq<int> := [];
for i := 0 to |l|
invariant 0 <= i <= |l|
invariant |rotated| == i
invariant forall k :: 0 <= k < i ==> rotated[k] == l[(k - n + |l|) % |l|]
{
rotated := rotated + [l[(i - n + |l|) % |l|]];
}
return rotated;
} | method RotateRight(l: seq<int>, n: int) returns (r: seq<int>)
requires n >= 0
ensures |r| == |l|
ensures forall i :: 0 <= i < |l| ==> r[i] == l[(i - n + |l|) % |l|]
{
var rotated: seq<int> := [];
for i := 0 to |l|
{
rotated := rotated + [l[(i - n + |l|) % |l|]];
}
return rotated;
} |
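A worked instance of the index arithmetic above: for l = [1, 2, 3, 4, 5] and n = 2, position i of the result reads l[(i - 2 + 5) % 5], so r[0] = l[3] = 4, r[1] = l[4] = 5, r[2] = l[0] = 1, giving [4, 5, 1, 2, 3], i.e. the last two elements rotated to the front.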
624 | dafny-synthesis_task_id_750.dfy | method AddTupleToList(l: seq<(int, int)>, t: (int, int)) returns (r: seq<(int, int)>)
ensures |r| == |l| + 1
ensures r[|r| - 1] == t
ensures forall i :: 0 <= i < |l| ==> r[i] == l[i]
{
r := l + [t];
} | method AddTupleToList(l: seq<(int, int)>, t: (int, int)) returns (r: seq<(int, int)>)
ensures |r| == |l| + 1
ensures r[|r| - 1] == t
ensures forall i :: 0 <= i < |l| ==> r[i] == l[i]
{
r := l + [t];
} |
625 | dafny-synthesis_task_id_751.dfy | method IsMinHeap(a: array<int>) returns (result: bool)
requires a != null
ensures result ==> forall i :: 0 <= i < a.Length / 2 ==> a[i] <= a[2*i + 1] && (2*i + 2 == a.Length || a[i] <= a[2*i + 2])
ensures !result ==> exists i :: 0 <= i < a.Length / 2 && (a[i] > a[2*i + 1] || (2*i + 2 != a.Length && a[i] > a[2*i + 2]))
{
result := true;
for i := 0 to a.Length / 2
invariant 0 <= i <= a.Length / 2
invariant result ==> forall k :: 0 <= k < i ==> a[k] <= a[2*k + 1] && (2*k + 2 == a.Length || a[k] <= a[2*k + 2])
{
if a[i] > a[2*i + 1] || (2*i + 2 != a.Length && a[i] > a[2*i + 2]) {
result := false;
break;
}
}
} | method IsMinHeap(a: array<int>) returns (result: bool)
requires a != null
ensures result ==> forall i :: 0 <= i < a.Length / 2 ==> a[i] <= a[2*i + 1] && (2*i + 2 == a.Length || a[i] <= a[2*i + 2])
ensures !result ==> exists i :: 0 <= i < a.Length / 2 && (a[i] > a[2*i + 1] || (2*i + 2 != a.Length && a[i] > a[2*i + 2]))
{
result := true;
for i := 0 to a.Length / 2
{
if a[i] > a[2*i + 1] || (2*i + 2 != a.Length && a[i] > a[2*i + 2]) {
result := false;
break;
}
}
} |
626 | dafny-synthesis_task_id_755.dfy | function MinPair(s: seq<int>) : (r: int)
requires |s| == 2
ensures s[0] <= s[1] <==> r == s[0]
ensures s[0] > s[1] ==> r == s[1]
{
if s[0] <= s[1] then s[0] else s[1]
}
function min(s: seq<int>) : (r: int)
requires |s| >= 2
ensures forall i :: 0 <= i < |s| ==> r <= s[i]
{
if |s| == 2 then MinPair(s)
else MinPair([s[0], min(s[1..])])
}
method SecondSmallest(s: array<int>) returns (secondSmallest: int)
requires s.Length >= 2
// There must be at least 2 different values, a minimum and another one
requires exists i, j :: 0 <= i < s.Length && 0 <= j < s.Length && i != j && s[i] == min(s[..]) && s[j] != s[i]
ensures exists i, j :: 0 <= i < s.Length && 0 <= j < s.Length && i != j && s[i] == min(s[..]) && s[j] == secondSmallest
ensures forall k :: 0 <= k < s.Length && s[k] != min(s[..]) ==> s[k] >= secondSmallest
{
var minIndex := 0;
var secondMinIndex := 1;
if s[1] < s[0] {
minIndex := 1;
secondMinIndex := 0;
}
for i := 2 to s.Length
invariant 0 <= i <= s.Length
invariant 0 <= minIndex < i
invariant 0 <= secondMinIndex < i
invariant minIndex != secondMinIndex
invariant forall k :: 0 <= k < i ==> s[k] >= s[minIndex]
invariant forall k :: 0 <= k < i && k != minIndex ==> s[k] >= s[secondMinIndex]
{
if s[i] < s[minIndex] {
secondMinIndex := minIndex;
minIndex := i;
} else if s[i] < s[secondMinIndex] {
secondMinIndex := i;
}
}
secondSmallest := s[secondMinIndex];
}
| function MinPair(s: seq<int>) : (r: int)
requires |s| == 2
ensures s[0] <= s[1] <==> r == s[0]
ensures s[0] > s[1] ==> r == s[1]
{
if s[0] <= s[1] then s[0] else s[1]
}
function min(s: seq<int>) : (r: int)
requires |s| >= 2
ensures forall i :: 0 <= i < |s| ==> r <= s[i]
{
if |s| == 2 then MinPair(s)
else MinPair([s[0], min(s[1..])])
}
method SecondSmallest(s: array<int>) returns (secondSmallest: int)
requires s.Length >= 2
// There must be at least 2 different values, a minimum and another one
requires exists i, j :: 0 <= i < s.Length && 0 <= j < s.Length && i != j && s[i] == min(s[..]) && s[j] != s[i]
ensures exists i, j :: 0 <= i < s.Length && 0 <= j < s.Length && i != j && s[i] == min(s[..]) && s[j] == secondSmallest
ensures forall k :: 0 <= k < s.Length && s[k] != min(s[..]) ==> s[k] >= secondSmallest
{
var minIndex := 0;
var secondMinIndex := 1;
if s[1] < s[0] {
minIndex := 1;
secondMinIndex := 0;
}
for i := 2 to s.Length
{
if s[i] < s[minIndex] {
secondMinIndex := minIndex;
minIndex := i;
} else if s[i] < s[secondMinIndex] {
secondMinIndex := i;
}
}
secondSmallest := s[secondMinIndex];
}
|
627 | dafny-synthesis_task_id_759.dfy | method IsDecimalWithTwoPrecision(s: string) returns (result: bool)
ensures result ==> (exists i :: 0 <= i < |s| && s[i] == '.' && |s| - i - 1 == 2)
ensures !result ==> !(exists i :: 0 <= i < |s| && s[i] == '.' && |s| - i - 1 == 2)
{
result := false;
for i := 0 to |s|
invariant 0 <= i <= |s|
invariant result <==> (exists k :: 0 <= k < i && s[k] == '.' && |s| - k - 1 == 2)
{
if s[i] == '.' && |s| - i - 1 == 2 {
result := true;
break;
}
}
} | method IsDecimalWithTwoPrecision(s: string) returns (result: bool)
ensures result ==> (exists i :: 0 <= i < |s| && s[i] == '.' && |s| - i - 1 == 2)
ensures !result ==> !(exists i :: 0 <= i < |s| && s[i] == '.' && |s| - i - 1 == 2)
{
result := false;
for i := 0 to |s|
{
if s[i] == '.' && |s| - i - 1 == 2 {
result := true;
break;
}
}
} |
628 | dafny-synthesis_task_id_760.dfy | method HasOnlyOneDistinctElement(a: array<int>) returns (result: bool)
requires a != null
ensures result ==> forall i, j :: 0 <= i < a.Length && 0 <= j < a.Length ==> a[i] == a[j]
ensures !result ==> exists i, j :: 0 <= i < a.Length && 0 <= j < a.Length && a[i] != a[j]
{
if a.Length == 0 {
return true;
}
var firstElement := a[0];
result := true;
for i := 1 to a.Length
invariant 1 <= i <= a.Length
invariant result ==> forall k :: 0 <= k < i ==> a[k] == firstElement
{
if a[i] != firstElement {
result := false;
break;
}
}
} | method HasOnlyOneDistinctElement(a: array<int>) returns (result: bool)
requires a != null
ensures result ==> forall i, j :: 0 <= i < a.Length && 0 <= j < a.Length ==> a[i] == a[j]
ensures !result ==> exists i, j :: 0 <= i < a.Length && 0 <= j < a.Length && a[i] != a[j]
{
if a.Length == 0 {
return true;
}
var firstElement := a[0];
result := true;
for i := 1 to a.Length
{
if a[i] != firstElement {
result := false;
break;
}
}
} |
629 | dafny-synthesis_task_id_762.dfy | method IsMonthWith30Days(month: int) returns (result: bool)
requires 1 <= month <= 12
ensures result <==> month == 4 || month == 6 || month == 9 || month == 11
{
result := month == 4 || month == 6 || month == 9 || month == 11;
} | method IsMonthWith30Days(month: int) returns (result: bool)
requires 1 <= month <= 12
ensures result <==> month == 4 || month == 6 || month == 9 || month == 11
{
result := month == 4 || month == 6 || month == 9 || month == 11;
} |
630 | dafny-synthesis_task_id_764.dfy | predicate IsDigit(c: char)
{
48 <= c as int <= 57
}
method CountDigits(s: string) returns (count: int)
ensures count >= 0
ensures count == | set i: int | 0 <= i < |s| && IsDigit(s[i])|
{
var digits := set i: int | 0 <= i < |s| && IsDigit(s[i]);
count := |digits|;
}
| predicate IsDigit(c: char)
{
48 <= c as int <= 57
}
method CountDigits(s: string) returns (count: int)
ensures count >= 0
ensures count == | set i: int | 0 <= i < |s| && IsDigit(s[i])|
{
var digits := set i: int | 0 <= i < |s| && IsDigit(s[i]);
count := |digits|;
}
|
631 | dafny-synthesis_task_id_769.dfy | method Difference(a: seq<int>, b: seq<int>) returns (diff: seq<int>)
ensures forall x :: x in diff <==> (x in a && x !in b)
ensures forall i, j :: 0 <= i < j < |diff| ==> diff[i] != diff[j]
{
diff := [];
for i := 0 to |a|
invariant 0 <= i <= |a|
invariant forall x :: x in diff <==> (x in a[..i] && x !in b)
invariant forall i, j :: 0 <= i < j < |diff| ==> diff[i] != diff[j]
{
if a[i] !in b && a[i] !in diff
{
diff := diff + [a[i]];
}
}
} | method Difference(a: seq<int>, b: seq<int>) returns (diff: seq<int>)
ensures forall x :: x in diff <==> (x in a && x !in b)
ensures forall i, j :: 0 <= i < j < |diff| ==> diff[i] != diff[j]
{
diff := [];
for i := 0 to |a|
{
if a[i] !in b && a[i] !in diff
{
diff := diff + [a[i]];
}
}
} |
632 | dafny-synthesis_task_id_77.dfy | method IsDivisibleBy11(n: int) returns (result: bool)
ensures result <==> n % 11 == 0
{
result := n % 11 == 0;
} | method IsDivisibleBy11(n: int) returns (result: bool)
ensures result <==> n % 11 == 0
{
result := n % 11 == 0;
} |
633 | dafny-synthesis_task_id_770.dfy | method SumOfFourthPowerOfOddNumbers(n: int) returns (sum: int)
requires n > 0
ensures sum == n * (2 * n + 1) * (24 * n * n * n - 12 * n * n - 14 * n + 7) / 15
{
sum := 0;
var i := 1;
for k := 0 to n
invariant 0 <= k <= n
invariant i == 2 * k + 1
invariant sum == k * (2 * k + 1) * (24 * k * k * k - 12 * k * k - 14 * k + 7) / 15
{
sum := sum + i * i * i * i;
i := i + 2;
}
} | method SumOfFourthPowerOfOddNumbers(n: int) returns (sum: int)
requires n > 0
ensures sum == n * (2 * n + 1) * (24 * n * n * n - 12 * n * n - 14 * n + 7) / 15
{
sum := 0;
var i := 1;
for k := 0 to n
{
sum := sum + i * i * i * i;
i := i + 2;
}
} |
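A quick sanity check of the closed form: for n = 2 the sum of the fourth powers of the first two odd numbers is 1^4 + 3^4 = 82, and the formula gives 2 * 5 * (24*8 - 12*4 - 14*2 + 7) / 15 = 2 * 5 * 123 / 15 = 82.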
634 | dafny-synthesis_task_id_775.dfy | predicate IsOdd(n: int)
{
n % 2 == 1
}
method IsOddAtIndexOdd(a: array<int>) returns (result: bool)
ensures result <==> forall i :: 0 <= i < a.Length ==> (IsOdd(i) ==> IsOdd(a[i]))
{
result := true;
for i := 0 to a.Length
invariant 0 <= i <= a.Length
invariant result <==> forall k :: 0 <= k < i ==> (IsOdd(k) ==> IsOdd(a[k]))
{
if IsOdd(i) && !IsOdd(a[i])
{
result := false;
break;
}
}
} | predicate IsOdd(n: int)
{
n % 2 == 1
}
method IsOddAtIndexOdd(a: array<int>) returns (result: bool)
ensures result <==> forall i :: 0 <= i < a.Length ==> (IsOdd(i) ==> IsOdd(a[i]))
{
result := true;
for i := 0 to a.Length
{
if IsOdd(i) && !IsOdd(a[i])
{
result := false;
break;
}
}
} |
635 | dafny-synthesis_task_id_776.dfy | predicate IsVowel(c: char)
{
c in {'a', 'e', 'i', 'o', 'u', 'A', 'E', 'I', 'O', 'U'}
}
method CountVowelNeighbors(s: string) returns (count: int)
ensures count >= 0
ensures count == | set i: int | 1 <= i < |s|-1 && IsVowel(s[i-1]) && IsVowel(s[i+1]) |
{
var vowels := set i: int | 1 <= i < |s|-1 && IsVowel(s[i-1]) && IsVowel(s[i+1]);
count := |vowels|;
} | predicate IsVowel(c: char)
{
c in {'a', 'e', 'i', 'o', 'u', 'A', 'E', 'I', 'O', 'U'}
}
method CountVowelNeighbors(s: string) returns (count: int)
ensures count >= 0
ensures count == | set i: int | 1 <= i < |s|-1 && IsVowel(s[i-1]) && IsVowel(s[i+1]) |
{
var vowels := set i: int | 1 <= i < |s|-1 && IsVowel(s[i-1]) && IsVowel(s[i+1]);
count := |vowels|;
} |
636 | dafny-synthesis_task_id_784.dfy | predicate IsEven(n: int)
{
n % 2 == 0
}
predicate IsOdd(n: int)
{
n % 2 != 0
}
predicate IsFirstEven(evenIndex: int, lst: seq<int>)
requires 0 <= evenIndex < |lst|
requires IsEven(lst[evenIndex])
{
forall i :: 0 <= i < evenIndex ==> IsOdd(lst[i])
}
predicate IsFirstOdd(oddIndex: int, lst: seq<int>)
requires 0 <= oddIndex < |lst|
requires IsOdd(lst[oddIndex])
{
forall i :: 0 <= i < oddIndex ==> IsEven(lst[i])
}
method FirstEvenOddIndices(lst : seq<int>) returns (evenIndex: int, oddIndex : int)
requires |lst| >= 2
requires exists i :: 0 <= i < |lst| && IsEven(lst[i])
requires exists i :: 0 <= i < |lst| && IsOdd(lst[i])
ensures 0 <= evenIndex < |lst|
ensures 0 <= oddIndex < |lst|
// This is the postcondition that ensures that it's the first, not just any
ensures IsEven(lst[evenIndex]) && IsFirstEven(evenIndex, lst)
ensures IsOdd(lst[oddIndex]) && IsFirstOdd(oddIndex, lst)
{
for i := 0 to |lst|
invariant 0 <= i <= |lst|
invariant forall j :: 0 <= j < i ==> IsOdd(lst[j])
{
if IsEven(lst[i])
{
evenIndex := i;
break;
}
}
for i := 0 to |lst|
invariant 0 <= i <= |lst|
invariant forall j :: 0 <= j < i ==> IsEven(lst[j])
{
if IsOdd(lst[i])
{
oddIndex := i;
break;
}
}
}
method ProductEvenOdd(lst: seq<int>) returns (product : int)
requires |lst| >= 2
requires exists i :: 0 <= i < |lst| && IsEven(lst[i])
requires exists i :: 0 <= i < |lst| && IsOdd(lst[i])
ensures exists i, j :: 0 <= i < |lst| && IsEven(lst[i]) && IsFirstEven(i, lst) &&
0 <= j < |lst| && IsOdd(lst[j]) && IsFirstOdd(j, lst) && product == lst[i] * lst[j]
{
var evenIndex, oddIndex := FirstEvenOddIndices(lst);
product := lst[evenIndex] * lst[oddIndex];
} | predicate IsEven(n: int)
{
n % 2 == 0
}
predicate IsOdd(n: int)
{
n % 2 != 0
}
predicate IsFirstEven(evenIndex: int, lst: seq<int>)
requires 0 <= evenIndex < |lst|
requires IsEven(lst[evenIndex])
{
forall i :: 0 <= i < evenIndex ==> IsOdd(lst[i])
}
predicate IsFirstOdd(oddIndex: int, lst: seq<int>)
requires 0 <= oddIndex < |lst|
requires IsOdd(lst[oddIndex])
{
forall i :: 0 <= i < oddIndex ==> IsEven(lst[i])
}
method FirstEvenOddIndices(lst : seq<int>) returns (evenIndex: int, oddIndex : int)
requires |lst| >= 2
requires exists i :: 0 <= i < |lst| && IsEven(lst[i])
requires exists i :: 0 <= i < |lst| && IsOdd(lst[i])
ensures 0 <= evenIndex < |lst|
ensures 0 <= oddIndex < |lst|
// This is the postcondition that ensures that it's the first, not just any
ensures IsEven(lst[evenIndex]) && IsFirstEven(evenIndex, lst)
ensures IsOdd(lst[oddIndex]) && IsFirstOdd(oddIndex, lst)
{
for i := 0 to |lst|
{
if IsEven(lst[i])
{
evenIndex := i;
break;
}
}
for i := 0 to |lst|
{
if IsOdd(lst[i])
{
oddIndex := i;
break;
}
}
}
method ProductEvenOdd(lst: seq<int>) returns (product : int)
requires |lst| >= 2
requires exists i :: 0 <= i < |lst| && IsEven(lst[i])
requires exists i :: 0 <= i < |lst| && IsOdd(lst[i])
ensures exists i, j :: 0 <= i < |lst| && IsEven(lst[i]) && IsFirstEven(i, lst) &&
0 <= j < |lst| && IsOdd(lst[j]) && IsFirstOdd(j, lst) && product == lst[i] * lst[j]
{
var evenIndex, oddIndex := FirstEvenOddIndices(lst);
product := lst[evenIndex] * lst[oddIndex];
} |
637 | dafny-synthesis_task_id_79.dfy | method IsLengthOdd(s: string) returns (result: bool)
ensures result <==> |s| % 2 == 1
{
result := |s| % 2 == 1;
} | method IsLengthOdd(s: string) returns (result: bool)
ensures result <==> |s| % 2 == 1
{
result := |s| % 2 == 1;
} |
638 | dafny-synthesis_task_id_790.dfy | predicate IsEven(n: int)
{
n % 2 == 0
}
method IsEvenAtIndexEven(lst: seq<int>) returns (result: bool)
ensures result <==> forall i :: 0 <= i < |lst| ==> (IsEven(i) ==> IsEven(lst[i]))
{
result := true;
for i := 0 to |lst|
invariant 0 <= i <= |lst|
invariant result <==> forall k :: 0 <= k < i ==> (IsEven(k) ==> IsEven(lst[k]))
{
if IsEven(i) && !IsEven(lst[i])
{
result := false;
break;
}
}
} | predicate IsEven(n: int)
{
n % 2 == 0
}
method IsEvenAtIndexEven(lst: seq<int>) returns (result: bool)
ensures result <==> forall i :: 0 <= i < |lst| ==> (IsEven(i) ==> IsEven(lst[i]))
{
result := true;
for i := 0 to |lst|
{
if IsEven(i) && !IsEven(lst[i])
{
result := false;
break;
}
}
} |
639 | dafny-synthesis_task_id_792.dfy | method CountLists(lists: seq<seq<int>>) returns (count: int)
ensures count >= 0
ensures count == |lists|
{
count := |lists|;
} | method CountLists(lists: seq<seq<int>>) returns (count: int)
ensures count >= 0
ensures count == |lists|
{
count := |lists|;
} |
640 | dafny-synthesis_task_id_793.dfy | method LastPosition(arr: array<int>, elem: int) returns (pos: int)
requires arr.Length > 0
requires forall i, j :: 0 <= i < j < arr.Length ==> arr[i] <= arr[j]
ensures pos == -1 || (0 <= pos < arr.Length && arr[pos] == elem && (pos <= arr.Length - 1 || arr[pos + 1] > elem))
ensures forall i :: 0 <= i < arr.Length ==> arr[i] == old(arr[i])
{
pos := -1;
for i := 0 to arr.Length - 1
invariant 0 <= i <= arr.Length
invariant pos == -1 || (0 <= pos < i && arr[pos] == elem && (pos == i - 1 || arr[pos + 1] > elem))
invariant forall k :: 0 <= k < arr.Length ==> arr[k] == old(arr[k])
{
if arr[i] == elem
{
pos := i;
}
}
} | method LastPosition(arr: array<int>, elem: int) returns (pos: int)
requires arr.Length > 0
requires forall i, j :: 0 <= i < j < arr.Length ==> arr[i] <= arr[j]
ensures pos == -1 || (0 <= pos < arr.Length && arr[pos] == elem && (pos <= arr.Length - 1 || arr[pos + 1] > elem))
ensures forall i :: 0 <= i < arr.Length ==> arr[i] == old(arr[i])
{
pos := -1;
for i := 0 to arr.Length - 1
{
if arr[i] == elem
{
pos := i;
}
}
} |
641 | dafny-synthesis_task_id_798.dfy | function sumTo( a:array<int>, n:int ) : int
requires a != null;
requires 0 <= n && n <= a.Length;
decreases n;
reads a;
{
if (n == 0) then 0 else sumTo(a, n-1) + a[n-1]
}
method ArraySum(a: array<int>) returns (result: int)
ensures result == sumTo(a, a.Length)
{
result := 0;
for i := 0 to a.Length
invariant 0 <= i <= a.Length
invariant result == sumTo(a, i)
{
result := result + a[i];
}
}
| function sumTo( a:array<int>, n:int ) : int
requires a != null;
requires 0 <= n && n <= a.Length;
reads a;
{
if (n == 0) then 0 else sumTo(a, n-1) + a[n-1]
}
method ArraySum(a: array<int>) returns (result: int)
ensures result == sumTo(a, a.Length)
{
result := 0;
for i := 0 to a.Length
{
result := result + a[i];
}
}
|
642 | dafny-synthesis_task_id_799.dfy | method RotateLeftBits(n: bv32, d: int) returns (result: bv32)
requires 0 <= d < 32
ensures result == ((n << d) | (n >> (32 - d)))
{
result := ((n << d) | (n >> (32 - d)));
} | method RotateLeftBits(n: bv32, d: int) returns (result: bv32)
requires 0 <= d < 32
ensures result == ((n << d) | (n >> (32 - d)))
{
result := ((n << d) | (n >> (32 - d)));
} |
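As a concrete instance of the bit-vector expression: rotating n = 0x12345678 left by d = 8 gives (n << 8) | (n >> 24) = 0x34567800 | 0x00000012 = 0x34567812, i.e. the top byte wraps around to the bottom.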
643 | dafny-synthesis_task_id_8.dfy | method SquareElements(a: array<int>) returns (squared: array<int>)
ensures squared.Length == a.Length
ensures forall i :: 0 <= i < a.Length ==> squared[i] == a[i] * a[i]
{
squared := new int[a.Length];
for i := 0 to a.Length
invariant 0 <= i <= a.Length
invariant squared.Length == a.Length
invariant forall k :: 0 <= k < i ==> squared[k] == a[k] * a[k]
{
squared[i] := a[i] * a[i];
}
} | method SquareElements(a: array<int>) returns (squared: array<int>)
ensures squared.Length == a.Length
ensures forall i :: 0 <= i < a.Length ==> squared[i] == a[i] * a[i]
{
squared := new int[a.Length];
for i := 0 to a.Length
{
squared[i] := a[i] * a[i];
}
} |
644 | dafny-synthesis_task_id_80.dfy | method TetrahedralNumber(n: int) returns (t: int)
requires n >= 0
ensures t == n * (n + 1) * (n + 2) / 6
{
t := n * (n + 1) * (n + 2) / 6;
} | method TetrahedralNumber(n: int) returns (t: int)
requires n >= 0
ensures t == n * (n + 1) * (n + 2) / 6
{
t := n * (n + 1) * (n + 2) / 6;
} |
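For reference, n * (n + 1) * (n + 2) / 6 is the n-th tetrahedral number, the sum of the first n triangular numbers: for n = 3 it gives 3 * 4 * 5 / 6 = 10, matching 1 + 3 + 6. The product of three consecutive integers is always divisible by 6, so the integer division is exact.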
645 | dafny-synthesis_task_id_801.dfy | method CountEqualNumbers(a: int, b: int, c: int) returns (count: int)
ensures count >= 0 && count <= 3
ensures (count == 3) <==> (a == b && b == c)
ensures (count == 2) <==> ((a == b && b != c) || (a != b && b == c) || (a == c && b != c))
ensures (count == 1) <==> (a != b && b != c && a != c)
{
count := 1;
if (a == b) {
count := count + 1;
}
if (a == c) {
count := count + 1;
}
if (a != b && b == c) {
count := count + 1;
}
} | method CountEqualNumbers(a: int, b: int, c: int) returns (count: int)
ensures count >= 0 && count <= 3
ensures (count == 3) <==> (a == b && b == c)
ensures (count == 2) <==> ((a == b && b != c) || (a != b && b == c) || (a == c && b != c))
ensures (count == 1) <==> (a != b && b != c && a != c)
{
count := 1;
if (a == b) {
count := count + 1;
}
if (a == c) {
count := count + 1;
}
if (a != b && b == c) {
count := count + 1;
}
} |
646 | dafny-synthesis_task_id_803.dfy | method IsPerfectSquare(n: int) returns (result: bool)
requires n >= 0
ensures result == true ==> (exists i: int :: 0 <= i <= n && i * i == n)
ensures result == false ==> (forall a: int :: 0 < a*a < n ==> a*a != n)
{
var i := 0;
while (i * i < n)
invariant 0 <= i <= n
invariant forall k :: 0 <= k < i ==> k * k < n
{
i := i + 1;
}
return i * i == n;
} | method IsPerfectSquare(n: int) returns (result: bool)
requires n >= 0
ensures result == true ==> (exists i: int :: 0 <= i <= n && i * i == n)
ensures result == false ==> (forall a: int :: 0 < a*a < n ==> a*a != n)
{
var i := 0;
while (i * i < n)
{
i := i + 1;
}
return i * i == n;
} |
647 | dafny-synthesis_task_id_804.dfy | predicate IsEven(n: int)
{
n % 2 == 0
}
method IsProductEven(a: array<int>) returns (result: bool)
ensures result <==> exists i :: 0 <= i < a.Length && IsEven(a[i])
{
result := false;
for i := 0 to a.Length
invariant 0 <= i <= a.Length
invariant result <==> exists k :: 0 <= k < i && IsEven(a[k])
{
if IsEven(a[i])
{
result := true;
break;
}
}
} | predicate IsEven(n: int)
{
n % 2 == 0
}
method IsProductEven(a: array<int>) returns (result: bool)
ensures result <==> exists i :: 0 <= i < a.Length && IsEven(a[i])
{
result := false;
for i := 0 to a.Length
{
if IsEven(a[i])
{
result := true;
break;
}
}
} |
648 | dafny-synthesis_task_id_807.dfy | predicate IsOdd(x: int)
{
x % 2 != 0
}
method FindFirstOdd(a: array<int>) returns (found: bool, index: int)
requires a != null
ensures !found ==> forall i :: 0 <= i < a.Length ==> !IsOdd(a[i])
ensures found ==> 0 <= index < a.Length && IsOdd(a[index]) && forall i :: 0 <= i < index ==> !IsOdd(a[i])
{
found := false;
index := 0;
while (index < a.Length)
invariant 0 <= index <= a.Length
invariant !found ==> forall i :: 0 <= i < index ==> !IsOdd(a[i])
invariant found ==> IsOdd(a[index - 1]) && forall i :: 0 <= i < index - 1 ==> !IsOdd(a[i])
{
if IsOdd(a[index])
{
found := true;
return;
}
index := index + 1;
}
} | predicate IsOdd(x: int)
{
x % 2 != 0
}
method FindFirstOdd(a: array<int>) returns (found: bool, index: int)
requires a != null
ensures !found ==> forall i :: 0 <= i < a.Length ==> !IsOdd(a[i])
ensures found ==> 0 <= index < a.Length && IsOdd(a[index]) && forall i :: 0 <= i < index ==> !IsOdd(a[i])
{
found := false;
index := 0;
while (index < a.Length)
{
if IsOdd(a[index])
{
found := true;
return;
}
index := index + 1;
}
} |
649 | dafny-synthesis_task_id_808.dfy | method ContainsK(s: seq<int>, k: int) returns (result: bool)
ensures result <==> k in s
{
result := false;
for i := 0 to |s|
invariant 0 <= i <= |s|
invariant result <==> (exists j :: 0 <= j < i && s[j] == k)
{
if s[i] == k {
result := true;
break;
}
}
} | method ContainsK(s: seq<int>, k: int) returns (result: bool)
ensures result <==> k in s
{
result := false;
for i := 0 to |s|
{
if s[i] == k {
result := true;
break;
}
}
} |
650 | dafny-synthesis_task_id_809.dfy | method IsSmaller(a: seq<int>, b: seq<int>) returns (result: bool)
requires |a| == |b|
ensures result <==> forall i :: 0 <= i < |a| ==> a[i] > b[i]
ensures !result <==> exists i :: 0 <= i < |a| && a[i] <= b[i]
{
result := true;
for i := 0 to |a|
invariant 0 <= i <= |a|
invariant result <==> forall k :: 0 <= k < i ==> a[k] > b[k]
invariant !result <==> exists k :: 0 <= k < i && a[k] <= b[k]
{
if a[i] <= b[i]
{
result := false;
break;
}
}
} | method IsSmaller(a: seq<int>, b: seq<int>) returns (result: bool)
requires |a| == |b|
ensures result <==> forall i :: 0 <= i < |a| ==> a[i] > b[i]
ensures !result <==> exists i :: 0 <= i < |a| && a[i] <= b[i]
{
result := true;
for i := 0 to |a|
{
if a[i] <= b[i]
{
result := false;
break;
}
}
} |
651 | dafny-synthesis_task_id_82.dfy | method SphereVolume(radius: real) returns (volume: real)
requires radius > 0.0
ensures volume == 4.0/3.0 * 3.1415926535 * radius * radius * radius
{
volume := 4.0/3.0 * 3.1415926535 * radius * radius * radius;
} | method SphereVolume(radius: real) returns (volume: real)
requires radius > 0.0
ensures volume == 4.0/3.0 * 3.1415926535 * radius * radius * radius
{
volume := 4.0/3.0 * 3.1415926535 * radius * radius * radius;
} |
652 | dafny-synthesis_task_id_85.dfy | method SphereSurfaceArea(radius: real) returns (area: real)
requires radius > 0.0
ensures area == 4.0 * 3.14159265358979323846 * radius * radius
{
area := 4.0 * 3.14159265358979323846 * radius * radius;
} | method SphereSurfaceArea(radius: real) returns (area: real)
requires radius > 0.0
ensures area == 4.0 * 3.14159265358979323846 * radius * radius
{
area := 4.0 * 3.14159265358979323846 * radius * radius;
} |
653 | dafny-synthesis_task_id_86.dfy | method CenteredHexagonalNumber(n: nat) returns (result: nat)
requires n >= 0
ensures result == 3 * n * (n - 1) + 1
{
result := 3 * n * (n - 1) + 1;
} | method CenteredHexagonalNumber(n: nat) returns (result: nat)
requires n >= 0
ensures result == 3 * n * (n - 1) + 1
{
result := 3 * n * (n - 1) + 1;
} |
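A quick check of the formula 3 * n * (n - 1) + 1: for n = 1, 2, 3, 4 it gives 1, 7, 19, 37, the first four centered hexagonal numbers (each successive ring contributes six more dots than the previous one).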
654 | dafny-synthesis_task_id_89.dfy | method ClosestSmaller(n: int) returns (m: int)
requires n > 0
ensures m + 1 == n
{
m := n - 1;
} | method ClosestSmaller(n: int) returns (m: int)
requires n > 0
ensures m + 1 == n
{
m := n - 1;
} |
655 | dafny-synthesis_task_id_94.dfy | method MinSecondValueFirst(s: array<seq<int>>) returns (firstOfMinSecond: int)
requires s.Length > 0
requires forall i :: 0 <= i < s.Length ==> |s[i]| >= 2
ensures exists i :: 0 <= i < s.Length && firstOfMinSecond == s[i][0] &&
(forall j :: 0 <= j < s.Length ==> s[i][1] <= s[j][1])
{
var minSecondIndex := 0;
for i := 1 to s.Length
invariant 0 <= i <= s.Length
invariant 0 <= minSecondIndex < i
invariant forall j :: 0 <= j < i ==> s[minSecondIndex][1] <= s[j][1]
{
if s[i][1] < s[minSecondIndex][1]
{
minSecondIndex := i;
}
}
firstOfMinSecond := s[minSecondIndex][0];
} | method MinSecondValueFirst(s: array<seq<int>>) returns (firstOfMinSecond: int)
requires s.Length > 0
requires forall i :: 0 <= i < s.Length ==> |s[i]| >= 2
ensures exists i :: 0 <= i < s.Length && firstOfMinSecond == s[i][0] &&
(forall j :: 0 <= j < s.Length ==> s[i][1] <= s[j][1])
{
var minSecondIndex := 0;
for i := 1 to s.Length
{
if s[i][1] < s[minSecondIndex][1]
{
minSecondIndex := i;
}
}
firstOfMinSecond := s[minSecondIndex][0];
} |
656 | dafny-synthesis_task_id_95.dfy | method SmallestListLength(s: seq<seq<int>>) returns (v: int)
requires |s| > 0
ensures forall i :: 0 <= i < |s| ==> v <= |s[i]|
ensures exists i :: 0 <= i < |s| && v == |s[i]|
{
v := |s[0]|;
for i := 1 to |s|
invariant 0 <= i <= |s|
invariant forall k :: 0 <= k < i ==> v <= |s[k]|
invariant exists k :: 0 <= k < i && v == |s[k]|
{
if |s[i]| < v
{
v := |s[i]|;
}
}
} | method SmallestListLength(s: seq<seq<int>>) returns (v: int)
requires |s| > 0
ensures forall i :: 0 <= i < |s| ==> v <= |s[i]|
ensures exists i :: 0 <= i < |s| && v == |s[i]|
{
v := |s[0]|;
for i := 1 to |s|
{
if |s[i]| < v
{
v := |s[i]|;
}
}
} |
657 | dafny-training_tmp_tmp_n2kixni_session1_training1.dfy | /*
* Copyright 2021 ConsenSys Software Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software dis-
* tributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/**
* Example 0.a.
* Add pre-cond to specify x >= 0 and a post cond of your choice.
* Counter-example generation.
*/
method abs(x: int) returns (y: int)
ensures true
{
if x < 0 {
y := -x;
} else {
y := x;
}
}
/** Call abs */
method foo(x: int)
requires x >= 0
{
var y := abs(x);
// assert( y == x);
}
/**
* Example 0.b.
* The goal is to compute the maximum of x and y and return it in m.
* The current version is buggy and returns 0 if x > y and 1 if x <= y.
*
* Try to:
* 1. write the post-condition that shows that max(x,y) (i.e. m) is larger than x and y.
* 2. write a set of post-conditions that fully characterises max.
* 3. fix the code and make sure it verifies.
*/
method max(x: int, y: int) returns (m: int)
requires true;
ensures true;
{
var r : int;
if x > y {
r := 0;
} else {
r := 1;
}
m := r;
// can use return r instead
// return m;
}
/**
* Example 1.
*
* Try to prove
* 1. the final assert statement (uncomment it and you may need to strengthen pre condition).
* 2. termination, propose a decreases clause (to replace *)
*/
method ex1(n: int)
requires true
ensures true
{
var i := 0;
while i < n
invariant true
// decreases * // do not check termination
{
i := i + 1;
}
/** This is the property to prove: */
// assert i == n;
}
/**
* Infinite loop.
*/
method foo2()
ensures false
decreases *
{
while true
decreases *
{
}
assert false;
}
// Specify a post-condition and prove it.
/**
* Example 2.
*
* Find a key in an array.
*
* @param a The array.
* @param key The key to find.
* @returns An index i such that a[i] == key if key in a, and -1 otherwise.
*
* Try to:
* 0. uncomment line index := index + 2 and check problems
* 1. write the property defined by the @returns above
* 2. prove this property (you may add loop invariants)
*
* @note The code below is flawed on purpose.
* |a| is the length of a
* to test whether an integer `k` is in `a`: k in a (true
* iff exists 0 <= i < |a|, a[i] == k).
* And: !(k in a) <==> k !in a
* a[i..j] is the sub sequence a[i], ..., a[j - 1]
* a[..j] is a[0..j] and a[i..] is a[i..|a|]
* a[..] is same as a
*/
method find(a: seq<int>, key: int) returns (index: int)
requires true
ensures true
{
index := 0;
while index < |a|
invariant true
{
// index := index + 1;
if a[index] == key {
return 0;
}
index := index + 2;
}
index := -10;
}
// Prove more complicated invariants with quantifiers.
/**
* Palindrome checker.
* Example 3.
*
* Check whether a sequence of letters is a palindrome.
*
* Try to:
* 1. write the algorithm to determine whether a string is a palindrome
* 2. write the ensures clauses that specify the palindrome properties
* 3. verify algorithm.
*
* Notes: a[k] accesses element k of a for 0 <= k < |a|
* a[i..j] is (a seq) with the first j elements minus the first i
* a[0..|a|] is same as a.
*/
method isPalindrome(a: seq<char>) returns (b: bool)
{
return true;
}
/**
* Whether a sequence of ints is sorted (ascending).
*
* @param a A sequence of integers.
* @returns Whether the sequence is sorted.
*/
predicate sorted(a: seq<int>)
{
forall j, k::0 <= j < k < |a| ==> a[j] <= a[k]
}
/**
* Example 4.
*
* Remove duplicates from a sorted sequence.
*
* Try to:
* 1. write the code to compute b
* 2. write the ensures clauses that specify the remove duplicates properties
* 3. verify algorithm.
*
* Notes: a[k] accesses element k of a for 0 <= k < |a|
* a[i..j] is (a seq) with the first j elements minus the first i
* a[0..|a|] is same as a.
*/
method unique(a: seq<int>) returns (b: seq<int>)
requires sorted(a)
ensures true
{
return a;
}
/**
* Dafny compiles the Main method if it finds one in a file.
*/
method Main() {
// run find
var r := find([], 1);
print r, "\n";
r := find([0,3,5,7], 5);
print r, "\n";
// run palindrome
var s1 := ['a'];
var r1 := isPalindrome(s1);
print "is [", s1, "]", " a isPalindrome? ", r1, " \n";
s1 := [];
r1 := isPalindrome(s1);
print "is [", s1, "]", " a isPalindrome? ", r1, " \n";
s1 := ['a', 'b'];
r1 := isPalindrome(s1);
print "is [", s1, "]", " a isPalindrome? ", r1, " \n";
s1 := ['a', 'b', 'a'];
r1 := isPalindrome(s1);
print "is [", s1, "]", " a isPalindrome? ", r1, " \n";
// run unique
var i := [0,1,3,3,5,5,7];
var s := unique(i);
print "unique applied to ", i, " is ", s, "\n";
}
| /*
* Copyright 2021 ConsenSys Software Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software dis-
* tributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/**
* Example 0.a.
* Add pre-cond to specify x >= 0 and a post cond of your choice.
* Counter-example generation.
*/
method abs(x: int) returns (y: int)
ensures true
{
if x < 0 {
y := -x;
} else {
y := x;
}
}
/** Call abs */
method foo(x: int)
requires x >= 0
{
var y := abs(x);
// assert( y == x);
}
/**
* Example 0.b.
* The goal is to compute the maximum of x and y and return it in m.
* The current version is buggy and returns 0 if x > y and 1 if x <= y.
*
* Try to:
* 1. write the post-condition that shows that max(x,y) (i.e. m) is larger than x and y.
* 2. write a set of post-conditions that fully characterises max.
* 3. fix the code and make sure it verifies.
*/
method max(x: int, y: int) returns (m: int)
requires true;
ensures true;
{
var r : int;
if x > y {
r := 0;
} else {
r := 1;
}
m := r;
// can use return r instead
// return m;
}
/**
* Example 1.
*
* Try to prove
* 1. the final assert statement (uncomment it and you may need to strengthen pre condition).
* 2. termination, propose a decreases clause (to replace *)
*/
method ex1(n: int)
requires true
ensures true
{
var i := 0;
while i < n
// decreases * // do not check termination
{
i := i + 1;
}
/** This is the property to prove: */
// assert i == n;
}
/**
* Infinite loop.
*/
method foo2()
ensures false
{
while true
{
}
}
// Specify a post-condition and prove it.
/**
* Example 2.
*
* Find a key in an array.
*
* @param a The array.
* @param key The key to find.
* @returns An index i such that a[i] == key if key in a, and -1 otherwise.
*
* Try to:
* 0. uncomment line index := index + 2 and check problems
* 1. write the property defined by the @returns above
* 2. prove this property (you may add loop invariants)
*
* @note The code below is flawed on purpose.
* |a| is the length of a
* to test whether an integer `k` is in `a`: k in a (true
* iff exists 0 <= i < |a|, a[i] == k).
* And: !(k in a) <==> k !in a
* a[i..j] is the sub sequence a[i], ..., a[j - 1]
* a[..j] is a[0..j] and a[i..] is a[i..|a|]
* a[..] is same as a
*/
method find(a: seq<int>, key: int) returns (index: int)
requires true
ensures true
{
index := 0;
while index < |a|
{
// index := index + 1;
if a[index] == key {
return 0;
}
index := index + 2;
}
index := -10;
}
// Prove more complicated invariants with quantifiers.
/**
* Palindrome checker.
* Example 3.
*
* Check whether a sequence of letters is a palindrome.
*
* Try to:
* 1. write the algorithm to determine whether a string is a palindrome
 * 2. write the ensures clauses that specify the palindrome properties
* 3. verify algorithm.
*
* Notes: a[k] accesses element k of a for 0 <= k < |a|
* a[i..j] is (a seq) with the first j elements minus the first i
* a[0..|a|] is same as a.
*/
method isPalindrome(a: seq<char>) returns (b: bool)
{
return true;
}
/**
* Whether a sequence of ints is sorted (ascending).
*
* @param a A sequence on integers.
* @returns Whether the sequence is sorted.
*/
predicate sorted(a: seq<int>)
{
forall j, k::0 <= j < k < |a| ==> a[j] <= a[k]
}
/**
* Example 4.
*
* Remove duplicates from a sorted sequence.
*
* Try to:
* 1. write the code to compute b
* 2. write the ensures clauses that specify the remove duplicates properties
* 3. verify algorithm.
*
* Notes: a[k] accesses element k of a for 0 <= k < |a|
* a[i..j] is (a seq) with the first j elements minus the first i
 *    a[0..|a|] is the same as a.
*/
method unique(a: seq<int>) returns (b: seq<int>)
requires sorted(a)
ensures true
{
return a;
}
/**
* Dafny compiles the Main method if it finds one in a file.
*/
method Main() {
// run find
var r := find([], 1);
print r, "\n";
r := find([0,3,5,7], 5);
print r, "\n";
// run palindrome
var s1 := ['a'];
var r1 := isPalindrome(s1);
print "is [", s1, "]", " a isPalindrome? ", r1, " \n";
s1 := [];
r1 := isPalindrome(s1);
print "is [", s1, "]", " a isPalindrome? ", r1, " \n";
s1 := ['a', 'b'];
r1 := isPalindrome(s1);
print "is [", s1, "]", " a isPalindrome? ", r1, " \n";
s1 := ['a', 'b', 'a'];
r1 := isPalindrome(s1);
print "is [", s1, "]", " a isPalindrome? ", r1, " \n";
// run unique
var i := [0,1,3,3,5,5,7];
var s := unique(i);
print "unique applied to ", i, " is ", s, "\n";
}
|
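Editor's note (not part of any dataset entry): the exercises above ask the reader to supply specifications and loop invariants. As one hedged, illustrative solution to the "find" exercise (Example 2), a linear search over a seq<int> with the usual invariants could look like the sketch below; the name FindIndex and its postconditions are the editor's own choices, not the dataset's reference answer.

method FindIndex(a: seq<int>, key: int) returns (index: int)
  // If key occurs in a, some index holding it is returned; otherwise -1.
  ensures 0 <= index ==> index < |a| && a[index] == key
  ensures index < 0 ==> forall k :: 0 <= k < |a| ==> a[k] != key
{
  index := 0;
  while index < |a|
    // Everything strictly before index has been inspected and is not key.
    invariant 0 <= index <= |a|
    invariant forall k :: 0 <= k < index ==> a[k] != key
  {
    if a[index] == key {
      return;
    }
    index := index + 1;
  }
  index := -1;
}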
658 | dafny-workout_tmp_tmp0abkw6f8_starter_ex01.dfy | method Max(a: int, b: int) returns (c: int)
ensures c >= a && c >= b && (c == a || c == b)
{
if (a >= b)
{
return a;
} else {
return b;
}
}
method Main()
{
print "Testing max...\n";
var max := Max(3, 4);
assert max == 4;
max := Max(-3, 4);
assert max == 4;
max := Max(-3, -4);
assert max == -3;
max := Max(5555555, 5555);
assert max == 5555555;
}
| method Max(a: int, b: int) returns (c: int)
ensures c >= a && c >= b && (c == a || c == b)
{
if (a >= b)
{
return a;
} else {
return b;
}
}
method Main()
{
print "Testing max...\n";
var max := Max(3, 4);
max := Max(-3, 4);
max := Max(-3, -4);
max := Max(5555555, 5555);
}
|
659 | dafny-workout_tmp_tmp0abkw6f8_starter_ex02.dfy | method Abs(x: int) returns (y: int)
requires x < 0
ensures 0 < y
ensures y == -x
{
return -x;
}
method Main()
{
var a := Abs(-3);
assert a == 3;
}
| method Abs(x: int) returns (y: int)
requires x < 0
ensures 0 < y
ensures y == -x
{
return -x;
}
method Main()
{
var a := Abs(-3);
}
|
660 | dafny-workout_tmp_tmp0abkw6f8_starter_ex03.dfy | method Abs(x: int) returns (y: int)
requires x == -1
ensures 0 <= y
ensures 0 <= x ==> y == x
ensures x < 0 ==> y == -x
{
return x + 2;
}
method Abs2(x: real) returns (y: real)
requires x == -0.5
ensures 0.0 <= y
ensures 0.0 <= x ==> y == x
ensures x < 0.0 ==> y == -x
{
return x + 1.0;
}
method Main()
{
var a := Abs(-1);
assert a == 1;
var a2 := Abs2(-0.5);
assert a2 == 0.5;
}
| method Abs(x: int) returns (y: int)
requires x == -1
ensures 0 <= y
ensures 0 <= x ==> y == x
ensures x < 0 ==> y == -x
{
return x + 2;
}
method Abs2(x: real) returns (y: real)
requires x == -0.5
ensures 0.0 <= y
ensures 0.0 <= x ==> y == x
ensures x < 0.0 ==> y == -x
{
return x + 1.0;
}
method Main()
{
var a := Abs(-1);
var a2 := Abs2(-0.5);
}
|
661 | dafny-workout_tmp_tmp0abkw6f8_starter_ex09.dfy | function fib(n: nat): nat
{
if n == 0 then 0 else
if n == 1 then 1 else
fib(n - 1) + fib(n - 2)
}
method ComputeFib(n: nat) returns (b: nat)
ensures b == fib(n)
{
var i: int := 1;
if 0 <= n < 2 { return n; }
b := 1;
var c := 1;
while i < n
decreases n - i
invariant 0 < i <= n
invariant b == fib(i)
invariant c == fib(i+1)
{
b, c := c, b + c;
i := i + 1;
}
}
method Main()
{
var ret := ComputeFib(5);
assert ret == fib(5);
}
| function fib(n: nat): nat
{
if n == 0 then 0 else
if n == 1 then 1 else
fib(n - 1) + fib(n - 2)
}
method ComputeFib(n: nat) returns (b: nat)
ensures b == fib(n)
{
var i: int := 1;
if 0 <= n < 2 { return n; }
b := 1;
var c := 1;
while i < n
{
b, c := c, b + c;
i := i + 1;
}
}
method Main()
{
var ret := ComputeFib(5);
}
|
662 | dafny-workout_tmp_tmp0abkw6f8_starter_ex12.dfy |
method FindMax(a: array<int>) returns (max_idx: nat)
requires a.Length > 0
ensures 0 <= max_idx < a.Length
ensures forall j :: 0 <= j < a.Length ==> a[max_idx] >= a[j]
{
max_idx := 0;
var i: nat := 1;
while i < a.Length
decreases a.Length - i
invariant 1 <= i <= a.Length
invariant 0 <= max_idx < i
invariant forall j :: 0 <= j < i ==> a[max_idx] >= a[j]
{
if a[i] > a[max_idx]
{
max_idx := i;
}
i := i + 1;
}
return max_idx;
}
method Main()
{
var arr: array<int> := new int[][1, 1, 25, 7, 2, -2, 3, 3, 20];
var idx := FindMax(arr);
assert forall i :: 0 <= i < arr.Length ==> arr[idx] >= arr[i];
// apparently I can't assert definite values like
// assert idx == 2
// or assert arr[idx] == 25
}
|
method FindMax(a: array<int>) returns (max_idx: nat)
requires a.Length > 0
ensures 0 <= max_idx < a.Length
ensures forall j :: 0 <= j < a.Length ==> a[max_idx] >= a[j]
{
max_idx := 0;
var i: nat := 1;
while i < a.Length
{
if a[i] > a[max_idx]
{
max_idx := i;
}
i := i + 1;
}
return max_idx;
}
method Main()
{
var arr: array<int> := new int[][1, 1, 25, 7, 2, -2, 3, 3, 20];
var idx := FindMax(arr);
// apparently I can't assert definite values like
// assert idx == 2
// or assert arr[idx] == 25
}
|
663 | dafny_examples_tmp_tmp8qotd4ez_leetcode_0001-two-sum.dfy | // If this invariant is added explicitly to the loop then the verification never finishes.
// It could be {:opaque} for a more controlled verification:
// assert InMap([], m, target) by {
// reveal InMap();
// }
predicate InMap(nums: seq<int>, m: map<int, int>, t: int) {
forall j :: 0 <= j < |nums| ==> t - nums[j] in m
}
method TwoSum(nums: array<int>, target: int) returns (r: (int, int))
ensures 0 <= r.0 ==> 0 <= r.0 < r.1 < nums.Length &&
nums[r.0] + nums[r.1] == target &&
forall i, j :: 0 <= i < j < r.1 ==> nums[i] + nums[j] != target
ensures r.0 == -1 <==> forall i, j :: 0 <= i < j < nums.Length ==> nums[i] + nums[j] != target
{
var m: map<int, int> := map[];
var i := 0;
while i < nums.Length
invariant i <= nums.Length
invariant forall k :: k in m ==> 0 <= m[k] < i
invariant forall k :: k in m ==> nums[m[k]] + k == target
invariant InMap(nums[..i], m, target)
invariant forall u, v :: 0 <= u < v < i ==> nums[u] + nums[v] != target
{
if nums[i] in m {
return (m[nums[i]], i);
}
m := m[target - nums[i] := i];
i := i + 1;
}
return (-1, -1);
}
| // If this invariant is added explicitly to the loop then the verification never finishes.
// It could be {:opaque} for a more controlled verification:
// assert InMap([], m, target) by {
// reveal InMap();
// }
predicate InMap(nums: seq<int>, m: map<int, int>, t: int) {
forall j :: 0 <= j < |nums| ==> t - nums[j] in m
}
method TwoSum(nums: array<int>, target: int) returns (r: (int, int))
ensures 0 <= r.0 ==> 0 <= r.0 < r.1 < nums.Length &&
nums[r.0] + nums[r.1] == target &&
forall i, j :: 0 <= i < j < r.1 ==> nums[i] + nums[j] != target
ensures r.0 == -1 <==> forall i, j :: 0 <= i < j < nums.Length ==> nums[i] + nums[j] != target
{
var m: map<int, int> := map[];
var i := 0;
while i < nums.Length
{
if nums[i] in m {
return (m[nums[i]], i);
}
m := m[target - nums[i] := i];
i := i + 1;
}
return (-1, -1);
}
|
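Editor's note (not part of the entry): the comment at the top of this entry mentions marking the invariant predicate {:opaque} and re-establishing it with an assert ... by { reveal ...; } step. A minimal sketch of that pattern, using a hypothetical predicate name rather than the entry's InMap, is:

predicate {:opaque} HasComplement(nums: seq<int>, m: map<int, int>, t: int) {
  forall j :: 0 <= j < |nums| ==> t - nums[j] in m
}

lemma EmptyHasComplement(m: map<int, int>, t: int)
  ensures HasComplement([], m, t)
{
  // The quantifier ranges over an empty sequence, so revealing the body suffices.
  assert HasComplement([], m, t) by {
    reveal HasComplement();
  }
}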
664 | dafny_examples_tmp_tmp8qotd4ez_leetcode_0027-remove-element.dfy | method RemoveElement(nums: array<int>, val: int) returns (newLength: int)
modifies nums
ensures 0 <= newLength <= nums.Length
ensures forall x :: x in nums[..newLength] ==> x != val
ensures multiset(nums[..newLength]) == multiset(old(nums[..]))[val := 0]
{
var i := 0;
var j := 0;
while i < nums.Length
invariant j <= i
invariant i <= nums.Length
invariant old(nums[i..]) == nums[i..];
invariant multiset(nums[..j]) == multiset(old(nums[..i]))[val := 0]
{
if nums[i] != val {
nums[j] := nums[i];
j := j + 1;
}
i := i + 1;
}
assert old(nums[..i]) == old(nums[..]);
return j;
}
| method RemoveElement(nums: array<int>, val: int) returns (newLength: int)
modifies nums
ensures 0 <= newLength <= nums.Length
ensures forall x :: x in nums[..newLength] ==> x != val
ensures multiset(nums[..newLength]) == multiset(old(nums[..]))[val := 0]
{
var i := 0;
var j := 0;
while i < nums.Length
{
if nums[i] != val {
nums[j] := nums[i];
j := j + 1;
}
i := i + 1;
}
return j;
}
|
665 | dafny_examples_tmp_tmp8qotd4ez_leetcode_0069-sqrt.dfy | // Author: Shaobo He
predicate sqrt(x: int, r: int) {
r*r <= x && (r+1)*(r+1) > x
}
lemma uniqueSqrt(x: int, r1: int, r2: int)
requires x >= 0 && r1 >= 0 && r2 >= 0;
ensures sqrt(x, r1) && sqrt(x, r2) ==> r1 == r2
{}
method mySqrt(x: int) returns (res: int)
requires 0 <= x;
ensures sqrt(x, res);
{
var l, r := 0, x;
while (l <= r)
decreases r - l;
invariant l >= 0;
invariant r >= 0;
invariant l*l <= x;
invariant (r+1)*(r+1) > x;
{
var mid := (l + r) / 2;
if (mid * mid <= x && (mid + 1) * (mid + 1) > x) {
return mid;
} else if (mid * mid <= x) {
l := mid + 1;
} else {
r := mid - 1;
}
}
}
| // Author: Shaobo He
predicate sqrt(x: int, r: int) {
r*r <= x && (r+1)*(r+1) > x
}
lemma uniqueSqrt(x: int, r1: int, r2: int)
requires x >= 0 && r1 >= 0 && r2 >= 0;
ensures sqrt(x, r1) && sqrt(x, r2) ==> r1 == r2
{}
method mySqrt(x: int) returns (res: int)
requires 0 <= x;
ensures sqrt(x, res);
{
var l, r := 0, x;
while (l <= r)
{
var mid := (l + r) / 2;
if (mid * mid <= x && (mid + 1) * (mid + 1) > x) {
return mid;
} else if (mid * mid <= x) {
l := mid + 1;
} else {
r := mid - 1;
}
}
}
|
666 | dafny_examples_tmp_tmp8qotd4ez_leetcode_0070-climbing-stairs.dfy | function Stairs(n: nat): nat {
if n <= 1 then 1 else Stairs(n - 2) + Stairs(n - 1)
}
// A simple specification
method ClimbStairs(n: nat) returns (r: nat)
ensures r == Stairs(n)
{
var a, b := 1, 1;
var i := 1;
while i < n
invariant i <= n || i == 1
invariant a == Stairs(i - 1)
invariant b == Stairs(i)
{
a, b := b, a + b;
i := i + 1;
}
return b;
}
| function Stairs(n: nat): nat {
if n <= 1 then 1 else Stairs(n - 2) + Stairs(n - 1)
}
// A simple specification
method ClimbStairs(n: nat) returns (r: nat)
ensures r == Stairs(n)
{
var a, b := 1, 1;
var i := 1;
while i < n
{
a, b := b, a + b;
i := i + 1;
}
return b;
}
|
667 | dafny_examples_tmp_tmp8qotd4ez_leetcode_0277-find-the-celebrity.dfy | // Author: Shaobo He
predicate knows(a: int, b: int)
predicate isCelebrity(n : int, i : int)
requires n >= 0 && 0 <= i < n;
{
forall j :: 0 <= j < n && i != j ==> knows(j, i) && !knows(i, j)
}
lemma knowerCannotBeCelebrity(n: int, i: int)
requires n >= 0 && 0 <= i < n
ensures (exists j :: 0 <= j < n && j != i && knows(i, j)) ==> !isCelebrity(n, i)
{}
ghost method isCelebrityP(n: int, i: int) returns (r : bool)
requires n >= 0 && 0 <= i < n;
ensures r <==> isCelebrity(n, i);
{
var j := 0;
r := true;
while j < n
decreases n - j;
invariant 0 <= j <= n;
invariant r ==> forall k :: 0 <= k < j && k != i ==> knows(k, i) && !knows(i, k);
{
if j != i {
if !knows(j, i) || knows(i, j) {
return false;
}
}
j := j + 1;
}
return r;
}
ghost method findCelebrity(n : int) returns (r : int)
requires 2 <= n <= 100;
ensures 0 <= r < n ==> isCelebrity(n, r);
ensures r == -1 ==> forall i :: 0 <= i < n ==> !isCelebrity(n, i);
{
var candidate := 0;
var i := 1;
while i < n
invariant 1 <= i <= n;
invariant forall j :: 0 <= j < i && j != candidate ==> !isCelebrity(n, j);
invariant 0 <= candidate < i;
{
if knows(candidate, i) {
candidate := i;
}
i := i + 1;
}
//assert forall j :: 0 <= j < n && j != candidate ==> !isCelebrity(n, j);
var isCelebrityC := isCelebrityP(n, candidate);
if isCelebrityC {
r := candidate;
} else {
r := -1;
}
}
| // Author: Shaobo He
predicate knows(a: int, b: int)
predicate isCelebrity(n : int, i : int)
requires n >= 0 && 0 <= i < n;
{
forall j :: 0 <= j < n && i != j ==> knows(j, i) && !knows(i, j)
}
lemma knowerCannotBeCelebrity(n: int, i: int)
requires n >= 0 && 0 <= i < n
ensures (exists j :: 0 <= j < n && j != i && knows(i, j)) ==> !isCelebrity(n, i)
{}
ghost method isCelebrityP(n: int, i: int) returns (r : bool)
requires n >= 0 && 0 <= i < n;
ensures r <==> isCelebrity(n, i);
{
var j := 0;
r := true;
while j < n
{
if j != i {
if !knows(j, i) || knows(i, j) {
return false;
}
}
j := j + 1;
}
return r;
}
ghost method findCelebrity(n : int) returns (r : int)
requires 2 <= n <= 100;
ensures 0 <= r < n ==> isCelebrity(n, r);
ensures r == -1 ==> forall i :: 0 <= i < n ==> !isCelebrity(n, i);
{
var candidate := 0;
var i := 1;
while i < n
{
if knows(candidate, i) {
candidate := i;
}
i := i + 1;
}
//assert forall j :: 0 <= j < n && j != candidate ==> !isCelebrity(n, j);
var isCelebrityC := isCelebrityP(n, candidate);
if isCelebrityC {
r := candidate;
} else {
r := -1;
}
}
|
668 | dafny_examples_tmp_tmp8qotd4ez_lib_math_DivMod.dfy | module DivMod {
function {:opaque} DivSub(a: int, b: int): int
requires 0 <= a && 0 < b
{
if a < b then 0 else 1 + DivSub(a - b, b)
}
function {:opaque} ModSub(a: int, b: int): int
requires 0 <= a && 0 < b
{
if a < b then a else ModSub(a - b, b)
}
lemma DivModAdd1(a: int, b: int)
requires b != 0
ensures (a + b) % b == a % b
ensures (a + b) / b == a / b + 1
{
var c := (a + b) / b - (a / b) - 1;
assert c * b + (a + b) % b - a % b == 0;
}
lemma DivModSub1(a: int, b: int)
requires b != 0
ensures (a - b) % b == a % b
ensures (a - b) / b == a / b - 1
{
var c := (a - b) / b - (a / b) + 1;
assert c * b + (a - b) % b - a % b == 0;
}
lemma ModEq(a: int, b: int)
requires 0 <= a && 0 < b
ensures a % b == ModSub(a, b)
{
reveal ModSub();
if a >= b {
DivModSub1(a, b);
}
}
lemma DivEq(a: int, b: int)
requires 0 <= a && 0 < b
ensures a / b == DivSub(a, b)
{
reveal DivSub();
if a >= b {
DivModSub1(a, b);
}
}
lemma DivModSpec'(a: int, b: int, q: int, r: int)
requires 0 <= a && 0 < b
requires 0 <= q && 0 <= r < b
requires a == q * b + r
ensures ModSub(a, b) == r
ensures DivSub(a, b) == q
{
reveal ModSub();
reveal DivSub();
if q > 0 {
DivModSpec'(a - b, b, q - 1, r);
}
}
lemma DivModSpec(a: int, b: int, q: int, r: int)
requires 0 <= a && 0 < b
requires 0 <= q && 0 <= r < b
requires a == q * b + r
ensures a % b == r
ensures a / b == q
{
ModEq(a, b);
DivEq(a, b);
DivModSpec'(a, b, q, r);
}
lemma DivMul(a: int, b: int)
requires 0 <= a && 0 < b
ensures a * b / b == a
{
DivModSpec(a * b, b, a, 0);
}
lemma DivModMulAdd(a: int, b: int, c: int)
requires 0 <= a && 0 <= c < b && 0 < b
ensures (a * b + c) / b == a
ensures (a * b + c) % b == c
{
DivModSpec(a * b + c, b, a, c);
}
}
| module DivMod {
function {:opaque} DivSub(a: int, b: int): int
requires 0 <= a && 0 < b
{
if a < b then 0 else 1 + DivSub(a - b, b)
}
function {:opaque} ModSub(a: int, b: int): int
requires 0 <= a && 0 < b
{
if a < b then a else ModSub(a - b, b)
}
lemma DivModAdd1(a: int, b: int)
requires b != 0
ensures (a + b) % b == a % b
ensures (a + b) / b == a / b + 1
{
var c := (a + b) / b - (a / b) - 1;
}
lemma DivModSub1(a: int, b: int)
requires b != 0
ensures (a - b) % b == a % b
ensures (a - b) / b == a / b - 1
{
var c := (a - b) / b - (a / b) + 1;
}
lemma ModEq(a: int, b: int)
requires 0 <= a && 0 < b
ensures a % b == ModSub(a, b)
{
reveal ModSub();
if a >= b {
DivModSub1(a, b);
}
}
lemma DivEq(a: int, b: int)
requires 0 <= a && 0 < b
ensures a / b == DivSub(a, b)
{
reveal DivSub();
if a >= b {
DivModSub1(a, b);
}
}
lemma DivModSpec'(a: int, b: int, q: int, r: int)
requires 0 <= a && 0 < b
requires 0 <= q && 0 <= r < b
requires a == q * b + r
ensures ModSub(a, b) == r
ensures DivSub(a, b) == q
{
reveal ModSub();
reveal DivSub();
if q > 0 {
DivModSpec'(a - b, b, q - 1, r);
}
}
lemma DivModSpec(a: int, b: int, q: int, r: int)
requires 0 <= a && 0 < b
requires 0 <= q && 0 <= r < b
requires a == q * b + r
ensures a % b == r
ensures a / b == q
{
ModEq(a, b);
DivEq(a, b);
DivModSpec'(a, b, q, r);
}
lemma DivMul(a: int, b: int)
requires 0 <= a && 0 < b
ensures a * b / b == a
{
DivModSpec(a * b, b, a, 0);
}
lemma DivModMulAdd(a: int, b: int, c: int)
requires 0 <= a && 0 <= c < b && 0 < b
ensures (a * b + c) / b == a
ensures (a * b + c) % b == c
{
DivModSpec(a * b + c, b, a, c);
}
}
|
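Editor's note (not part of the entry): a small client of the DivMod module above, showing how DivModSpec pins down a concrete quotient and remainder. The module name DivModClient and the lemma are the editor's own; the sketch assumes the DivMod module above is in scope in the same file.

module DivModClient {
  import DivMod

  lemma SeventeenOverFive()
    ensures 17 / 5 == 3 && 17 % 5 == 2
  {
    // 17 == 3 * 5 + 2 with 0 <= 2 < 5, so DivModSpec fixes both results.
    DivMod.DivModSpec(17, 5, 3, 2);
  }
}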
669 | dafny_examples_tmp_tmp8qotd4ez_test_shuffle.dfy |
method random(a: int, b: int) returns (r: int)
// requires a <= b
ensures a <= b ==> a <= r <= b
lemma eqMultiset_t<T>(t: T, s1: seq<T>, s2: seq<T>)
requires multiset(s1) == multiset(s2)
ensures t in s1 <==> t in s2
{
calc <==> {
t in s1;
t in multiset(s1);
// Not necessary:
// t in multiset(s2);
// t in s2;
}
/*
if (t in s1) {
assert t in multiset(s1);
}
else {
assert t !in multiset(s1);
}
*/
}
lemma eqMultiset<T>(s1: seq<T>, s2: seq<T>)
requires multiset(s1) == multiset(s2)
ensures forall t :: t in s1 <==> t in s2
{
forall t {
eqMultiset_t(t, s1, s2);
}
}
method swap<T>(a: array<T>, i: int, j: int)
// requires a != null
requires 0 <= i < a.Length && 0 <= j < a.Length
modifies a
ensures a[i] == old(a[j])
ensures a[j] == old(a[i])
ensures forall m :: 0 <= m < a.Length && m != i && m != j ==> a[m] == old(a[m])
ensures multiset(a[..]) == old(multiset(a[..]))
{
var t := a[i];
a[i] := a[j];
a[j] := t;
}
method getAllShuffledDataEntries<T(0)>(m_dataEntries: array<T>) returns (result: array<T>)
// requires m_dataEntries != null
// ensures result != null
ensures result.Length == m_dataEntries.Length
ensures multiset(result[..]) == multiset(m_dataEntries[..])
{
result := new T[m_dataEntries.Length];
forall i | 0 <= i < m_dataEntries.Length {
result[i] := m_dataEntries[i];
}
assert result[..] == m_dataEntries[..];
var k := result.Length - 1;
while (k >= 0)
invariant multiset(result[..]) == multiset(m_dataEntries[..])
{
var i := random(0, k);
assert i >= 0 && i <= k;
if (i != k) {
swap(result, i, k);
}
k := k - 1;
}
}
function set_of_seq<T>(s: seq<T>): set<T>
{
set x: T | x in s :: x
}
lemma in_set_of_seq<T>(x: T, s: seq<T>)
ensures x in s <==> x in set_of_seq(s)
lemma subset_set_of_seq<T>(s1: seq<T>, s2: seq<T>)
requires set_of_seq(s1) <= set_of_seq(s2)
ensures forall x :: x in s1 ==> x in s2
method getRandomDataEntry<T(==)>(m_workList: array<T>, avoidSet: seq<T>) returns (e: T)
requires m_workList.Length > 0
// ensures set_of_seq(avoidSet) < set_of_seq(m_workList[..]) ==> e !in avoidSet
// ensures avoidSet < m_workList[..] ==> e in m_workList[..]
{
var k := m_workList.Length - 1;
while (k >= 0)
{
var i := random(0, k);
assert i >= 0 && i <= k;
e := m_workList[i];
if (e !in avoidSet) {
return e;
}
k := k - 1;
}
return m_workList[0];
}
|
method random(a: int, b: int) returns (r: int)
// requires a <= b
ensures a <= b ==> a <= r <= b
lemma eqMultiset_t<T>(t: T, s1: seq<T>, s2: seq<T>)
requires multiset(s1) == multiset(s2)
ensures t in s1 <==> t in s2
{
calc <==> {
t in s1;
t in multiset(s1);
// Not necessary:
// t in multiset(s2);
// t in s2;
}
/*
if (t in s1) {
}
else {
}
*/
}
lemma eqMultiset<T>(s1: seq<T>, s2: seq<T>)
requires multiset(s1) == multiset(s2)
ensures forall t :: t in s1 <==> t in s2
{
forall t {
eqMultiset_t(t, s1, s2);
}
}
method swap<T>(a: array<T>, i: int, j: int)
// requires a != null
requires 0 <= i < a.Length && 0 <= j < a.Length
modifies a
ensures a[i] == old(a[j])
ensures a[j] == old(a[i])
ensures forall m :: 0 <= m < a.Length && m != i && m != j ==> a[m] == old(a[m])
ensures multiset(a[..]) == old(multiset(a[..]))
{
var t := a[i];
a[i] := a[j];
a[j] := t;
}
method getAllShuffledDataEntries<T(0)>(m_dataEntries: array<T>) returns (result: array<T>)
// requires m_dataEntries != null
// ensures result != null
ensures result.Length == m_dataEntries.Length
ensures multiset(result[..]) == multiset(m_dataEntries[..])
{
result := new T[m_dataEntries.Length];
forall i | 0 <= i < m_dataEntries.Length {
result[i] := m_dataEntries[i];
}
var k := result.Length - 1;
while (k >= 0)
{
var i := random(0, k);
if (i != k) {
swap(result, i, k);
}
k := k - 1;
}
}
function set_of_seq<T>(s: seq<T>): set<T>
{
set x: T | x in s :: x
}
lemma in_set_of_seq<T>(x: T, s: seq<T>)
ensures x in s <==> x in set_of_seq(s)
lemma subset_set_of_seq<T>(s1: seq<T>, s2: seq<T>)
requires set_of_seq(s1) <= set_of_seq(s2)
ensures forall x :: x in s1 ==> x in s2
method getRandomDataEntry<T(==)>(m_workList: array<T>, avoidSet: seq<T>) returns (e: T)
requires m_workList.Length > 0
// ensures set_of_seq(avoidSet) < set_of_seq(m_workList[..]) ==> e !in avoidSet
// ensures avoidSet < m_workList[..] ==> e in m_workList[..]
{
var k := m_workList.Length - 1;
while (k >= 0)
{
var i := random(0, k);
e := m_workList[i];
if (e !in avoidSet) {
return e;
}
k := k - 1;
}
return m_workList[0];
}
|
670 | dafny_experiments_tmp_tmpz29_3_3i_circuit.dfy | module Base
{
// We want to represent circuits.
// A Circuit is composed of nodes.
// Each node can have input ports and output ports.
// The ports are represented just by the index of the node, and the index
// of the port on the node.
datatype INodePort = inodeport(node_id: nat, port_id: nat)
datatype ONodePort = onodeport(node_id: nat, port_id: nat)
// Currently the nodes can just be Xor, And or Identity gates.
datatype Node =
Xor |
And |
Ident
// The number of input ports for each kind of node.
function n_iports (node: Node): nat
{
match node {
case Xor => 2
case And => 2
case Ident => 1
}
}
// The number of output ports for each kind of node.
function n_oports (node: Node): nat
{
match node {
case Xor => 1
case And => 1
case Ident => 1
}
}
// A circuit is represented by the nodes and the connections between the nodes.
// Each output port can go to many input ports.
// But each input port can only be connected to one output port.
datatype Circuit = Circ(
nodes: seq<Node>,
backconns: map<INodePort, ONodePort>
)
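  // For example (editor's illustration, not in the original file):
  //   Circ([Xor, Ident], map[inodeport(1, 0) := onodeport(0, 0)])
  // is a two-node circuit in which output 0 of the Xor gate (node 0) drives input 0 of the Ident gate (node 1).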
// Just checking that the port and node indices mentioned in the connections are sane.
predicate WellformedBackConns(c: Circuit)
{
forall inp :: inp in c.backconns ==>
WellformedINP(c, inp) &&
WellformedONP(c, c.backconns[inp])
}
predicate WellformedINP(c: Circuit, inp: INodePort)
{
(0 <= inp.node_id < |c.nodes|) && (inp.port_id < n_iports(c.nodes[inp.node_id]))
}
predicate WellformedONP(c: Circuit, onp: ONodePort)
{
(0 <= onp.node_id < |c.nodes|) && (onp.port_id < n_oports(c.nodes[onp.node_id]))
}
// All input ports in a circuit.
function AllINPs(c: Circuit): set<INodePort>
ensures forall inp :: inp in AllINPs(c) ==> WellformedINP(c, inp)
{
set node_id: nat, port_id: nat |
0 <= node_id < |c.nodes| && port_id < n_iports(c.nodes[node_id]) ::
inodeport(node_id, port_id)
}
// All output ports in a circuit.
function AllONPs(c: Circuit): set<ONodePort>
ensures forall onp :: onp in AllONPs(c) ==> WellformedONP(c, onp)
{
set node_id: nat, port_id: nat |
0 < node_id < |c.nodes| && port_id < n_oports(c.nodes[node_id]) ::
onodeport(node_id, port_id)
}
ghost predicate Wellformed(c: Circuit)
{
WellformedBackConns(c)
}
}
module Utils
{
// Updates both the keys and values of a map.
function UpdateMap<T(!new), U>(A: map<T, U>, f: T->T, g: U->U): (result: map<T, U>)
requires forall x: T, y: T :: x != y ==> f(x) != f(y)
ensures forall x :: x in A <==> f(x) in result;
ensures forall x :: x in A ==> g(A[x]) == result[f(x)];
{
map x | x in A :: f(x) := g(A[x])
}
// Combines two maps into a single map.
function CombineMaps<T(!new), U>(a: map<T, U>, b: map<T, U>): map<T, U>
requires forall x :: x in a ==> x !in b
requires forall x :: x in b ==> x !in a
ensures
var result := CombineMaps(a, b);
(forall x :: x in a ==> a[x] == result[x]) &&
(forall x :: x in b ==> b[x] == result[x]) &&
(forall x :: x in result ==> (x in a) || (x in b))
{
map x | x in (a.Keys + b.Keys) :: if x in a then a[x] else b[x]
}
function sub(a: nat, b: nat): nat
requires b <= a
{
a - b
}
}
module BackwardConnections
{
import opened Base
import opened Utils
// This is used when we are trying to create a new circuit by combining two existing circuits.
// This function takes care of combining the backwards connections.
// Because the node_indices of the two circuits are just natural numbers when we combine the
// two circuits we need to shift the node indices of the second circuit so that they don't clash.
// We do this by adding `offset` to the node indices.
function CombineBackconns(
offset: nat,
bc1: map<INodePort, ONodePort>, bc2: map<INodePort, ONodePort>): (result: map<INodePort, ONodePort>)
requires
forall inp :: inp in bc1 ==> inp.node_id < offset
{
var f:= (inp: INodePort) => inodeport(inp.node_id + offset, inp.port_id);
var g := (onp: ONodePort) => onodeport(onp.node_id + offset, onp.port_id);
var backconns2 := UpdateMap(bc2, f, g);
CombineMaps(bc1, backconns2)
}
lemma CombineBackconnsHelper(
offset: nat,
bc1: map<INodePort, ONodePort>, bc2: map<INodePort, ONodePort>, result: map<INodePort, ONodePort>)
requires
forall inp :: inp in bc1 ==> inp.node_id < offset
requires
result == CombineBackconns(offset, bc1, bc2);
ensures
forall inp :: inp in bc1 ==> (
inp in result &&
result[inp] == bc1[inp])
ensures
forall inp :: inp in bc2 ==> (
inodeport(inp.node_id+offset, inp.port_id) in result &&
result[inodeport(inp.node_id+offset, inp.port_id)] == onodeport(bc2[inp].node_id+offset, bc2[inp].port_id))
{
var f:= (inp: INodePort) => inodeport(inp.node_id + offset, inp.port_id);
var g := (onp: ONodePort) => onodeport(onp.node_id + offset, onp.port_id);
var backconns2 := UpdateMap(bc2, f, g);
assert forall inp :: inp in bc2 ==> inodeport(inp.node_id+offset, inp.port_id) in backconns2;
assert backconns2 == UpdateMap(bc2, f, g);
}
lemma CombineBackconnsHelper2(
offset: nat,
bc1: map<INodePort, ONodePort>, bc2: map<INodePort, ONodePort>, result: map<INodePort, ONodePort>, inp: INodePort)
requires
forall inp :: inp in bc1 ==> inp.node_id < offset
requires
result == CombineBackconns(offset, bc1, bc2);
requires inp in bc2
ensures
inodeport(inp.node_id+offset, inp.port_id) in result
ensures
result[inodeport(inp.node_id+offset, inp.port_id)] == onodeport(bc2[inp].node_id+offset, bc2[inp].port_id)
{
CombineBackconnsHelper(offset, bc1, bc2, result);
}
}
module CombineCircuits {
import opened Base
import BackwardConnections
import opened Utils
// Combine two circuits into a new circuit.
// This is a bit ugly because we have to offset the node indices of the
// second circuit by |c1.nodes|.
function CombineCircuits(c1: Circuit, c2: Circuit): (r: Circuit)
requires Wellformed(c1)
requires Wellformed(c2)
{
var new_nodes := c1.nodes + c2.nodes;
var new_backconns := BackwardConnections.CombineBackconns(
|c1.nodes|, c1.backconns, c2.backconns);
Circ(new_nodes, new_backconns)
}
// Check that Circuit c2 contains a subcircuit that corresponds to c1 getting mapped with the
// `node_map` function.
predicate IsEquivalentCircuit(node_is_member: nat->bool, node_map: nat-->nat, c1: Circuit, c2: Circuit)
requires forall inp :: inp in c1.backconns && node_is_member(inp.node_id) ==> node_is_member(c1.backconns[inp].node_id)
requires forall n :: node_is_member(n) ==> node_map.requires(n)
{
forall inp :: inp in c1.backconns && node_is_member(inp.node_id) ==>
inodeport(node_map(inp.node_id), inp.port_id) in c2.backconns &&
var inp2 := inodeport(node_map(inp.node_id), inp.port_id);
var onp := c1.backconns[inp];
onodeport(node_map(onp.node_id), onp.port_id) == c2.backconns[inp2]
}
// Check that for every input port and output port in the combined Circuit, they can be assigned
// to a port in one of the two source circuits.
predicate CanBackAssign(c1: Circuit, c2: Circuit, r: Circuit, is_in_c1: nat->bool, is_in_c2: nat-> bool,
map_r_to_c1: nat->nat, map_r_to_c2: nat-->nat)
requires forall a :: is_in_c1(a) ==> map_r_to_c1.requires(a)
requires forall a :: is_in_c2(a) ==> map_r_to_c2.requires(a)
requires Wellformed(c1)
requires Wellformed(c2)
{
(forall inp :: inp in AllINPs(r) ==>
(is_in_c1(inp.node_id) || is_in_c2(inp.node_id)) &&
if is_in_c1(inp.node_id) then
WellformedINP(c1, inodeport(map_r_to_c1(inp.node_id), inp.port_id))
else
WellformedINP(c2, inodeport(map_r_to_c2(inp.node_id), inp.port_id))) &&
(forall onp :: onp in AllONPs(r) ==>
(is_in_c1(onp.node_id) || is_in_c2(onp.node_id)) &&
if is_in_c1(onp.node_id) then
WellformedONP(c1, onodeport(map_r_to_c1(onp.node_id), onp.port_id))
else
WellformedONP(c2, onodeport(map_r_to_c2(onp.node_id), onp.port_id))) &&
true
}
lemma CombineCircuitsCorrectHelper(c1: Circuit, c2: Circuit, r: Circuit)
requires Wellformed(c1)
requires Wellformed(c2)
requires r_is_result: r == CombineCircuits(c1, c2)
{
assert r.backconns ==
BackwardConnections.CombineBackconns(|c1.nodes|, c1.backconns, c2.backconns) by {
reveal r_is_result;
}
}
lemma CombineCircuitsCorrectC1(c1: Circuit, c2: Circuit, r: Circuit)
requires Wellformed(c1)
requires Wellformed(c2)
requires r == CombineCircuits(c1, c2)
ensures
var offset := |c1.nodes|;
// The original c1 has an image in r.
IsEquivalentCircuit(a=>true, a=>a, c1, r) &&
// This subset of r has an image in c1.
IsEquivalentCircuit(a=>a < offset, a=>a, r, c1)
{
}
lemma CombineCircuitsCorrect(c1: Circuit, c2: Circuit, r: Circuit)
requires Wellformed(c1)
requires Wellformed(c2)
requires r_is_result: r == CombineCircuits(c1, c2)
ensures
var offset := |c1.nodes|;
// The original c1 has an image in r.
IsEquivalentCircuit(a=>true, a=>a, c1, r) &&
// This subset of r has an image in c1.
IsEquivalentCircuit(a=>a < offset, a=>a, r, c1) &&
// The original c2 has an image in r.
IsEquivalentCircuit(a=>true, a=>a+offset, c2, r) &&
/*
  FIXME: These have been commented out for now;
  otherwise it takes longer than 20s to solve.
// All ports in r have equivalents in either c1 or c2.
CanBackAssign(c1, c2, r, a=>a < offset, a=> a >= offset, a=>a, a requires a >= offset => sub(a, offset)) &&
// This subset of r has an image in c2.
IsEquivalentCircuit(a=>a >= offset, a requires a >= offset => sub(a, offset), r, c2) &&
*/
true
{
// Trying to prove:
// The original c2 has an image in r.
// IsEquivalentCircuit(a=>true, a=>a+offset, c2, r)
var offset := |c1.nodes|;
var node_is_member := a=>true;
var node_map := a=>a+offset;
calc {
IsEquivalentCircuit(node_is_member, node_map, c2, r);
// Substitute in the IsEquivalentCircuit function definition.
forall inp :: inp in c2.backconns && node_is_member(inp.node_id) ==>
inodeport(node_map(inp.node_id), inp.port_id) in r.backconns &&
var inp2 := inodeport(node_map(inp.node_id), inp.port_id);
var onp := c2.backconns[inp];
onodeport(node_map(onp.node_id), onp.port_id) == r.backconns[inp2];
      // Substitute in the node_is_member and node_map definitions.
      // For some reason this causes the solver to take too long.
forall inp :: inp in c2.backconns ==>
inodeport(inp.node_id+offset, inp.port_id) in r.backconns &&
var inp2 := inodeport(inp.node_id+offset, inp.port_id);
var onp := c2.backconns[inp];
onodeport(onp.node_id+offset, onp.port_id) == r.backconns[inp2];
}
assert basic_result: r.backconns == BackwardConnections.CombineBackconns(|c1.nodes|, c1.backconns, c2.backconns)
by {
reveal r_is_result;
}
forall inp | inp in c2.backconns
{
calc {
inodeport(inp.node_id+offset, inp.port_id) in r.backconns &&
var inp2 := inodeport(inp.node_id+offset, inp.port_id);
var onp := c2.backconns[inp];
onodeport(onp.node_id+offset, onp.port_id) == r.backconns[inp2];
{reveal basic_result;}
inodeport(inp.node_id+offset, inp.port_id) in
BackwardConnections.CombineBackconns(|c1.nodes|, c1.backconns, c2.backconns) &&
var inp2 := inodeport(inp.node_id+offset, inp.port_id);
var onp := c2.backconns[inp];
onodeport(onp.node_id+offset, onp.port_id) ==
BackwardConnections.CombineBackconns(|c1.nodes|, c1.backconns, c2.backconns)[inp2];
inodeport(inp.node_id+offset, inp.port_id) in
BackwardConnections.CombineBackconns(|c1.nodes|, c1.backconns, c2.backconns) &&
var inp2 := inodeport(inp.node_id+offset, inp.port_id);
var onp := c2.backconns[inp];
onodeport(onp.node_id+offset, onp.port_id) ==
BackwardConnections.CombineBackconns(|c1.nodes|, c1.backconns, c2.backconns)[inp2];
{
var inp2 := inodeport(inp.node_id+offset, inp.port_id);
BackwardConnections.CombineBackconnsHelper2(
offset, c1.backconns, c2.backconns,
BackwardConnections.CombineBackconns(|c1.nodes|, c1.backconns, c2.backconns),
inp
);
assert
BackwardConnections.CombineBackconns(|c1.nodes|, c1.backconns, c2.backconns)[inp2] ==
onodeport(c2.backconns[inp].node_id+offset, c2.backconns[inp].port_id);
assert
inodeport(inp.node_id+offset, inp.port_id) in
BackwardConnections.CombineBackconns(|c1.nodes|, c1.backconns, c2.backconns);
}
true;
}
}
reveal r_is_result;
CombineCircuitsCorrectC1(c1, c2, r);
}
}
| module Base
{
// We want to represent circuits.
// A Circuit is composed of nodes.
// Each node can have input ports and output ports.
// The ports are represented just by the index of the node, and the index
// of the port on the node.
datatype INodePort = inodeport(node_id: nat, port_id: nat)
datatype ONodePort = onodeport(node_id: nat, port_id: nat)
// Currently the nodes can just be Xor, And or Identity gates.
datatype Node =
Xor |
And |
Ident
// The number of input ports for each kind of node.
function n_iports (node: Node): nat
{
match node {
case Xor => 2
case And => 2
case Ident => 1
}
}
// The number of output ports for each kind of node.
function n_oports (node: Node): nat
{
match node {
case Xor => 1
case And => 1
case Ident => 1
}
}
// A circuit is represented by the nodes and the connections between the nodes.
// Each output port can go to many input ports.
// But each input port can only be connected to one output port.
datatype Circuit = Circ(
nodes: seq<Node>,
backconns: map<INodePort, ONodePort>
)
// Just checking that the port and node indices mentioned in the connections are sane.
predicate WellformedBackConns(c: Circuit)
{
forall inp :: inp in c.backconns ==>
WellformedINP(c, inp) &&
WellformedONP(c, c.backconns[inp])
}
predicate WellformedINP(c: Circuit, inp: INodePort)
{
(0 <= inp.node_id < |c.nodes|) && (inp.port_id < n_iports(c.nodes[inp.node_id]))
}
predicate WellformedONP(c: Circuit, onp: ONodePort)
{
(0 <= onp.node_id < |c.nodes|) && (onp.port_id < n_oports(c.nodes[onp.node_id]))
}
// All input ports in a circuit.
function AllINPs(c: Circuit): set<INodePort>
ensures forall inp :: inp in AllINPs(c) ==> WellformedINP(c, inp)
{
set node_id: nat, port_id: nat |
0 <= node_id < |c.nodes| && port_id < n_iports(c.nodes[node_id]) ::
inodeport(node_id, port_id)
}
// All output ports in a circuit.
function AllONPs(c: Circuit): set<ONodePort>
ensures forall onp :: onp in AllONPs(c) ==> WellformedONP(c, onp)
{
set node_id: nat, port_id: nat |
0 < node_id < |c.nodes| && port_id < n_oports(c.nodes[node_id]) ::
onodeport(node_id, port_id)
}
ghost predicate Wellformed(c: Circuit)
{
WellformedBackConns(c)
}
}
module Utils
{
// Updates both the keys and values of a map.
function UpdateMap<T(!new), U>(A: map<T, U>, f: T->T, g: U->U): (result: map<T, U>)
requires forall x: T, y: T :: x != y ==> f(x) != f(y)
ensures forall x :: x in A <==> f(x) in result;
ensures forall x :: x in A ==> g(A[x]) == result[f(x)];
{
map x | x in A :: f(x) := g(A[x])
}
// Combines two maps into a single map.
function CombineMaps<T(!new), U>(a: map<T, U>, b: map<T, U>): map<T, U>
requires forall x :: x in a ==> x !in b
requires forall x :: x in b ==> x !in a
ensures
var result := CombineMaps(a, b);
(forall x :: x in a ==> a[x] == result[x]) &&
(forall x :: x in b ==> b[x] == result[x]) &&
(forall x :: x in result ==> (x in a) || (x in b))
{
map x | x in (a.Keys + b.Keys) :: if x in a then a[x] else b[x]
}
function sub(a: nat, b: nat): nat
requires b <= a
{
a - b
}
}
module BackwardConnections
{
import opened Base
import opened Utils
// This is used when we are trying to create a new circuit by combining two existing circuits.
// This function takes care of combining the backwards connections.
  // Because the node_indices of the two circuits are just natural numbers, when we combine the
  // two circuits we need to shift the node indices of the second circuit so that they don't clash.
// We do this by adding `offset` to the node indices.
function CombineBackconns(
offset: nat,
bc1: map<INodePort, ONodePort>, bc2: map<INodePort, ONodePort>): (result: map<INodePort, ONodePort>)
requires
forall inp :: inp in bc1 ==> inp.node_id < offset
{
var f:= (inp: INodePort) => inodeport(inp.node_id + offset, inp.port_id);
var g := (onp: ONodePort) => onodeport(onp.node_id + offset, onp.port_id);
var backconns2 := UpdateMap(bc2, f, g);
CombineMaps(bc1, backconns2)
}
lemma CombineBackconnsHelper(
offset: nat,
bc1: map<INodePort, ONodePort>, bc2: map<INodePort, ONodePort>, result: map<INodePort, ONodePort>)
requires
forall inp :: inp in bc1 ==> inp.node_id < offset
requires
result == CombineBackconns(offset, bc1, bc2);
ensures
forall inp :: inp in bc1 ==> (
inp in result &&
result[inp] == bc1[inp])
ensures
forall inp :: inp in bc2 ==> (
inodeport(inp.node_id+offset, inp.port_id) in result &&
result[inodeport(inp.node_id+offset, inp.port_id)] == onodeport(bc2[inp].node_id+offset, bc2[inp].port_id))
{
var f:= (inp: INodePort) => inodeport(inp.node_id + offset, inp.port_id);
var g := (onp: ONodePort) => onodeport(onp.node_id + offset, onp.port_id);
var backconns2 := UpdateMap(bc2, f, g);
}
lemma CombineBackconnsHelper2(
offset: nat,
bc1: map<INodePort, ONodePort>, bc2: map<INodePort, ONodePort>, result: map<INodePort, ONodePort>, inp: INodePort)
requires
forall inp :: inp in bc1 ==> inp.node_id < offset
requires
result == CombineBackconns(offset, bc1, bc2);
requires inp in bc2
ensures
inodeport(inp.node_id+offset, inp.port_id) in result
ensures
result[inodeport(inp.node_id+offset, inp.port_id)] == onodeport(bc2[inp].node_id+offset, bc2[inp].port_id)
{
CombineBackconnsHelper(offset, bc1, bc2, result);
}
}
module CombineCircuits {
import opened Base
import BackwardConnections
import opened Utils
// Combine two circuits into a new circuit.
// This is a bit ugly because we have to offset the node indices of the
// second circuit by |c1.nodes|.
function CombineCircuits(c1: Circuit, c2: Circuit): (r: Circuit)
requires Wellformed(c1)
requires Wellformed(c2)
{
var new_nodes := c1.nodes + c2.nodes;
var new_backconns := BackwardConnections.CombineBackconns(
|c1.nodes|, c1.backconns, c2.backconns);
Circ(new_nodes, new_backconns)
}
// Check that Circuit c2 contains a subcircuit that corresponds to c1 getting mapped with the
// `node_map` function.
predicate IsEquivalentCircuit(node_is_member: nat->bool, node_map: nat-->nat, c1: Circuit, c2: Circuit)
requires forall inp :: inp in c1.backconns && node_is_member(inp.node_id) ==> node_is_member(c1.backconns[inp].node_id)
requires forall n :: node_is_member(n) ==> node_map.requires(n)
{
forall inp :: inp in c1.backconns && node_is_member(inp.node_id) ==>
inodeport(node_map(inp.node_id), inp.port_id) in c2.backconns &&
var inp2 := inodeport(node_map(inp.node_id), inp.port_id);
var onp := c1.backconns[inp];
onodeport(node_map(onp.node_id), onp.port_id) == c2.backconns[inp2]
}
// Check that for every input port and output port in the combined Circuit, they can be assigned
// to a port in one of the two source circuits.
predicate CanBackAssign(c1: Circuit, c2: Circuit, r: Circuit, is_in_c1: nat->bool, is_in_c2: nat-> bool,
map_r_to_c1: nat->nat, map_r_to_c2: nat-->nat)
requires forall a :: is_in_c1(a) ==> map_r_to_c1.requires(a)
requires forall a :: is_in_c2(a) ==> map_r_to_c2.requires(a)
requires Wellformed(c1)
requires Wellformed(c2)
{
(forall inp :: inp in AllINPs(r) ==>
(is_in_c1(inp.node_id) || is_in_c2(inp.node_id)) &&
if is_in_c1(inp.node_id) then
WellformedINP(c1, inodeport(map_r_to_c1(inp.node_id), inp.port_id))
else
WellformedINP(c2, inodeport(map_r_to_c2(inp.node_id), inp.port_id))) &&
(forall onp :: onp in AllONPs(r) ==>
(is_in_c1(onp.node_id) || is_in_c2(onp.node_id)) &&
if is_in_c1(onp.node_id) then
WellformedONP(c1, onodeport(map_r_to_c1(onp.node_id), onp.port_id))
else
WellformedONP(c2, onodeport(map_r_to_c2(onp.node_id), onp.port_id))) &&
true
}
lemma CombineCircuitsCorrectHelper(c1: Circuit, c2: Circuit, r: Circuit)
requires Wellformed(c1)
requires Wellformed(c2)
requires r_is_result: r == CombineCircuits(c1, c2)
{
}
lemma CombineCircuitsCorrectC1(c1: Circuit, c2: Circuit, r: Circuit)
requires Wellformed(c1)
requires Wellformed(c2)
requires r == CombineCircuits(c1, c2)
ensures
var offset := |c1.nodes|;
// The original c1 has an image in r.
IsEquivalentCircuit(a=>true, a=>a, c1, r) &&
// This subset of r has an image in c1.
IsEquivalentCircuit(a=>a < offset, a=>a, r, c1)
{
}
lemma CombineCircuitsCorrect(c1: Circuit, c2: Circuit, r: Circuit)
requires Wellformed(c1)
requires Wellformed(c2)
requires r_is_result: r == CombineCircuits(c1, c2)
ensures
var offset := |c1.nodes|;
// The original c1 has an image in r.
IsEquivalentCircuit(a=>true, a=>a, c1, r) &&
// This subset of r has an image in c1.
IsEquivalentCircuit(a=>a < offset, a=>a, r, c1) &&
// The original c2 has an image in r.
IsEquivalentCircuit(a=>true, a=>a+offset, c2, r) &&
/*
  FIXME: These have been commented out for now;
  otherwise it takes longer than 20s to solve.
// All ports in r have equivalents in either c1 or c2.
CanBackAssign(c1, c2, r, a=>a < offset, a=> a >= offset, a=>a, a requires a >= offset => sub(a, offset)) &&
// This subset of r has an image in c2.
IsEquivalentCircuit(a=>a >= offset, a requires a >= offset => sub(a, offset), r, c2) &&
*/
true
{
// Trying to prove:
// The original c2 has an image in r.
// IsEquivalentCircuit(a=>true, a=>a+offset, c2, r)
var offset := |c1.nodes|;
var node_is_member := a=>true;
var node_map := a=>a+offset;
calc {
IsEquivalentCircuit(node_is_member, node_map, c2, r);
// Substitute in the IsEquivalentCircuit function definition.
forall inp :: inp in c2.backconns && node_is_member(inp.node_id) ==>
inodeport(node_map(inp.node_id), inp.port_id) in r.backconns &&
var inp2 := inodeport(node_map(inp.node_id), inp.port_id);
var onp := c2.backconns[inp];
onodeport(node_map(onp.node_id), onp.port_id) == r.backconns[inp2];
      // Substitute in the node_is_member and node_map definitions.
      // For some reason this causes the solver to take too long.
forall inp :: inp in c2.backconns ==>
inodeport(inp.node_id+offset, inp.port_id) in r.backconns &&
var inp2 := inodeport(inp.node_id+offset, inp.port_id);
var onp := c2.backconns[inp];
onodeport(onp.node_id+offset, onp.port_id) == r.backconns[inp2];
}
forall inp | inp in c2.backconns
{
calc {
inodeport(inp.node_id+offset, inp.port_id) in r.backconns &&
var inp2 := inodeport(inp.node_id+offset, inp.port_id);
var onp := c2.backconns[inp];
onodeport(onp.node_id+offset, onp.port_id) == r.backconns[inp2];
inodeport(inp.node_id+offset, inp.port_id) in
BackwardConnections.CombineBackconns(|c1.nodes|, c1.backconns, c2.backconns) &&
var inp2 := inodeport(inp.node_id+offset, inp.port_id);
var onp := c2.backconns[inp];
onodeport(onp.node_id+offset, onp.port_id) ==
BackwardConnections.CombineBackconns(|c1.nodes|, c1.backconns, c2.backconns)[inp2];
inodeport(inp.node_id+offset, inp.port_id) in
BackwardConnections.CombineBackconns(|c1.nodes|, c1.backconns, c2.backconns) &&
var inp2 := inodeport(inp.node_id+offset, inp.port_id);
var onp := c2.backconns[inp];
onodeport(onp.node_id+offset, onp.port_id) ==
BackwardConnections.CombineBackconns(|c1.nodes|, c1.backconns, c2.backconns)[inp2];
{
var inp2 := inodeport(inp.node_id+offset, inp.port_id);
BackwardConnections.CombineBackconnsHelper2(
offset, c1.backconns, c2.backconns,
BackwardConnections.CombineBackconns(|c1.nodes|, c1.backconns, c2.backconns),
inp
);
}
true;
}
}
reveal r_is_result;
CombineCircuitsCorrectC1(c1, c2, r);
}
}
|
671 | dafny_misc_tmp_tmpg4vzlnm1_rosetta_code_factorial.dfy | // recursive definition of factorial
function Factorial(n: nat): nat {
if n == 0 then 1 else n * Factorial(n - 1)
}
// iterative implementation of factorial
method IterativeFactorial(n: nat) returns (result: nat)
ensures result == Factorial(n)
{
result := 1;
var i := 1;
while i <= n
invariant i <= n + 1
invariant result == Factorial(i - 1)
{
result := result * i;
i := i + 1;
}
}
| // recursive definition of factorial
function Factorial(n: nat): nat {
if n == 0 then 1 else n * Factorial(n - 1)
}
// iterative implementation of factorial
method IterativeFactorial(n: nat) returns (result: nat)
ensures result == Factorial(n)
{
result := 1;
var i := 1;
while i <= n
{
result := result * i;
i := i + 1;
}
}
|
672 | dafny_misc_tmp_tmpg4vzlnm1_rosetta_code_fibonacci_sequence.dfy | // definition of Fibonacci numbers
function Fibonacci(n: nat): nat {
match n {
case 0 => 0
case 1 => 1
case _ => Fibonacci(n - 1) + Fibonacci(n - 2)
}
}
// iterative calculation of Fibonacci numbers
method FibonacciIterative(n: nat) returns (f: nat)
ensures f == Fibonacci(n)
{
if n < 2 {
return n;
}
var prev := 1;
f := 1;
var i := 2;
while i < n
invariant i <= n
invariant prev == Fibonacci(i - 1)
invariant f == Fibonacci(i)
{
prev, f := f, f + prev;
i := i + 1;
}
}
| // definition of Fibonacci numbers
function Fibonacci(n: nat): nat {
match n {
case 0 => 0
case 1 => 1
case _ => Fibonacci(n - 1) + Fibonacci(n - 2)
}
}
// iterative calculation of Fibonacci numbers
method FibonacciIterative(n: nat) returns (f: nat)
ensures f == Fibonacci(n)
{
if n < 2 {
return n;
}
var prev := 1;
f := 1;
var i := 2;
while i < n
{
prev, f := f, f + prev;
i := i + 1;
}
}
|
673 | dafny_projects_tmp_tmpjutqwjv4_tutorial_tutorial.dfy | // Working through https://dafny.org/dafny/OnlineTutorial/guide
function fib(n: nat): nat
{
if n == 0 then 0
else if n == 1 then 1
else fib(n - 1) + fib(n - 2)
}
method ComputeFib(n: nat) returns (ret: nat)
ensures ret == fib(n)
{
var a := 0;
var b := 1;
var i := 0;
while i < n
invariant 0 <= i <= n
invariant a == fib(i)
invariant b == fib(i+1)
{
a, b := b, a+b;
i := i + 1;
}
assert i == n;
return a;
}
method Find(a: array<int>, key: int) returns (index: int)
ensures 0 <= index ==> index < a.Length && a[index] == key
ensures index < 0 ==> (forall k :: 0 <= k < a.Length ==> a[k] != key)
{
index := 0;
while index < a.Length
invariant 0 <= index <= a.Length
invariant forall k :: 0 <= k < index ==> a[k] != key
{
if a[index] == key {
return index;
}
index := index + 1;
}
return -1;
}
predicate sorted(a: array<int>)
reads a
{
forall n, m :: 0 <= n < m < a.Length ==> a[n] <= a[m]
}
method BinarySearch(a: array<int>, value: int) returns (index: int)
requires 0 <= a.Length && sorted(a)
ensures 0 <= index ==> index < a.Length && a[index] == value
ensures index < 0 ==> forall k :: 0 <= k < a.Length ==> a[k] != value
{
var low := 0;
var high := a.Length - 1;
while low < high
invariant 0 <= low && high < a.Length
invariant forall k :: 0 <= k < a.Length && (k < low || k > high) ==> a[k] != value
{
var mid : int := (low + high) / 2;
assert 0 <= low <= mid < high < a.Length;
if a[mid] < value {
low := mid + 1;
} else if a[mid] > value {
high := mid - 1;
} else {
assert a[mid] == value;
return mid;
}
}
if low < a.Length && a[low] == value {
return low;
} else {
return -1;
}
}
// https://dafny.org/dafny/OnlineTutorial/ValueTypes
function update(s: seq<int>, i: int, v: int): seq<int>
requires 0 <= i < |s|
ensures update(s, i, v) == s[i := v]
{
s[..i] + [v] + s[i+1..]
}
// https://dafny.org/dafny/OnlineTutorial/Lemmas
lemma SkippingLemma(a: array<int>, j: int)
requires forall i :: 0 <= i < a.Length ==> 0 <= a[i]
requires forall i :: 0 < i < a.Length ==> a[i-1]-1 <= a[i]
requires 0 <= j < a.Length
ensures forall i :: j <= i < j + a[j] && i < a.Length ==> a[i] != 0
{
var i := j;
while i < j + a[j] && i < a.Length
invariant i < a.Length ==> a[i] >= a[j] - (i-j)
invariant forall k :: j <= k < i && k < a.Length ==> a[k] != 0
{
i := i + 1;
}
}
method FindZero(a: array<int>) returns (index: int)
requires forall i :: 0 <= i < a.Length ==> 0 <= a[i]
requires forall i :: 0 < i < a.Length ==> a[i-1]-1 <= a[i]
ensures index < 0 ==> forall i :: 0 <= i < a.Length ==> a[i] != 0
ensures 0 <= index ==> index < a.Length && a[index] == 0
{
index := 0;
while index < a.Length
invariant 0 <= index
invariant forall k :: 0 <= k < index && k < a.Length ==> a[k] != 0
{
if a[index] == 0 { return; }
SkippingLemma(a, index);
index := index + a[index];
}
index := -1;
}
function count(a: seq<bool>): nat
{
if |a| == 0 then 0 else
(if a[0] then 1 else 0) + count(a[1..])
}
lemma DistributiveLemma(a: seq<bool>, b: seq<bool>)
ensures count(a + b) == count(a) + count(b)
{
if a == [] {
assert a+b == b;
} else {
// Unnecessary! DistributiveLemma(a[1..], b);
assert a + b == [a[0]] + (a[1..] + b);
}
}
class Node
{
var next: seq<Node>
}
predicate closed(graph: set<Node>)
reads graph
{
forall i :: i in graph ==> forall k :: 0 <= k < |i.next| ==> i.next[k] in graph && i.next[k] != i
}
predicate path(p: seq<Node>, graph: set<Node>)
requires closed(graph) && 0 < |p|
reads graph
{
p[0] in graph &&
(|p| > 1 ==> p[1] in p[0].next && // the first link is valid, if it exists
               path(p[1..], graph))     // and the rest of the sequence is a valid path
}
predicate pathSpecific(p: seq<Node>, start: Node, end: Node, graph: set<Node>)
requires closed(graph)
reads graph
{
0 < |p| && // path is nonempty
start == p[0] && end == p[|p|-1] && // it starts and ends correctly
path(p, graph) // and it is a valid path
}
lemma DisproofLemma(p: seq<Node>, subgraph: set<Node>,
root: Node, goal: Node, graph: set<Node>)
requires closed(subgraph) && closed(graph) && subgraph <= graph
requires root in subgraph && goal in graph - subgraph
ensures !pathSpecific(p, root, goal, graph)
{
if |p| >= 2 && p[0] == root && p[1] in p[0].next {
DisproofLemma(p[1..], subgraph, p[1], goal, graph);
}
}
lemma ClosedLemma(subgraph: set<Node>, root: Node, goal: Node, graph: set<Node>)
requires closed(subgraph) && closed(graph) && subgraph <= graph
requires root in subgraph && goal in graph - subgraph
ensures !(exists p: seq<Node> :: pathSpecific(p, root, goal, graph))
{
forall p { DisproofLemma(p, subgraph, root, goal, graph); }
}
| // Working through https://dafny.org/dafny/OnlineTutorial/guide
function fib(n: nat): nat
{
if n == 0 then 0
else if n == 1 then 1
else fib(n - 1) + fib(n - 2)
}
method ComputeFib(n: nat) returns (ret: nat)
ensures ret == fib(n)
{
var a := 0;
var b := 1;
var i := 0;
while i < n
{
a, b := b, a+b;
i := i + 1;
}
return a;
}
method Find(a: array<int>, key: int) returns (index: int)
ensures 0 <= index ==> index < a.Length && a[index] == key
ensures index < 0 ==> (forall k :: 0 <= k < a.Length ==> a[k] != key)
{
index := 0;
while index < a.Length
{
if a[index] == key {
return index;
}
index := index + 1;
}
return -1;
}
predicate sorted(a: array<int>)
reads a
{
forall n, m :: 0 <= n < m < a.Length ==> a[n] <= a[m]
}
method BinarySearch(a: array<int>, value: int) returns (index: int)
requires 0 <= a.Length && sorted(a)
ensures 0 <= index ==> index < a.Length && a[index] == value
ensures index < 0 ==> forall k :: 0 <= k < a.Length ==> a[k] != value
{
var low := 0;
var high := a.Length - 1;
while low < high
{
var mid : int := (low + high) / 2;
if a[mid] < value {
low := mid + 1;
} else if a[mid] > value {
high := mid - 1;
} else {
return mid;
}
}
if low < a.Length && a[low] == value {
return low;
} else {
return -1;
}
}
// https://dafny.org/dafny/OnlineTutorial/ValueTypes
function update(s: seq<int>, i: int, v: int): seq<int>
requires 0 <= i < |s|
ensures update(s, i, v) == s[i := v]
{
s[..i] + [v] + s[i+1..]
}
// https://dafny.org/dafny/OnlineTutorial/Lemmas
lemma SkippingLemma(a: array<int>, j: int)
requires forall i :: 0 <= i < a.Length ==> 0 <= a[i]
requires forall i :: 0 < i < a.Length ==> a[i-1]-1 <= a[i]
requires 0 <= j < a.Length
ensures forall i :: j <= i < j + a[j] && i < a.Length ==> a[i] != 0
{
var i := j;
while i < j + a[j] && i < a.Length
{
i := i + 1;
}
}
method FindZero(a: array<int>) returns (index: int)
requires forall i :: 0 <= i < a.Length ==> 0 <= a[i]
requires forall i :: 0 < i < a.Length ==> a[i-1]-1 <= a[i]
ensures index < 0 ==> forall i :: 0 <= i < a.Length ==> a[i] != 0
ensures 0 <= index ==> index < a.Length && a[index] == 0
{
index := 0;
while index < a.Length
{
if a[index] == 0 { return; }
SkippingLemma(a, index);
index := index + a[index];
}
index := -1;
}
function count(a: seq<bool>): nat
{
if |a| == 0 then 0 else
(if a[0] then 1 else 0) + count(a[1..])
}
lemma DistributiveLemma(a: seq<bool>, b: seq<bool>)
ensures count(a + b) == count(a) + count(b)
{
if a == [] {
} else {
// Unnecessary! DistributiveLemma(a[1..], b);
}
}
class Node
{
var next: seq<Node>
}
predicate closed(graph: set<Node>)
reads graph
{
forall i :: i in graph ==> forall k :: 0 <= k < |i.next| ==> i.next[k] in graph && i.next[k] != i
}
predicate path(p: seq<Node>, graph: set<Node>)
requires closed(graph) && 0 < |p|
reads graph
{
p[0] in graph &&
(|p| > 1 ==> p[1] in p[0].next && // the first link is valid, if it exists
               path(p[1..], graph))     // and the rest of the sequence is a valid path
}
predicate pathSpecific(p: seq<Node>, start: Node, end: Node, graph: set<Node>)
requires closed(graph)
reads graph
{
0 < |p| && // path is nonempty
start == p[0] && end == p[|p|-1] && // it starts and ends correctly
path(p, graph) // and it is a valid path
}
lemma DisproofLemma(p: seq<Node>, subgraph: set<Node>,
root: Node, goal: Node, graph: set<Node>)
requires closed(subgraph) && closed(graph) && subgraph <= graph
requires root in subgraph && goal in graph - subgraph
ensures !pathSpecific(p, root, goal, graph)
{
if |p| >= 2 && p[0] == root && p[1] in p[0].next {
DisproofLemma(p[1..], subgraph, p[1], goal, graph);
}
}
lemma ClosedLemma(subgraph: set<Node>, root: Node, goal: Node, graph: set<Node>)
requires closed(subgraph) && closed(graph) && subgraph <= graph
requires root in subgraph && goal in graph - subgraph
ensures !(exists p: seq<Node> :: pathSpecific(p, root, goal, graph))
{
forall p { DisproofLemma(p, subgraph, root, goal, graph); }
}
|
674 | dafny_tmp_tmp2ewu6s7x_ListReverse.dfy | function reverse(xs: seq<nat>): seq<nat>
{
if xs == [] then [] else reverse(xs[1..]) + [xs[0]]
}
lemma ReverseAppendDistr(xs: seq<nat>, ys: seq<nat>)
ensures reverse(xs + ys) == reverse(ys) + reverse(xs)
{
if {
case xs == [] =>
calc {
reverse([] + ys);
calc {
[] + ys;
ys;
}
reverse(ys);
reverse(ys) + reverse([]);
}
case xs != [] => {
var zs := xs + ys;
assert zs[1..] == xs[1..] + ys;
}
}
}
lemma ReverseInvolution(xxs: seq<nat>)
ensures reverse(reverse(xxs)) == xxs
{
if {
case xxs == [] => {}
case xxs != [] => calc {
reverse(reverse(xxs));
==
reverse(reverse(xxs[1..]) + [xxs[0]]);
==
{ ReverseAppendDistr(reverse(xxs[1..]), [xxs[0]]); }
reverse([xxs[0]]) + reverse(reverse(xxs[1..]));
==
{ ReverseInvolution(xxs[1..]); }
calc {
reverse([xxs[0]]);
==
[] + [xxs[0]];
==
[xxs[0]];
}
[xxs[0]] + xxs[1..];
==
xxs;
}
}
}
| function reverse(xs: seq<nat>): seq<nat>
{
if xs == [] then [] else reverse(xs[1..]) + [xs[0]]
}
lemma ReverseAppendDistr(xs: seq<nat>, ys: seq<nat>)
ensures reverse(xs + ys) == reverse(ys) + reverse(xs)
{
if {
case xs == [] =>
calc {
reverse([] + ys);
calc {
[] + ys;
ys;
}
reverse(ys);
reverse(ys) + reverse([]);
}
case xs != [] => {
var zs := xs + ys;
}
}
}
lemma ReverseInvolution(xxs: seq<nat>)
ensures reverse(reverse(xxs)) == xxs
{
if {
case xxs == [] => {}
case xxs != [] => calc {
reverse(reverse(xxs));
==
reverse(reverse(xxs[1..]) + [xxs[0]]);
==
{ ReverseAppendDistr(reverse(xxs[1..]), [xxs[0]]); }
reverse([xxs[0]]) + reverse(reverse(xxs[1..]));
==
{ ReverseInvolution(xxs[1..]); }
calc {
reverse([xxs[0]]);
==
[] + [xxs[0]];
==
[xxs[0]];
}
[xxs[0]] + xxs[1..];
==
xxs;
}
}
}
|
675 | dafny_tmp_tmp49a6ihvk_m4.dfy | datatype Color = Red | White | Blue
predicate Below(c: Color, d: Color)
{
c == Red || c == d || d == Blue
}
method DutchFlag(a: array<Color>)
modifies a
ensures forall i, j :: 0 <= i < j < a.Length ==> Below(a[i], a[j])
ensures multiset(a[..]) == multiset(old(a[..]))
{
var r,w,b := 0, 0, a.Length;
while w < b
invariant 0 <= r <= w <= b <= a.Length
invariant forall i :: 0 <= i < r ==> a[i] == Red
invariant forall i :: r <= i < w ==> a[i] == White
invariant forall i :: b <= i < a.Length ==> a[i] == Blue
invariant multiset(a[..]) == multiset(old(a[..]))
{
match a[w]
case Red =>
a[r], a[w] := a[w], a[r];
r, w := r + 1, w + 1;
case White =>
w := w + 1;
case Blue =>
a[b-1], a[w] := a[w], a[b-1];
b := b - 1;
}
}
| datatype Color = Red | White | Blue
predicate Below(c: Color, d: Color)
{
c == Red || c == d || d == Blue
}
method DutchFlag(a: array<Color>)
modifies a
ensures forall i, j :: 0 <= i < j < a.Length ==> Below(a[i], a[j])
ensures multiset(a[..]) == multiset(old(a[..]))
{
var r,w,b := 0, 0, a.Length;
while w < b
{
match a[w]
case Red =>
a[r], a[w] := a[w], a[r];
r, w := r + 1, w + 1;
case White =>
w := w + 1;
case Blue =>
a[b-1], a[w] := a[w], a[b-1];
b := b - 1;
}
}
|
676 | dafny_tmp_tmp59p638nn_examples_GenericSelectionSort.dfy |
trait Comparable<T(==)> {
function Lt(x: T, y: T): bool
}
trait Sorted<T(==)> extends Comparable<T> {
ghost predicate Ordered(a: array<T>, left: nat, right: nat)
reads a
requires left <= right <= a.Length
{
forall i: nat :: 0 < left <= i < right ==> Lt(a[i-1],a[i]) || a[i-1] == a[i]
}
twostate predicate Preserved(a: array<T>, left: nat, right: nat)
reads a
requires left <= right <= a.Length
{
multiset(a[left..right]) == multiset(old(a[left..right]))
}
twostate predicate Sorted(a: array<T>)
reads a
{
Ordered(a,0,a.Length) && Preserved(a,0,a.Length)
}
}
// trait SelectionSort<T(==)> extends Comparable<T>, Sorted<T> {
// method SelectionSort(a: array<T>)
// modifies a
// ensures Sorted(a)
// {
// for i := 0 to a.Length
// invariant Ordered(a,0,i)
// invariant Preserved(a,0,a.Length)
// {
// var minValue := a[i];
// var minPos := i;
// for j := i + 1 to a.Length
// invariant minPos < a.Length
// invariant a[minPos] == minValue
// {
// if Lt(a[j], minValue) {
// minValue := a[j];
// minPos := j;
// }
// }
// if i != minPos {
// a[i], a[minPos] := minValue, a[i];
// }
// }
// }
// }
class Sort<T(==)> extends SelectionSort<T> {
const CMP: (T,T) -> bool
constructor(cmp: (T,T) -> bool)
ensures CMP == cmp
ensures comparisonCount == 0
{
CMP := cmp;
comparisonCount := 0;
}
function Lt(x: T, y: T): bool {
CMP(x,y)
}
}
ghost function Sum(x: int): nat
{
if x <= 0 then 0 else x + Sum(x-1)
}
trait Measurable<T(==)> extends Comparable<T> {
ghost var comparisonCount: nat
method Ltm(x: T, y: T) returns (b: bool)
modifies this`comparisonCount
ensures b ==> Lt(x,y)
ensures comparisonCount == old(comparisonCount) + 1
{
comparisonCount := comparisonCount + 1;
b := Lt(x,y);
}
}
trait SelectionSort<T(==)> extends Comparable<T>, Measurable<T>, Sorted<T> {
method SelectionSort(a: array<T>)
modifies a, this
requires comparisonCount == 0
ensures Sorted(a)
ensures comparisonCount <= a.Length * a.Length
{
for i := 0 to a.Length
invariant Ordered(a,0,i)
invariant Preserved(a,0,a.Length)
invariant comparisonCount == i * a.Length - Sum(i)
{
var minValue := a[i];
var minPos := i;
assert comparisonCount == i * a.Length - Sum(i) + (i + 1 - i) - 1;
for j := i + 1 to a.Length
invariant minPos < a.Length
invariant a[minPos] == minValue
invariant Preserved(a,0,a.Length)
invariant comparisonCount == i * a.Length - Sum(i) + (j - i) - 1
{
label L:
var cmp := Ltm(a[j], minValue);
assert a[..] == old@L(a[..]);
if cmp {
minValue := a[j];
minPos := j;
}
assert(i * a.Length - Sum(i) + (j - i) - 1) + 1 == i * a.Length - Sum(i) + ((j + 1) - i) - 1;
}
if i != minPos {
a[i], a[minPos] := minValue, a[i];
}
assert comparisonCount == (i+1) * a.Length - Sum(i+1);
}
}
}
method Main()
{
var a: array<int> := new int[3];
a[0] := 2; a[1] := 4; a[2] := 1;
var Sort := new Sort((x: int, y: int) => x < y);
Sort.SelectionSort(a);
print a[..];
}
|
trait Comparable<T(==)> {
function Lt(x: T, y: T): bool
}
trait Sorted<T(==)> extends Comparable<T> {
ghost predicate Ordered(a: array<T>, left: nat, right: nat)
reads a
requires left <= right <= a.Length
{
forall i: nat :: 0 < left <= i < right ==> Lt(a[i-1],a[i]) || a[i-1] == a[i]
}
twostate predicate Preserved(a: array<T>, left: nat, right: nat)
reads a
requires left <= right <= a.Length
{
multiset(a[left..right]) == multiset(old(a[left..right]))
}
twostate predicate Sorted(a: array<T>)
reads a
{
Ordered(a,0,a.Length) && Preserved(a,0,a.Length)
}
}
// trait SelectionSort<T(==)> extends Comparable<T>, Sorted<T> {
// method SelectionSort(a: array<T>)
// modifies a
// ensures Sorted(a)
// {
// for i := 0 to a.Length
// invariant Ordered(a,0,i)
// invariant Preserved(a,0,a.Length)
// {
// var minValue := a[i];
// var minPos := i;
// for j := i + 1 to a.Length
// invariant minPos < a.Length
// invariant a[minPos] == minValue
// {
// if Lt(a[j], minValue) {
// minValue := a[j];
// minPos := j;
// }
// }
// if i != minPos {
// a[i], a[minPos] := minValue, a[i];
// }
// }
// }
// }
class Sort<T(==)> extends SelectionSort<T> {
const CMP: (T,T) -> bool
constructor(cmp: (T,T) -> bool)
ensures CMP == cmp
ensures comparisonCount == 0
{
CMP := cmp;
comparisonCount := 0;
}
function Lt(x: T, y: T): bool {
CMP(x,y)
}
}
ghost function Sum(x: int): nat
{
if x <= 0 then 0 else x + Sum(x-1)
}
trait Measurable<T(==)> extends Comparable<T> {
ghost var comparisonCount: nat
method Ltm(x: T, y: T) returns (b: bool)
modifies this`comparisonCount
ensures b ==> Lt(x,y)
ensures comparisonCount == old(comparisonCount) + 1
{
comparisonCount := comparisonCount + 1;
b := Lt(x,y);
}
}
trait SelectionSort<T(==)> extends Comparable<T>, Measurable<T>, Sorted<T> {
method SelectionSort(a: array<T>)
modifies a, this
requires comparisonCount == 0
ensures Sorted(a)
ensures comparisonCount <= a.Length * a.Length
{
for i := 0 to a.Length
{
var minValue := a[i];
var minPos := i;
for j := i + 1 to a.Length
{
label L:
var cmp := Ltm(a[j], minValue);
if cmp {
minValue := a[j];
minPos := j;
}
}
if i != minPos {
a[i], a[minPos] := minValue, a[i];
}
}
}
}
method Main()
{
var a: array<int> := new int[3];
a[0] := 2; a[1] := 4; a[2] := 1;
var Sort := new Sort((x: int, y: int) => x < y);
Sort.SelectionSort(a);
print a[..];
}
|
677 | dafny_tmp_tmp59p638nn_examples_SelectionSort.dfy |
twostate predicate Preserved(a: array<int>, left: nat, right: nat)
reads a
requires left <= right <= a.Length
{
multiset(a[left..right]) == multiset(old(a[left..right]))
}
ghost predicate Ordered(a: array<int>, left: nat, right: nat)
reads a
requires left <= right <= a.Length
{
forall i: nat :: 0 < left <= i < right ==> a[i-1] <= a[i]
}
twostate predicate Sorted(a: array<int>)
reads a
{
Ordered(a,0,a.Length) && Preserved(a,0,a.Length)
}
method SelectionnSort(a: array<int>)
modifies a
ensures Sorted(a)
{
for i := 0 to a.Length
invariant Ordered(a,0,i)
invariant Preserved(a,0,a.Length)
{
var minValue := a[i];
var minPos := i;
for j := i + 1 to a.Length
invariant minPos < a.Length
invariant a[minPos] == minValue
{
if a[j] < minValue {
minValue := a[j];
minPos := j;
}
}
if i != minPos {
a[i], a[minPos] := minValue, a[i];
}
}
}
method SelectionSort(a: array<int>)
modifies a
ensures Sorted(a)
{
for i := 0 to a.Length
invariant Ordered(a,0,i)
invariant Preserved(a,0,a.Length)
{
ghost var minValue := a[i];
for j := i + 1 to a.Length
invariant a[i] == minValue
invariant Preserved(a,0,a.Length)
{
label L:
// assert a[..] == a[0..a.Length];
assert a[..] == old@L(a[..]);
if a[j] < minValue {
minValue := a[j];
}
if a[j] < a[i] {
assert j != i;
a[i], a[j] := a[j], a[i];
// assert Preserved(a, 0, a.Length);
}else{
// assert Preserved(a, 0, a.Length);
}
}
assert a[i] == minValue;
}
}
|
twostate predicate Preserved(a: array<int>, left: nat, right: nat)
reads a
requires left <= right <= a.Length
{
multiset(a[left..right]) == multiset(old(a[left..right]))
}
ghost predicate Ordered(a: array<int>, left: nat, right: nat)
reads a
requires left <= right <= a.Length
{
forall i: nat :: 0 < left <= i < right ==> a[i-1] <= a[i]
}
twostate predicate Sorted(a: array<int>)
reads a
{
Ordered(a,0,a.Length) && Preserved(a,0,a.Length)
}
method SelectionnSort(a: array<int>)
modifies a
ensures Sorted(a)
{
for i := 0 to a.Length
{
var minValue := a[i];
var minPos := i;
for j := i + 1 to a.Length
{
if a[j] < minValue {
minValue := a[j];
minPos := j;
}
}
if i != minPos {
a[i], a[minPos] := minValue, a[i];
}
}
}
method SelectionSort(a: array<int>)
modifies a
ensures Sorted(a)
{
for i := 0 to a.Length
{
ghost var minValue := a[i];
for j := i + 1 to a.Length
{
label L:
// assert a[..] == a[0..a.Length];
if a[j] < minValue {
minValue := a[j];
}
if a[j] < a[i] {
a[i], a[j] := a[j], a[i];
// assert Preserved(a, 0, a.Length);
}else{
// assert Preserved(a, 0, a.Length);
}
}
}
}
|
678 | dafny_tmp_tmp59p638nn_examples_derangement.dfy |
predicate derangement(s: seq<nat>) {
forall i :: 0 <= i < |s| ==> s[i] != i
}
predicate permutation(s: seq<nat>) {
forall i :: 0 <= i < |s| ==> i in s
}
function multisetRange(n: nat): multiset<nat> {
multiset(seq(n, i => i))
}
predicate distinct<A(==)>(s: seq<A>) {
forall x,y :: x != y && 0 <= x <= y < |s| ==> s[x] != s[y]
}
method test() {
var tests := [2,0,1];
var tests2 := [0,1,2];
var t4 := seq(3, i => i);
var test3 := multisetRange(3);
assert t4 == tests2;
assert 0 in t4;
assert 0 in test3;
assert multiset(tests) == multisetRange(3);
assert derangement(tests);
assert permutation(tests);
assert permutation(tests2);
// assert !derangement(tests2);
}
method {:timelimit 40} end(links: seq<nat>)
requires |links| > 0
requires permutation(links)
requires derangement(links)
requires distinct(links)
{
assume forall x :: x in links ==> 0 <= x < |links|;
assume forall x :: x in links ==> multiset(links)[x] ==1;
// assume multiset(links) == multisetRange(|links|);
var qAct: nat := links[0];
assert links[0] in links;
var i : nat := 0;
ghost var oldIndex := 0;
ghost var indices: multiset<nat> := multiset{0};
ghost var visited: multiset<nat> := multiset{};
while qAct != 0
invariant 0 <= oldIndex < |links|
invariant qAct == links[oldIndex]
invariant oldIndex in indices
invariant qAct in links
invariant indices == visited + multiset{0}
invariant forall x :: x in visited ==> exists k :: 0 <= k < |links| && links[k] == x && k in indices
invariant qAct !in visited
invariant 0 <= qAct < |links|
decreases multiset(links) - visited
{
ghost var oldVisit := visited;
ghost var oldqAct := qAct;
ghost var oldOldIndex := oldIndex;
oldIndex := qAct;
visited := visited + multiset{qAct};
indices := indices + multiset{qAct};
assert oldqAct in visited;
assert forall x :: x in visited ==> exists k :: 0 <= k < |links| && links[k] == x && k in indices;// by {
// forall x | x in visited
// ensures exists k :: 0 <= k < |links| && links[k] == x && k in indices
// {
// if x == oldqAct {
// // assert links[oldOldIndex] == oldqAct;
// // assert exists k :: 0 <= k < |links| && links[k] == x && k in indices;
// }else {
// // assert x in oldVisit;
// // assert exists k :: 0 <= k < |links| && links[k] == x && k in indices;
// }
// }
//}
qAct := links[qAct];
i := i + 1;
}
}
|
predicate derangement(s: seq<nat>) {
forall i :: 0 <= i < |s| ==> s[i] != i
}
predicate permutation(s: seq<nat>) {
forall i :: 0 <= i < |s| ==> i in s
}
function multisetRange(n: nat): multiset<nat> {
multiset(seq(n, i => i))
}
predicate distinct<A(==)>(s: seq<A>) {
forall x,y :: x != y && 0 <= x <= y < |s| ==> s[x] != s[y]
}
method test() {
var tests := [2,0,1];
var tests2 := [0,1,2];
var t4 := seq(3, i => i);
var test3 := multisetRange(3);
// assert !derangement(tests2);
}
method {:timelimit 40} end(links: seq<nat>)
requires |links| > 0
requires permutation(links)
requires derangement(links)
requires distinct(links)
{
assume forall x :: x in links ==> 0 <= x < |links|;
assume forall x :: x in links ==> multiset(links)[x] ==1;
// assume multiset(links) == multisetRange(|links|);
var qAct: nat := links[0];
var i : nat := 0;
ghost var oldIndex := 0;
ghost var indices: multiset<nat> := multiset{0};
ghost var visited: multiset<nat> := multiset{};
while qAct != 0
{
ghost var oldVisit := visited;
ghost var oldqAct := qAct;
ghost var oldOldIndex := oldIndex;
oldIndex := qAct;
visited := visited + multiset{qAct};
indices := indices + multiset{qAct};
// forall x | x in visited
// ensures exists k :: 0 <= k < |links| && links[k] == x && k in indices
// {
// if x == oldqAct {
// // assert links[oldOldIndex] == oldqAct;
// // assert exists k :: 0 <= k < |links| && links[k] == x && k in indices;
// }else {
// // assert x in oldVisit;
// // assert exists k :: 0 <= k < |links| && links[k] == x && k in indices;
// }
// }
//}
qAct := links[qAct];
i := i + 1;
}
}
|
679 | dafny_tmp_tmp59p638nn_examples_minmax2.dfy | method DifferenceMinMax(a: array<int>) returns (diff: int)
requires a.Length > 0
ensures diff == (Max(a[..]) - Min(a[..]))
{
var minVal := a[0];
var maxVal := a[0];
for i := 1 to a.Length
invariant 1 <= i <= a.Length
invariant minVal <= maxVal
invariant forall k :: 0 <= k < i ==> minVal <= a[k] && a[k] <= maxVal
invariant minVal == Min(a[..i])
invariant maxVal == Max(a[..i])
{
if a[i] < minVal {
minVal := a[i];
} else if a[i] > maxVal {
maxVal := a[i];
}
assert a[..i+1][..i] == a[..i];
}
assert a[..a.Length] == a[..];
diff := maxVal - minVal;
}
function Min(a: seq<int>) : (m: int)
requires |a| > 0
{
if |a| == 1 then a[0]
else
var minPrefix := Min(a[..|a|-1]);
if a[|a|-1] <= minPrefix then a[|a|-1] else minPrefix
}
function Max(a: seq<int>) : (m: int)
requires |a| > 0
{
if |a| == 1 then a[0]
else
var maxPrefix := Max(a[..|a|-1]);
if a[|a|-1] >= maxPrefix then a[|a|-1] else maxPrefix
}
| method DifferenceMinMax(a: array<int>) returns (diff: int)
requires a.Length > 0
ensures diff == (Max(a[..]) - Min(a[..]))
{
var minVal := a[0];
var maxVal := a[0];
for i := 1 to a.Length
{
if a[i] < minVal {
minVal := a[i];
} else if a[i] > maxVal {
maxVal := a[i];
}
}
diff := maxVal - minVal;
}
function Min(a: seq<int>) : (m: int)
requires |a| > 0
{
if |a| == 1 then a[0]
else
var minPrefix := Min(a[..|a|-1]);
if a[|a|-1] <= minPrefix then a[|a|-1] else minPrefix
}
function Max(a: seq<int>) : (m: int)
requires |a| > 0
{
if |a| == 1 then a[0]
else
var maxPrefix := Max(a[..|a|-1]);
if a[|a|-1] >= maxPrefix then a[|a|-1] else maxPrefix
}
|
680 | dafny_tmp_tmp59p638nn_examples_realExponent.dfy |
ghost function power(n: real, alpha: real): real
requires n > 0.0 && alpha > 0.0
ensures power(n, alpha) > 0.0
ghost function log(n: real, alpha: real): real
requires n > 0.0 && alpha > 0.0
ensures log(n, alpha) > 0.0
lemma consistency(n: real, alpha: real)
requires n > 0.0 && alpha > 0.0
ensures log(power(n,alpha), alpha) == n
ensures power(log(n, alpha), alpha) == n
lemma logarithmSum(n: real, alpha: real, x: real, y: real)
requires n > 0.0 && alpha > 0.0
requires x > 0.0
requires n == x * y
ensures log(n,alpha) == log(x, alpha) + log(y, alpha)
lemma powerLemma(n: real, alpha: real)
requires n > 0.0 && alpha > 0.0
ensures power(n, alpha) * alpha == power(n+1.0, alpha)
lemma power1(alpha: real)
requires alpha > 0.0
ensures power(1.0, alpha) == alpha
lemma test() {
var pow3 := power(3.0,4.0);
consistency(3.0,4.0);
assert log(pow3, 4.0) == 3.0;
var log6 := log(6.0,8.0);
logarithmSum(6.0, 8.0, 2.0, 3.0);
assert log6 == log(2.0,8.0)+log(3.0,8.0);
}
lemma test2() {
var pow3 := power(3.0, 4.0);
var power4 := power(4.0, 4.0);
powerLemma(3.0, 4.0);
assert pow3 * 4.0 == power4;
}
method pow(n: nat, alpha: real) returns (product: real)
requires n > 0
requires alpha > 0.0
ensures product == power(n as real, alpha)
{
product := alpha;
var i: nat := 1;
power1(alpha);
assert product == power(1.0, alpha);
while i < n
invariant i <= n
invariant product == power(i as real, alpha)
{
powerLemma(i as real, alpha);
product := product * alpha;
i := i + 1;
}
assert i == n;
assert product == power(n as real, alpha);
}
|
ghost function power(n: real, alpha: real): real
requires n > 0.0 && alpha > 0.0
ensures power(n, alpha) > 0.0
ghost function log(n: real, alpha: real): real
requires n > 0.0 && alpha > 0.0
ensures log(n, alpha) > 0.0
lemma consistency(n: real, alpha: real)
requires n > 0.0 && alpha > 0.0
ensures log(power(n,alpha), alpha) == n
ensures power(log(n, alpha), alpha) == n
lemma logarithmSum(n: real, alpha: real, x: real, y: real)
requires n > 0.0 && alpha > 0.0
requires x > 0.0
requires n == x * y
ensures log(n,alpha) == log(x, alpha) + log(y, alpha)
lemma powerLemma(n: real, alpha: real)
requires n > 0.0 && alpha > 0.0
ensures power(n, alpha) * alpha == power(n+1.0, alpha)
lemma power1(alpha: real)
requires alpha > 0.0
ensures power(1.0, alpha) == alpha
lemma test() {
var pow3 := power(3.0,4.0);
consistency(3.0,4.0);
var log6 := log(6.0,8.0);
logarithmSum(6.0, 8.0, 2.0, 3.0);
}
lemma test2() {
var pow3 := power(3.0, 4.0);
var power4 := power(4.0, 4.0);
powerLemma(3.0, 4.0);
}
method pow(n: nat, alpha: real) returns (product: real)
requires n > 0
requires alpha > 0.0
ensures product == power(n as real, alpha)
{
product := alpha;
var i: nat := 1;
power1(alpha);
while i < n
{
powerLemma(i as real, alpha);
product := product * alpha;
i := i + 1;
}
}
|
681 | eth2-dafny_tmp_tmpcrgexrgb_src_dafny_utils_SetHelpers.dfy | /*
* Copyright 2021 ConsenSys Software Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software dis-
* tributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/**
* Provide some folk theorems on sets.
*/
module SetHelpers {
/**
* If a set is included in another one, their intersection
* is the smallest one.
*
* @param T A type.
* @param x A finite set.
* @param y A finite set.
* @returns A proof that x <= y implies x * y == x.
*/
lemma interSmallest<T>(x : set<T>, y : set<T>)
requires x <= y
ensures x * y == x
decreases y
{ // Thanks Dafny
}
/**
* If x [= {0, ..., k - 1} and y [= {0, .., k - 1}
* then x U y has at most k elements.
*
* @param T A type.
* @param x A finite set.
* @param y A finite set.
* @param k k a natural number.
* @returns A proof that if x [= {0, ..., k - 1} and y [= {0, .., k - 1}
* then |x + y| <=k.
*/
lemma unionCardBound(x : set<nat>, y : set<nat>, k : nat)
requires forall e :: e in x ==> e < k
requires forall e :: e in y ==> e < k
ensures forall e :: e in x + y ==> e < k
ensures |x + y| <= k
{
natSetCardBound(x + y, k);
}
/**
* If x [= {0, ..., k - 1} then x has at most k elements.
*
* @param T A type.
* @param x A finite set.
* @param k k a natural number.
* @returns A proof that if x [= {0, ..., k - 1} then |x| <= k.
*/
lemma natSetCardBound(x : set<nat>, k : nat)
requires forall e :: e in x ==> e < k
ensures |x| <= k
decreases k
{
if k == 0 {
assert(x == { });
} else {
natSetCardBound(x - { k - 1}, k - 1);
}
}
/**
* If x contains all successive elements {0, ..., k-1} then x has k elements.
*
* @param T A type.
* @param x A finite set.
* @param k k a natural number.
* @returns A proof that if x = {0, ..., k - 1} then |x| == k.
*/
lemma {:induction k} successiveNatSetCardBound(x : set<nat>, k : nat)
requires x == set x: nat | 0 <= x < k :: x
ensures |x| == k
{
if k == 0 {
// Thanks Dafny
} else {
successiveNatSetCardBound(x - {k - 1}, k - 1);
}
}
/**
* If a finite set x is included in a finite set y, then
* card(x) <= card(y).
*
* @param T A type.
* @param x A finite set.
* @param y A finite set.
* @returns A proof that x <= y implies card(x) <= card(y)
* in other terms, card(_) is monotonic.
*/
lemma cardIsMonotonic<T>(x : set<T>, y : set<T>)
requires x <= y
ensures |x| <= |y|
decreases y
{
if |y| == 0 {
// Thanks Dafny
} else {
// |y| >= 1, get an element in y
var e :| e in y;
var y' := y - { e };
// Split recursion according to whether e in x or not
cardIsMonotonic(if e in x then x - {e} else x, y');
}
}
/**
* If two finite sets x and y are included in another one z and
* have more than 2/3(|z|) elements, then their intersection has more
  * than |z|/3 elements.
*
* @param T A type.
* @param x A finite set.
* @param y A finite set.
* @param z A finite set.
* @returns A proof that if two finite sets x and y are included in
* another one z and have more than 2/3(|z|) elements, then
  * their intersection has more than |z|/3 elements.
*/
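    // Editor's illustration (not part of the original file): with |z| = 9, the
    // hypotheses force |x|, |y| >= 2*9/3 + 1 = 7, and since
    // |x + y| == |x| + |y| - |x * y| <= |z| = 9, it follows that |x * y| >= 5 >= 9/3 + 1.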
lemma pigeonHolePrinciple<T>(x: set<T>, y : set<T>, z : set<T>)
requires x <= z
requires y <= z
requires |x| >= 2 * |z| / 3 + 1 // or equivalently 2 * |z| < 3 * |x|
requires |y| >= 2 * |z| / 3 + 1 // or equivalently 2 * |z| < 3 * |y|
    ensures |x * y| >= |z| / 3 + 1 // or equivalently 3 * |x * y| > |z|
{
// Proof of alternative assumption
assert(|x| >= 2 * |z| / 3 + 1 <==> 2 * |z| < 3 * |x|);
assert(|y| >= 2 * |z| / 3 + 1 <==> 2 * |z| < 3 * |y|);
// Proof by contradiction
if |x * y| < |z| / 3 + 1 {
// size of union is sum of sizes minus size of intersection.
calc == {
|x + y|;
|x| + |y| - |x * y|;
}
cardIsMonotonic(x + y, z);
}
// proof of alternative conclusion
assert(3 * |x * y| > |z| <==> |x * y| >= |z| / 3 + 1 );
}
}
| /*
* Copyright 2021 ConsenSys Software Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software dis-
* tributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/**
* Provide some folk theorems on sets.
*/
module SetHelpers {
/**
* If a set is included in another one, their intersection
* is the smallest one.
*
* @param T A type.
* @param x A finite set.
* @param y A finite set.
* @returns A proof that x <= y implies x * y == x.
*/
lemma interSmallest<T>(x : set<T>, y : set<T>)
requires x <= y
ensures x * y == x
{ // Thanks Dafny
}
/**
* If x [= {0, ..., k - 1} and y [= {0, .., k - 1}
* then x U y has at most k elements.
*
* @param T A type.
* @param x A finite set.
* @param y A finite set.
* @param k k a natural number.
* @returns A proof that if x [= {0, ..., k - 1} and y [= {0, .., k - 1}
* then |x + y| <=k.
*/
lemma unionCardBound(x : set<nat>, y : set<nat>, k : nat)
requires forall e :: e in x ==> e < k
requires forall e :: e in y ==> e < k
ensures forall e :: e in x + y ==> e < k
ensures |x + y| <= k
{
natSetCardBound(x + y, k);
}
/**
* If x [= {0, ..., k - 1} then x has at most k elements.
*
* @param T A type.
* @param x A finite set.
* @param k k a natural number.
* @returns A proof that if x [= {0, ..., k - 1} then |x| <= k.
*/
lemma natSetCardBound(x : set<nat>, k : nat)
requires forall e :: e in x ==> e < k
ensures |x| <= k
{
if k == 0 {
} else {
natSetCardBound(x - { k - 1}, k - 1);
}
}
/**
* If x contains all successive elements {0, ..., k-1} then x has k elements.
*
* @param T A type.
* @param x A finite set.
* @param k k a natural number.
* @returns A proof that if x = {0, ..., k - 1} then |x| == k.
*/
lemma {:induction k} successiveNatSetCardBound(x : set<nat>, k : nat)
requires x == set x: nat | 0 <= x < k :: x
ensures |x| == k
{
if k == 0 {
// Thanks Dafny
} else {
successiveNatSetCardBound(x - {k - 1}, k - 1);
}
}
/**
* If a finite set x is included in a finite set y, then
* card(x) <= card(y).
*
* @param T A type.
* @param x A finite set.
* @param y A finite set.
* @returns A proof that x <= y implies card(x) <= card(y)
* in other terms, card(_) is monotonic.
*/
lemma cardIsMonotonic<T>(x : set<T>, y : set<T>)
requires x <= y
ensures |x| <= |y|
{
if |y| == 0 {
// Thanks Dafny
} else {
// |y| >= 1, get an element in y
var e :| e in y;
var y' := y - { e };
// Split recursion according to whether e in x or not
cardIsMonotonic(if e in x then x - {e} else x, y');
}
}
/**
* If two finite sets x and y are included in another one z and
* have more than 2/3(|z|) elements, then their intersection has more
  * than |z|/3 elements.
*
* @param T A type.
* @param x A finite set.
* @param y A finite set.
* @param z A finite set.
* @returns A proof that if two finite sets x and y are included in
* another one z and have more than 2/3(|z|) elements, then
  * their intersection has more than |z|/3 elements.
*/
lemma pigeonHolePrinciple<T>(x: set<T>, y : set<T>, z : set<T>)
requires x <= z
requires y <= z
requires |x| >= 2 * |z| / 3 + 1 // or equivalently 2 * |z| < 3 * |x|
requires |y| >= 2 * |z| / 3 + 1 // or equivalently 2 * |z| < 3 * |y|
    ensures |x * y| >= |z| / 3 + 1 // or equivalently 3 * |x * y| > |z|
{
// Proof of alternative assumption
// Proof by contradiction
if |x * y| < |z| / 3 + 1 {
// size of union is sum of sizes minus size of intersection.
calc == {
|x + y|;
|x| + |y| - |x * y|;
}
cardIsMonotonic(x + y, z);
}
// proof of alternative conclusion
}
}
|
682 | feup-mfes_tmp_tmp6_a1y5a5_examples_SelectionSort.dfy | /*
* Formal verification of the selection sort algorithm with Dafny.
* FEUP, MIEIC, MFES, 2020/21.
*/
// Checks if array 'a' is sorted between positions 'from' (inclusive) and 'to' (exclusive).
predicate isSorted(a: array<real>, from: nat, to: nat)
requires 0 <= from <= to <= a.Length
reads a
{
forall i, j :: from <= i < j < to ==> a[i] <= a[j]
}
// Sorts array 'a' using the selection sort algorithm.
method selectionSort(a: array<real>)
modifies a
ensures isSorted(a, 0, a.Length)
ensures multiset(a[..]) == multiset(old(a[..]))
{
var i := 0;
while i < a.Length - 1
invariant 0 <= i <= a.Length
invariant isSorted(a, 0, i)
invariant forall lhs, rhs :: 0 <= lhs < i <= rhs < a.Length ==> a[lhs] <= a[rhs]
invariant multiset(a[..]) == multiset(old(a[..]))
{
var j := findMin(a, i, a.Length);
a[i], a[j] := a[j], a[i];
i := i + 1;
}
}
// Finds the position of a minimum value in non-empty subarray 'a' between positions
// 'from' (inclusive) and 'to' (exclusive)
method findMin(a: array<real>, from: nat, to: nat) returns(index: nat)
requires 0 <= from < to <= a.Length
ensures from <= index < to
ensures forall k :: from <= k < to ==> a[k] >= a[index]
{
var i := from + 1;
index := from; // position of min up to position i (excluded)
while i < to
decreases a.Length - i
invariant from <= index < i <= to
invariant forall k :: from <= k < i ==> a[k] >= a[index]
{
if a[i] < a[index] {
index := i;
}
i := i + 1;
}
}
method testSelectionSort() {
var a := new real[5] [9.0, 4.0, 6.0, 3.0, 8.0];
assert a[..] == [9.0, 4.0, 6.0, 3.0, 8.0]; // to help Dafny ...
selectionSort(a);
assert a[..] == [3.0, 4.0, 6.0, 8.0, 9.0];
}
method testFindMin() {
var a := new real[5] [9.0, 5.0, 6.0, 4.0, 8.0];
var m := findMin(a, 0, 5);
assert a[3] == 4.0; // to help Dafny ...
assert m == 3;
}
| /*
* Formal verification of the selection sort algorithm with Dafny.
* FEUP, MIEIC, MFES, 2020/21.
*/
// Checks if array 'a' is sorted between positions 'from' (inclusive) and 'to' (exclusive).
predicate isSorted(a: array<real>, from: nat, to: nat)
requires 0 <= from <= to <= a.Length
reads a
{
forall i, j :: from <= i < j < to ==> a[i] <= a[j]
}
// Sorts array 'a' using the selection sort algorithm.
method selectionSort(a: array<real>)
modifies a
ensures isSorted(a, 0, a.Length)
ensures multiset(a[..]) == multiset(old(a[..]))
{
var i := 0;
while i < a.Length - 1
{
var j := findMin(a, i, a.Length);
a[i], a[j] := a[j], a[i];
i := i + 1;
}
}
// Finds the position of a minimum value in non-empty subarray 'a' between positions
// 'from' (inclusive) and 'to' (exclusive)
method findMin(a: array<real>, from: nat, to: nat) returns(index: nat)
requires 0 <= from < to <= a.Length
ensures from <= index < to
ensures forall k :: from <= k < to ==> a[k] >= a[index]
{
var i := from + 1;
index := from; // position of min up to position i (excluded)
while i < to
{
if a[i] < a[index] {
index := i;
}
i := i + 1;
}
}
method testSelectionSort() {
var a := new real[5] [9.0, 4.0, 6.0, 3.0, 8.0];
selectionSort(a);
}
method testFindMin() {
var a := new real[5] [9.0, 5.0, 6.0, 4.0, 8.0];
var m := findMin(a, 0, 5);
}
|
683 | formal-methods-in-software-engineering_tmp_tmpe7fjnek6_Labs2_gr2.dfy | datatype Nat = Zero | Succ(Pred: Nat)
/*
Nat: Zero, Succ(Zero), Succ(Succ(Zero)), ...
*/
lemma Disc(n: Nat)
ensures n.Succ? || n.Zero?
{
//
}
lemma LPred(n: Nat)
ensures Succ(n).Pred == n
{
//
}
// Succ(m') > m'
function add(m: Nat, n: Nat) : Nat
decreases m
{
match m
case Zero => n
case Succ(m') => Succ(add(m', n))
}
// add(m, Zero) = m
lemma AddZero(m: Nat)
ensures add(m, Zero) == m
{
//
}
lemma AddAssoc(m: Nat, n: Nat, p: Nat)
ensures add(m, add(n, p)) == add(add(m, n), p)
{
//
}
lemma AddComm(m: Nat, n: Nat)
ensures add(m, n) == add(n, m)
{
match m
case Zero => AddZero(n);
case Succ(m') => AddComm(m', n);
}
predicate lt(m: Nat, n: Nat)
{
(m.Zero? && n.Succ?) ||
(m.Succ? && n.Succ? && lt(m.Pred, n.Pred))
}
lemma Test1(n:Nat)
ensures lt(n, Succ(Succ(n)))
{
//
}
lemma Test2(n: Nat)
ensures n < Succ(n)
{
//
}
/*
lemma L1()
ensures exists x: Nat :: x == Zero.Pred
{
//
}
*/
/*
lemma L2(m: Nat, n: Nat)
ensures lt(m, n) == lt(n, m)
{
//
}
*/
lemma LtTrans(m: Nat, n: Nat, p: Nat)
requires lt(m, n)
requires lt(n, p)
ensures lt(m, p)
{
//assert n.Succ?;
//assert p.Pred.Succ?;
/*
match m
case Zero => {
match n
case Zero => assert true;
case Succ(n') => LtTrans(Zero, n', p);
}
case Succ(m') => LtTrans(m', n, p);
*/
}
datatype List<T> = Nil | Cons(head: T, tail: List<T>)
lemma Disc2<T>(l: List<T>, a: T)
ensures Cons(a, l).head == a && Cons(a, l).tail == l
{
//
}
function size<T>(l: List<T>): nat
{
match l
case Nil => 0
case Cons(x, l') => size<T>(l') + 1
}
function app<T>(l1: List<T>, l2: List<T>) : List<T>
{
match l1
case Nil => l2
case Cons(x, l1') => Cons(x, app(l1', l2))
}
lemma LenApp<T>(l1: List<T>, l2: List<T>)
ensures size(app(l1, l2)) == size(l1) + size(l2)
{
//
}
/*
(1,(2,3)) -> ((3,2),1)
(x, l') -> (rev(l'), x)
*/
function rev<T> (l: List<T>) : List<T>
{
match l
case Nil => Nil
case Cons(x, l') => app(rev(l'), Cons(x, Nil))
}
lemma AppNil<T>(l: List<T>)
ensures app(l, Nil) == l
{
//
}
/*
lemma RevApp<T>(l1: List<T>, l2: List<T>)
ensures rev(app(l1, l2)) == app(rev(l2), rev(l1))
{
match l1
case Nil => AppNil(rev(l2));
case Cons(x, l1') => {
// rev(Cons(x, app(l1', l2))) == app(rev(app(l1', l2)), Cons(x, Nil)))
assert rev(Cons(x, app(l1', l2))) == app(rev(app(l1', l2)), Cons(x, Nil));
RevApp(l1', l2);
}
}
*/
lemma LR1<T> (l: List<T>, x: T)
ensures rev(app(l, Cons(x, Nil))) == Cons(x, rev(l))
{
//
}
lemma RevRev<T>(l: List<T>)
ensures rev(rev(l)) == l
{
match l
case Nil => assert true;
case Cons(x, l') => {
assert rev(rev(l)) == rev(app(rev(l'), Cons(x, Nil)));
LR1(rev(l'), x);
}
}
/*
HW1: Define over naturals (as an algebraic data type) the predicates odd(x) and even(x)
and prove that the addition of two odd numbers is an even number.
Deadline: Tuesday 12.10, 14:00
*/
| datatype Nat = Zero | Succ(Pred: Nat)
/*
Nat: Zero, Succ(Zero), Succ(Succ(Zero)), ...
*/
lemma Disc(n: Nat)
ensures n.Succ? || n.Zero?
{
//
}
lemma LPred(n: Nat)
ensures Succ(n).Pred == n
{
//
}
// Succ(m') > m'
function add(m: Nat, n: Nat) : Nat
{
match m
case Zero => n
case Succ(m') => Succ(add(m', n))
}
// add(m, Zero) = m
lemma AddZero(m: Nat)
ensures add(m, Zero) == m
{
//
}
lemma AddAssoc(m: Nat, n: Nat, p: Nat)
ensures add(m, add(n, p)) == add(add(m, n), p)
{
//
}
lemma AddComm(m: Nat, n: Nat)
ensures add(m, n) == add(n, m)
{
match m
case Zero => AddZero(n);
case Succ(m') => AddComm(m', n);
}
predicate lt(m: Nat, n: Nat)
{
(m.Zero? && n.Succ?) ||
(m.Succ? && n.Succ? && lt(m.Pred, n.Pred))
}
lemma Test1(n:Nat)
ensures lt(n, Succ(Succ(n)))
{
//
}
lemma Test2(n: Nat)
ensures n < Succ(n)
{
//
}
/*
lemma L1()
ensures exists x: Nat :: x == Zero.Pred
{
//
}
*/
/*
lemma L2(m: Nat, n: Nat)
ensures lt(m, n) == lt(n, m)
{
//
}
*/
lemma LtTrans(m: Nat, n: Nat, p: Nat)
requires lt(m, n)
requires lt(n, p)
ensures lt(m, p)
{
//assert n.Succ?;
//assert p.Pred.Succ?;
/*
match m
case Zero => {
match n
case Zero => assert true;
case Succ(n') => LtTrans(Zero, n', p);
}
case Succ(m') => LtTrans(m', n, p);
*/
}
datatype List<T> = Nil | Cons(head: T, tail: List<T>)
lemma Disc2<T>(l: List<T>, a: T)
ensures Cons(a, l).head == a && Cons(a, l).tail == l
{
//
}
function size<T>(l: List<T>): nat
{
match l
case Nil => 0
case Cons(x, l') => size<T>(l') + 1
}
function app<T>(l1: List<T>, l2: List<T>) : List<T>
{
match l1
case Nil => l2
case Cons(x, l1') => Cons(x, app(l1', l2))
}
lemma LenApp<T>(l1: List<T>, l2: List<T>)
ensures size(app(l1, l2)) == size(l1) + size(l2)
{
//
}
/*
(1,(2,3)) -> ((3,2),1)
(x, l') -> (rev(l'), x)
*/
function rev<T> (l: List<T>) : List<T>
{
match l
case Nil => Nil
case Cons(x, l') => app(rev(l'), Cons(x, Nil))
}
lemma AppNil<T>(l: List<T>)
ensures app(l, Nil) == l
{
//
}
/*
lemma RevApp<T>(l1: List<T>, l2: List<T>)
ensures rev(app(l1, l2)) == app(rev(l2), rev(l1))
{
match l1
case Nil => AppNil(rev(l2));
case Cons(x, l1') => {
// rev(Cons(x, app(l1', l2))) == app(rev(app(l1', l2)), Cons(x, Nil)))
RevApp(l1', l2);
}
}
*/
lemma LR1<T> (l: List<T>, x: T)
ensures rev(app(l, Cons(x, Nil))) == Cons(x, rev(l))
{
//
}
lemma RevRev<T>(l: List<T>)
ensures rev(rev(l)) == l
{
match l
case Nil => assert true;
case Cons(x, l') => {
LR1(rev(l'), x);
}
}
/*
HW1: Define over naturals (as an algebraic data type) the predicates odd(x) and even(x)
and prove that the addition of two odd numbers is an even number.
Deadline: Tuesday 12.10, 14:00
*/
|
684 | formal-methods-in-software-engineering_tmp_tmpe7fjnek6_Labs2_hw1.dfy | /*
HW1: Define over naturals (as an algebraic data type) the predicates odd(x) and even(x)
and prove that the addition of two odd numbers is an even number.
Deadline: Tuesday 12.10, 14:00
*/
datatype Nat = Zero | Succ(Pred: Nat)
function add(m: Nat, n: Nat) : Nat
decreases m
{
match m
case Zero => n
case Succ(m') => Succ(add(m', n))
}
predicate Odd(m: Nat)
decreases m
{
match m
case Zero => false
case Succ(m') => Even(m')
}
predicate Even(m: Nat)
decreases m
{
match m
case Zero => true
case Succ(m') => Odd(m')
}
lemma SumMNIsEven(m: Nat, n: Nat)
requires Odd(m)
requires Odd(n)
ensures Even(add(m,n))
{
match m
case Succ(Zero) => assert Even(add(Succ(Zero),n));
case Succ(Succ(m')) => SumMNIsEven(m',n);
}
| /*
HW1: Define over naturals (as an algebraic data type) the predicates odd(x) and even(x)
and prove that the addition of two odd numbers is an even number.
Deadline: Tuesday 12.10, 14:00
*/
datatype Nat = Zero | Succ(Pred: Nat)
function add(m: Nat, n: Nat) : Nat
{
match m
case Zero => n
case Succ(m') => Succ(add(m', n))
}
predicate Odd(m: Nat)
{
match m
case Zero => false
case Succ(m') => Even(m')
}
predicate Even(m: Nat)
{
match m
case Zero => true
case Succ(m') => Odd(m')
}
lemma SumMNIsEven(m: Nat, n: Nat)
requires Odd(m)
requires Odd(n)
ensures Even(add(m,n))
{
match m
case Succ(Zero) => assert Even(add(Succ(Zero),n));
case Succ(Succ(m')) => SumMNIsEven(m',n);
}
|
685 | formal-methods-in-software-engineering_tmp_tmpe7fjnek6_Labs4_gr2.dfy | /*
Dafny includes 2 languages:
 * a language for specification
   MSFOL (what we have discussed so far)
   annotations that help in the verification process
 * a language for writing programs
*/
// Example program
method SqrSum(n: int) returns (s: int)
{
var i,k : int;
s := 0;
k := 1;
i := 1;
while (i <= n)
decreases n - i
{
s := s + k;
k := k + 2 * i + 1;
i := i+1;
}
}
method DivMod(a: int, b: int) returns (q: int, r: int)
decreases *
{
q := 0;
r := a;
while (r >= b)
decreases *
{
r := r - b;
q := q + 1;
}
}
/*
triple Hoare (| P |) S (| Q |)
*/
// varianta assume-assert
method HoareTripleAssmAssrt()
{
var i: int := *;
var k: int := *;
// (| k == i*i |) k := k + 2 * i +1; (| k = (i+1)*(i+1) |)
assume k == i*i; // P = precondition
k := k + 2 * i + 1; // S
assert k == (i+1)*(i+1); // Q = postcondition
}
// the requires-ensures variant
method HoareTripleReqEns(i: int, k: int) returns (k': int)
// (| k == i*i |) k := k + 2 * i +1; (| k = (i+1)*(i+1) |)
requires k == i*i
ensures k' == (i+1)*(i+1)
{
k' := k + 2 * i + 1;
}
/*
    the rule for while loops
*/
// the variant with assert
/*
method WhileRule()
{
// var n: int := *; // havoc
// assume n >= 0;
var n: int :| n >= 0;
var y := n;
var x := 0;
assert (x + y) == n;
while (y >= 0)
decreases y
{
assert (x + y) == n; // fails
x := x+1;
y := y-1;
assert (x + y) == n;
}
assert (y < 0) && (x + y) == n;
}
*/
// the variant with an invariant
method Invariant1()
{
// var n: int := *; // havoc
var n: int :| n >= 0;
var y := n;
var x := 0;
while (y >= 0)
decreases y
invariant (x + y) == n;
{
x := x+1;
y := y-1;
}
assert (y < 0) && (x + y) == n;
}
// specification of the sum of squares
function SqrSumRec(n: int) : int
requires n >= 0
{
if (n == 0) then 0 else n*n + SqrSumRec(n-1)
}
/*
method SqrSum1(n: int) returns (s: int)
requires n >= 0
ensures s == SqrSumRec(n) // s = 0^2 + 1^2 + 2^2 + ... + n^2 == n(n+1)(2n+1)/6
{
// ???
}
*/
// verification of the program for the sum of squares
method SqrSum1(n: int) returns (s: int)
requires n >= 0
ensures s == SqrSumRec(n)
{
var i,k : int;
s := 0;
k := 1;
i := 1;
while (i <= n)
decreases n - i
invariant k == i*i
// s: 0*0, 0*0 + 1*1, 0*0 + 1*1 + 2*2, ...
// i: 1, 2, 3,
invariant s == SqrSumRec(i-1)
invariant i <= n+1
{
// k = i*i
s := s + k;
// k = i*i
k := k + 2 * i + 1;
// k = (i+1)*(i+1)
i := i+1;
// k = i*i
}
//s == SqrSumRec(i-1) && i <= n+1 && i > n
// implies
//s == SqrSumRec(n)
}
// SqrSumRec(n) = 0^2 + 1^2 + 2^2 + ... + n^2 == n(n+1)(2n+1)/6
least lemma L1(n: int)
requires n >= 0
ensures SqrSumRec(n) == n*(n+1)*(2*n + 1)/6
{
//OK
}
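// Editor's note (illustrative, not part of the original file): a quick sanity check of the
// closed form above for n = 3: SqrSumRec(3) = 9 + 4 + 1 + 0 = 14 and 3*4*7/6 = 14.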
/*
function SqrSumBy6(n: int) : int
{
n * (n + 1) * (2 * n + 1)
}
inductive lemma L(n: int) // it takes a while
decreases n
requires n >= 0
ensures SqrSumBy6(n) == 6 * SqrSumRec(n)
{
if (n == 0) {}
else {
assert n > 0;
L(n-1);
assert SqrSumBy6(n-1) == n*(n-1)*(2*n - 1);
assert SqrSumBy6(n-1) == 6*SqrSumRec(n-1);
assert 6*SqrSumRec(n-1) == n*(n-1)*(2*n - 1);
calc == {
n*((n-1)*(2*n - 1));
n*(2*n*(n-1) - n + 1);
n*(2*n*n - 3*n + 1);
n*(2*n*n - 3*n + 1);
}
calc == {
2*n*n + n;
(2*n + 1)*n;
}
calc == {
(2*n + 1)*n + (2*n + 1);
(2*n + 1)*(n+1);
}
calc == {
n*((n-1)*(2*n - 1)) + 6*n*n;
n*(2*n*(n-1) - n + 1) + 6*n*n;
n*(2*n*(n-1) - n + 1) + 6*n*n;
n*(2*n*n - 3*n + 1) + 6*n*n;
n*(2*n*n - 3*n + 1 + 6*n);
n*(2*n*n + 6*n - 3*n + 1);
n*(2*n*n + 3*n + 1);
n*(2*n*n + n + (2*n + 1));
n*((2*n + 1)*n + (2*n + 1));
n*((2*n + 1)*(n+1));
}
}
}
*/
method DivMod1(a: int, b: int) returns (q: int, r: int)
requires b > 0 && a >= 0
ensures a == b*q + r && 0 <= r < b
//decreases *
{
q := 0;
r := a;
while (r >= b)
invariant r >= 0
invariant a == b*q + r
      decreases r // variant == a decreasing expression that is bounded below
{
r := r - b;
q := q + 1;
}
    // a == b*q + r && 0 <= r < b
}
method Main()
decreases *
{
var v := SqrSum(5);
print "SqrSum(5): ", v, "\n";
var q, r := DivMod(5, 3);
print "DivMod(5, 3): ", q, ", ", r, "\n";
}
| /*
Dafny includes 2 languages:
 * a language for specification
   MSFOL (what we have discussed so far)
   annotations that help in the verification process
 * a language for writing programs
*/
// Example program
method SqrSum(n: int) returns (s: int)
{
var i,k : int;
s := 0;
k := 1;
i := 1;
while (i <= n)
{
s := s + k;
k := k + 2 * i + 1;
i := i+1;
}
}
method DivMod(a: int, b: int) returns (q: int, r: int)
{
q := 0;
r := a;
while (r >= b)
{
r := r - b;
q := q + 1;
}
}
/*
    Hoare triple (| P |) S (| Q |)
*/
// the assume-assert variant
method HoareTripleAssmAssrt()
{
var i: int := *;
var k: int := *;
// (| k == i*i |) k := k + 2 * i +1; (| k = (i+1)*(i+1) |)
assume k == i*i; // P = precondition
k := k + 2 * i + 1; // S
}
// the requires-ensures variant
method HoareTripleReqEns(i: int, k: int) returns (k': int)
// (| k == i*i |) k := k + 2 * i +1; (| k = (i+1)*(i+1) |)
requires k == i*i
ensures k' == (i+1)*(i+1)
{
k' := k + 2 * i + 1;
}
/*
    the rule for while loops
*/
// the variant with assert
/*
method WhileRule()
{
// var n: int := *; // havoc
// assume n >= 0;
var n: int :| n >= 0;
var y := n;
var x := 0;
while (y >= 0)
{
x := x+1;
y := y-1;
}
}
*/
// the variant with an invariant
method Invariant1()
{
// var n: int := *; // havoc
var n: int :| n >= 0;
var y := n;
var x := 0;
while (y >= 0)
{
x := x+1;
y := y-1;
}
}
// specification of the sum of squares
function SqrSumRec(n: int) : int
requires n >= 0
{
if (n == 0) then 0 else n*n + SqrSumRec(n-1)
}
/*
method SqrSum1(n: int) returns (s: int)
requires n >= 0
ensures s == SqrSumRec(n) // s = 0^2 + 1^2 + 2^2 + ... + n^2 == n(n+1)(2n+1)/6
{
// ???
}
*/
// verification of the program for the sum of squares
method SqrSum1(n: int) returns (s: int)
requires n >= 0
ensures s == SqrSumRec(n)
{
var i,k : int;
s := 0;
k := 1;
i := 1;
while (i <= n)
// s: 0*0, 0*0 + 1*1, 0*0 + 1*1 + 2*2, ...
// i: 1, 2, 3,
{
// k = i*i
s := s + k;
// k = i*i
k := k + 2 * i + 1;
// k = (i+1)*(i+1)
i := i+1;
// k = i*i
}
//s == SqrSumRec(i-1) && i <= n+1 && i > n
// implies
//s == SqrSumRec(n)
}
// SqrSumRec(n) = 0^2 + 1^2 + 2^2 + ... + n^2 == n(n+1)(2n+1)/6
least lemma L1(n: int)
requires n >= 0
ensures SqrSumRec(n) == n*(n+1)*(2*n + 1)/6
{
//OK
}
/*
function SqrSumBy6(n: int) : int
{
n * (n + 1) * (2 * n + 1)
}
inductive lemma L(n: int) // it takes a while
requires n >= 0
ensures SqrSumBy6(n) == 6 * SqrSumRec(n)
{
if (n == 0) {}
else {
L(n-1);
calc == {
n*((n-1)*(2*n - 1));
n*(2*n*(n-1) - n + 1);
n*(2*n*n - 3*n + 1);
n*(2*n*n - 3*n + 1);
}
calc == {
2*n*n + n;
(2*n + 1)*n;
}
calc == {
(2*n + 1)*n + (2*n + 1);
(2*n + 1)*(n+1);
}
calc == {
n*((n-1)*(2*n - 1)) + 6*n*n;
n*(2*n*(n-1) - n + 1) + 6*n*n;
n*(2*n*(n-1) - n + 1) + 6*n*n;
n*(2*n*n - 3*n + 1) + 6*n*n;
n*(2*n*n - 3*n + 1 + 6*n);
n*(2*n*n + 6*n - 3*n + 1);
n*(2*n*n + 3*n + 1);
n*(2*n*n + n + (2*n + 1));
n*((2*n + 1)*n + (2*n + 1));
n*((2*n + 1)*(n+1));
}
}
}
*/
method DivMod1(a: int, b: int) returns (q: int, r: int)
requires b > 0 && a >= 0
ensures a == b*q + r && 0 <= r < b
//decreases *
{
q := 0;
r := a;
while (r >= b)
{
r := r - b;
q := q + 1;
}
    // a == b*q + r && 0 <= r < b
}
method Main()
{
var v := SqrSum(5);
print "SqrSum(5): ", v, "\n";
var q, r := DivMod(5, 3);
print "DivMod(5, 3): ", q, ", ", r, "\n";
}
|
686 | formal-verification_tmp_tmpoepcssay_strings3.dfy | predicate isPrefixPred(pre:string, str:string)
{
(|pre| <= |str|) &&
pre == str[..|pre|]
}
predicate isNotPrefixPred(pre:string, str:string)
{
(|pre| > |str|) ||
pre != str[..|pre|]
}
lemma PrefixNegationLemma(pre:string, str:string)
ensures isPrefixPred(pre,str) <==> !isNotPrefixPred(pre,str)
ensures !isPrefixPred(pre,str) <==> isNotPrefixPred(pre,str)
{}
method isPrefix(pre: string, str: string) returns (res:bool)
ensures !res <==> isNotPrefixPred(pre,str)
ensures res <==> isPrefixPred(pre,str)
{
return |pre| <= |str| && forall i :: 0 <= i < |pre| ==> pre[i] == str[i];
}
predicate isSubstringPred(sub:string, str:string)
{
(exists i :: 0 <= i <= |str| && isPrefixPred(sub, str[i..]))
}
predicate isNotSubstringPred(sub:string, str:string)
{
(forall i :: 0 <= i <= |str| ==> isNotPrefixPred(sub,str[i..]))
}
lemma SubstringNegationLemma(sub:string, str:string)
ensures isSubstringPred(sub,str) <==> !isNotSubstringPred(sub,str)
ensures !isSubstringPred(sub,str) <==> isNotSubstringPred(sub,str)
{}
method isSubstring(sub: string, str: string) returns (res:bool)
ensures res <==> isSubstringPred(sub, str)
ensures res ==> isSubstringPred(sub, str)
// ensures !res ==> !isSubstringPred(sub, str)
ensures isSubstringPred(sub, str) ==> res
ensures isSubstringPred(sub, str) ==> res
ensures !res <==> isNotSubstringPred(sub, str) // This postcondition follows from the above lemma.
{
if(|str| < |sub|)
{
return false;
}
else
{
var i: nat := 0;
res := false;
while (i <= |str|-|sub| && res == false)
decreases |str| - |sub| - i + (if !res then 1 else 0)
invariant 0 <= i <= |str|-|sub| + 1
invariant res ==> isSubstringPred(sub,str)
invariant forall j :: 0 <= j < i ==> isNotPrefixPred(sub, str[j..])
{
res := isPrefix(sub,str[i..]);
if(!res)
{
i := i + 1;
}
}
}
}
predicate haveCommonKSubstringPred(k:nat, str1:string, str2:string)
{
exists i1, j1 :: 0 <= i1 <= |str1|- k && j1 == i1 + k && isSubstringPred(str1[i1..j1],str2)
}
predicate haveNotCommonKSubstringPred(k:nat, str1:string, str2:string)
{
forall i1, j1 :: 0 <= i1 <= |str1|- k && j1 == i1 + k ==> isNotSubstringPred(str1[i1..j1],str2)
}
lemma commonKSubstringLemma(k:nat, str1:string, str2:string)
ensures haveCommonKSubstringPred(k,str1,str2) <==> !haveNotCommonKSubstringPred(k,str1,str2)
ensures !haveCommonKSubstringPred(k,str1,str2) <==> haveNotCommonKSubstringPred(k,str1,str2)
{}
method haveCommonKSubstring(k: nat, str1: string, str2: string) returns (found: bool)
ensures found <==> haveCommonKSubstringPred(k,str1,str2)
ensures !found <==> haveNotCommonKSubstringPred(k,str1,str2) // This postcondition follows from the above lemma.
{
if (k <= |str1| && k <= |str2|)
{
var slice : string;
found := false;
var i: nat := 0;
while (i <= |str1| - k && found == false)
decreases |str1| - k - i + (if !found then 1 else 0)
invariant found ==> haveCommonKSubstringPred(k,str1,str2)
invariant forall x, y :: 0 <= x < i && found == false && y == x + k && y <= |str1| ==> isNotSubstringPred(str1[x..y], str2)
{
slice := str1[i..i+k];
found := isSubstring(slice, str2);
i := i + 1;
}
} else {
return false;
}
}
method maxCommonSubstringLength(str1: string, str2: string) returns (len:nat)
requires (|str1| <= |str2|)
ensures (forall k :: len < k <= |str1| ==> !haveCommonKSubstringPred(k,str1,str2))
ensures haveCommonKSubstringPred(len,str1,str2)
{
assert isPrefixPred(str1[0..0],str2[0..]);
len := |str1|;
var hasCommon : bool := true;
while(len > 0)
decreases len
invariant forall i :: len < i <= |str1| ==> !haveCommonKSubstringPred(i,str1,str2)
{
hasCommon := haveCommonKSubstring(len, str1, str2);
if(hasCommon){
return len;
}
len := len - 1;
}
return len;
}
| predicate isPrefixPred(pre:string, str:string)
{
(|pre| <= |str|) &&
pre == str[..|pre|]
}
predicate isNotPrefixPred(pre:string, str:string)
{
(|pre| > |str|) ||
pre != str[..|pre|]
}
lemma PrefixNegationLemma(pre:string, str:string)
ensures isPrefixPred(pre,str) <==> !isNotPrefixPred(pre,str)
ensures !isPrefixPred(pre,str) <==> isNotPrefixPred(pre,str)
{}
method isPrefix(pre: string, str: string) returns (res:bool)
ensures !res <==> isNotPrefixPred(pre,str)
ensures res <==> isPrefixPred(pre,str)
{
return |pre| <= |str| && forall i :: 0 <= i < |pre| ==> pre[i] == str[i];
}
predicate isSubstringPred(sub:string, str:string)
{
(exists i :: 0 <= i <= |str| && isPrefixPred(sub, str[i..]))
}
predicate isNotSubstringPred(sub:string, str:string)
{
(forall i :: 0 <= i <= |str| ==> isNotPrefixPred(sub,str[i..]))
}
lemma SubstringNegationLemma(sub:string, str:string)
ensures isSubstringPred(sub,str) <==> !isNotSubstringPred(sub,str)
ensures !isSubstringPred(sub,str) <==> isNotSubstringPred(sub,str)
{}
method isSubstring(sub: string, str: string) returns (res:bool)
ensures res <==> isSubstringPred(sub, str)
ensures res ==> isSubstringPred(sub, str)
// ensures !res ==> !isSubstringPred(sub, str)
ensures isSubstringPred(sub, str) ==> res
ensures isSubstringPred(sub, str) ==> res
ensures !res <==> isNotSubstringPred(sub, str) // This postcondition follows from the above lemma.
{
if(|str| < |sub|)
{
return false;
}
else
{
var i: nat := 0;
res := false;
while (i <= |str|-|sub| && res == false)
{
res := isPrefix(sub,str[i..]);
if(!res)
{
i := i + 1;
}
}
}
}
predicate haveCommonKSubstringPred(k:nat, str1:string, str2:string)
{
exists i1, j1 :: 0 <= i1 <= |str1|- k && j1 == i1 + k && isSubstringPred(str1[i1..j1],str2)
}
predicate haveNotCommonKSubstringPred(k:nat, str1:string, str2:string)
{
forall i1, j1 :: 0 <= i1 <= |str1|- k && j1 == i1 + k ==> isNotSubstringPred(str1[i1..j1],str2)
}
lemma commonKSubstringLemma(k:nat, str1:string, str2:string)
ensures haveCommonKSubstringPred(k,str1,str2) <==> !haveNotCommonKSubstringPred(k,str1,str2)
ensures !haveCommonKSubstringPred(k,str1,str2) <==> haveNotCommonKSubstringPred(k,str1,str2)
{}
method haveCommonKSubstring(k: nat, str1: string, str2: string) returns (found: bool)
ensures found <==> haveCommonKSubstringPred(k,str1,str2)
ensures !found <==> haveNotCommonKSubstringPred(k,str1,str2) // This postcondition follows from the above lemma.
{
if (k <= |str1| && k <= |str2|)
{
var slice : string;
found := false;
var i: nat := 0;
while (i <= |str1| - k && found == false)
{
slice := str1[i..i+k];
found := isSubstring(slice, str2);
i := i + 1;
}
} else {
return false;
}
}
method maxCommonSubstringLength(str1: string, str2: string) returns (len:nat)
requires (|str1| <= |str2|)
ensures (forall k :: len < k <= |str1| ==> !haveCommonKSubstringPred(k,str1,str2))
ensures haveCommonKSubstringPred(len,str1,str2)
{
len := |str1|;
var hasCommon : bool := true;
while(len > 0)
{
hasCommon := haveCommonKSubstring(len, str1, str2);
if(hasCommon){
return len;
}
len := len - 1;
}
return len;
}
|
687 | formal_verication_dafny_tmp_tmpwgl2qz28_Challenges_ex1.dfy | // ex3errors.dfy in Assignment 1
// verify that an array of characters is a Palindrome
/*
A Palindrome is a word that is the same when written forwards and when written backwards.
For example, the word ”refer” is a Palindrome.
The method PalVerify is supposed to verify whether a word is a Palindrome,
where the word is represented as an array of characters.
The method was written by a novice software engineer, and contains many errors.
i) Without changing the signature or the code in the while loop,
   fix the method so that the code verifies. Do not add any Dafny predicates or functions:
keep the changes to a minimum.
ii) Write a tester method (you may call it anything you like) that verifies that the
testcases refer, z and the empty string are Palindromes, and xy and 123421 are not.
The tester should not generate any output.
*/
method PalVerify(a: array<char>) returns (yn: bool)
ensures yn == true ==> forall i :: 0 <= i < a.Length/2 ==> a[i] == a[a.Length - i -1]
ensures yn == false ==> exists i :: 0 <= i < a.Length/2 && a[i] != a[a.Length - i -1]
ensures forall j :: 0<=j<a.Length ==> a[j] == old(a[j])
{
var i:int := 0;
while i < a.Length/2
invariant 0 <= i <= a.Length/2 && forall j:: 0<=j<i ==> a[j] == a[a.Length-j-1]
decreases a.Length/2 - i
{
if a[i] != a[a.Length-i-1]
{
return false;
}
i := i+1;
}
return true;
}
method TEST()
{
var a:array<char> := new char[]['r','e','f','e','r'];
var r:bool := PalVerify(a);
assert r;
var b:array<char> := new char[]['z'];
r := PalVerify(b);
assert r;
var c:array<char> := new char[][];
r := PalVerify(c);
assert r;
var d:array<char> := new char[]['x', 'y'];
assert d[0]=='x' && d[1]=='y';
r := PalVerify(d);
assert !r;
var e:array<char> := new char[]['1', '2', '3', '4', '2', '1'];
assert e[0]=='1' && e[1]=='2' && e[2]=='3' && e[3]=='4' && e[4]=='2' && e[5]=='1';
r := PalVerify(e);
assert !r;
}
| // ex3errors.dfy in Assignment 1
// verify that an array of characters is a Palindrome
/*
A Palindrome is a word that is the same when written forwards and when written backwards.
For example, the word ”refer” is a Palindrome.
The method PalVerify is supposed to verify whether a word is a Palindrome,
where the word is represented as an array of characters.
The method was written by a novice software engineer, and contains many errors.
i) Without changing the signature or the code in the while loop,
   fix the method so that the code verifies. Do not add any Dafny predicates or functions:
keep the changes to a minimum.
ii) Write a tester method (you may call it anything you like) that verifies that the
testcases refer, z and the empty string are Palindromes, and xy and 123421 are not.
The tester should not generate any output.
*/
method PalVerify(a: array<char>) returns (yn: bool)
ensures yn == true ==> forall i :: 0 <= i < a.Length/2 ==> a[i] == a[a.Length - i -1]
ensures yn == false ==> exists i :: 0 <= i < a.Length/2 && a[i] != a[a.Length - i -1]
ensures forall j :: 0<=j<a.Length ==> a[j] == old(a[j])
{
var i:int := 0;
while i < a.Length/2
{
if a[i] != a[a.Length-i-1]
{
return false;
}
i := i+1;
}
return true;
}
method TEST()
{
var a:array<char> := new char[]['r','e','f','e','r'];
var r:bool := PalVerify(a);
var b:array<char> := new char[]['z'];
r := PalVerify(b);
var c:array<char> := new char[][];
r := PalVerify(c);
var d:array<char> := new char[]['x', 'y'];
r := PalVerify(d);
var e:array<char> := new char[]['1', '2', '3', '4', '2', '1'];
r := PalVerify(e);
}
|
688 | formal_verication_dafny_tmp_tmpwgl2qz28_Challenges_ex2.dfy | /*
i) Write a verified method with signature
method Forbid42(x:int, y:int) returns (z: int)
that returns x/(42 − y). The method is not defined for y = 42.
ii) Write a verified method with signature
method Allow42(x:int, y:int) returns (z: int, err:bool)
If y is not equal to 42 then z = x/(42 − y), otherwise z = 0.
The variable err is true if y == 42, otherwise it is false.
iii) Test your two methods by writing a tester with the following testcases.
You may call your tester anything you like.
*/
method Forbid42(x:int, y:int) returns (z:int)
requires y != 42;
ensures z == x/(42-y);
{
z:= x/(42-y);
return z;
}
method Allow42(x:int, y:int) returns (z: int, err:bool)
ensures y != 42 ==> z == x/(42-y) && err == false;
ensures y == 42 ==> z == 0 && err == true;
{
if (y != 42){
z:= x/(42-y);
return z, false;
}
return 0, true;
}
method TEST1()
{
var c:int := Forbid42(0, 1);
assert c == 0;
c := Forbid42(10, 32);
assert c == 1;
c := Forbid42(-100, 38);
assert c == -25;
var d:int,z:bool := Allow42(0,42);
assert d == 0 && z == true;
d,z := Allow42(-10,42);
assert d == 0 && z == true;
d,z := Allow42(0,1);
assert d == 0 && z == false;
d,z := Allow42(10,32);
assert d == 1 && z == false;
d,z := Allow42(-100,38);
assert d == -25 && z == false;
}
| /*
i) Write a verified method with signature
method Forbid42(x:int, y:int) returns (z: int)
that returns x/(42 − y). The method is not defined for y = 42.
ii) Write a verified method with signature
method Allow42(x:int, y:int) returns (z: int, err:bool)
If y is not equal to 42 then z = x/(42 − y), otherwise z = 0.
The variable err is true if y == 42, otherwise it is false.
iii) Test your two methods by writing a tester with the following testcases.
You may call your tester anything you like.
*/
method Forbid42(x:int, y:int) returns (z:int)
requires y != 42;
ensures z == x/(42-y);
{
z:= x/(42-y);
return z;
}
method Allow42(x:int, y:int) returns (z: int, err:bool)
ensures y != 42 ==> z == x/(42-y) && err == false;
ensures y == 42 ==> z == 0 && err == true;
{
if (y != 42){
z:= x/(42-y);
return z, false;
}
return 0, true;
}
method TEST1()
{
var c:int := Forbid42(0, 1);
c := Forbid42(10, 32);
c := Forbid42(-100, 38);
var d:int,z:bool := Allow42(0,42);
d,z := Allow42(-10,42);
d,z := Allow42(0,1);
d,z := Allow42(10,32);
d,z := Allow42(-100,38);
}
|
689 | formal_verication_dafny_tmp_tmpwgl2qz28_Challenges_ex6.dfy | // see pdf 'ex6 & 7 documentation' for exercise question
function bullspec(s:seq<nat>, u:seq<nat>): nat
requires 0 <= |u| == |s| && nomultiples(u)
{reccbull(s, u, 0)}
function cowspec(s:seq<nat>, u:seq<nat>): nat
requires 0 <= |u| == |s| && nomultiples(u)
{recccow(s, u, 0)}
function reccbull(s: seq<nat>, u:seq<nat>, i:int): nat
requires 0 <= i <= |s| == |u|
decreases |s| - i
{
if i ==|s| then 0
else if s[i] == u[i] then reccbull(s, u, i + 1) + 1
else reccbull(s, u, i + 1)
}
function recccow(s: seq<nat>, u:seq<nat>, i:int): nat
requires 0 <= i <= |s| == |u|
decreases |s| - i
{
if i == |s| then 0
else if s[i] != u[i] && u[i] in s then recccow(s, u, i + 1) + 1
else recccow(s, u, i + 1)
}
predicate nomultiples(u:seq<nat>)
{forall j, k :: 0<=j<k<|u| ==> u[j] != u[k]}
method BullsCows (s:seq<nat>, u:seq<nat>) returns (b:nat, c:nat)
requires 0 < |u| == |s| <= 10
requires nomultiples(u) && nomultiples(s);
ensures b >= 0 && c >= 0
ensures b == bullspec(s, u)
ensures c == cowspec(s, u)
{
b, c := 0, 0;
var i:int := |s|;
while i > 0
invariant 0 <= i <= |s| == |u|
invariant b >= 0 && c >= 0
invariant b == reccbull(s,u, i)
invariant c == recccow(s, u, i)
decreases i
{
i := i - 1;
if s[i] != u[i] && u[i] in s {c:= c + 1;}
else if s[i] == u[i] {b := b + 1;}
}
return b, c;
}
method TEST(){
var sys:seq<nat> := [1,2,9,10];
var usr:seq<nat> := [1,2,3,7];
assert bullspec(sys, usr) == 2;
assert cowspec(sys, usr) == 0;
var b:nat, c:nat := BullsCows(sys, usr);
assert b == 2 && c == 0;
var sys1:seq<nat> := [1, 2, 3, 4];
var usr2:seq<nat> := [4, 3, 2, 1];
assert bullspec(sys1, usr2) == 0;
assert cowspec(sys1, usr2) == 4;
b, c := BullsCows(sys1, usr2);
assert b == 0 && c == 4;
var sys3:seq<nat> := [1, 2, 3, 4, 5, 6, 7];
var usr3:seq<nat> := [1, 2, 3, 4, 5, 6, 7];
assert bullspec(sys3, usr3) == 7;
assert cowspec(sys3, usr3) == 0;
b, c := BullsCows(sys3, usr3);
assert b == 7 && c == 0;
var sys4:seq<nat> := [1, 2, 3, 4, 5, 6, 7];
var usr4:seq<nat> := [1, 2, 3, 7, 8, 6, 5];
assert bullspec(sys4, usr4) == 4;
assert cowspec(sys4, usr4) == 2;
b, c := BullsCows(sys4, usr4);
}
 | // see pdf 'ex6 & 7 documentation' for exercise question
function bullspec(s:seq<nat>, u:seq<nat>): nat
requires 0 <= |u| == |s| && nomultiples(u)
{reccbull(s, u, 0)}
function cowspec(s:seq<nat>, u:seq<nat>): nat
requires 0 <= |u| == |s| && nomultiples(u)
{recccow(s, u, 0)}
function reccbull(s: seq<nat>, u:seq<nat>, i:int): nat
requires 0 <= i <= |s| == |u|
{
if i ==|s| then 0
else if s[i] == u[i] then reccbull(s, u, i + 1) + 1
else reccbull(s, u, i + 1)
}
function recccow(s: seq<nat>, u:seq<nat>, i:int): nat
requires 0 <= i <= |s| == |u|
{
if i == |s| then 0
else if s[i] != u[i] && u[i] in s then recccow(s, u, i + 1) + 1
else recccow(s, u, i + 1)
}
predicate nomultiples(u:seq<nat>)
{forall j, k :: 0<=j<k<|u| ==> u[j] != u[k]}
method BullsCows (s:seq<nat>, u:seq<nat>) returns (b:nat, c:nat)
requires 0 < |u| == |s| <= 10
requires nomultiples(u) && nomultiples(s);
ensures b >= 0 && c >= 0
ensures b == bullspec(s, u)
ensures c == cowspec(s, u)
{
b, c := 0, 0;
var i:int := |s|;
while i > 0
{
i := i - 1;
if s[i] != u[i] && u[i] in s {c:= c + 1;}
else if s[i] == u[i] {b := b + 1;}
}
return b, c;
}
method TEST(){
var sys:seq<nat> := [1,2,9,10];
var usr:seq<nat> := [1,2,3,7];
var b:nat, c:nat := BullsCows(sys, usr);
var sys1:seq<nat> := [1, 2, 3, 4];
var usr2:seq<nat> := [4, 3, 2, 1];
b, c := BullsCows(sys1, usr2);
var sys3:seq<nat> := [1, 2, 3, 4, 5, 6, 7];
var usr3:seq<nat> := [1, 2, 3, 4, 5, 6, 7];
b, c := BullsCows(sys3, usr3);
var sys4:seq<nat> := [1, 2, 3, 4, 5, 6, 7];
var usr4:seq<nat> := [1, 2, 3, 7, 8, 6, 5];
b, c := BullsCows(sys4, usr4);
}
|
690 | formal_verication_dafny_tmp_tmpwgl2qz28_Challenges_ex7.dfy | // see pdf 'ex6 & 7 documentation' for exercise question
datatype Bases = A | C | G | T
//swaps two sequence indexes
method Exchanger(s: seq<Bases>, x:nat, y:nat) returns (t: seq<Bases>)
requires 0 < |s| && x < |s| && y < |s|
ensures |t| == |s|
ensures forall b:nat :: 0 <= b < |s| && b != x && b != y ==> t[b] == s[b]
ensures t[x] == s[y] && s[x] == t[y]
ensures multiset(s) == multiset(t)
{
t := s;
t := t[ x := s[y]];
t := t[ y := s[x] ];
return t;
}
//idea from Rustan Leino video "Basics of specification and verification: Lecture 3, the Dutch National Flag algorithm"
//modified for 4 elements
predicate below(first: Bases, second: Bases)
{
first == second ||
first == A ||
(first == C && (second == G || second == T)) ||
(first == G && second == T) ||
second == T
}
//checks if a sequence is in base order
predicate bordered(s:seq<Bases>)
{
forall j, k :: 0 <= j < k < |s| ==> below(s[j], s[k])
}
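// Note: below encodes the order A, C, G, T, and bordered lifts it to whole
// sequences, e.g. bordered([A, A, C, G, T]) holds while bordered([A, T, C, T, T])
// does not. A hypothetical sanity check (sketch only, not part of the exercise):
// method BelowExamples() {
//   assert below(A, G);
//   assert below(C, T);
//   assert !below(T, C);
// }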
method Sorter(bases: seq<Bases>) returns (sobases:seq<Bases>)
requires 0 < |bases|
ensures |sobases| == |bases|
ensures bordered(sobases)
ensures multiset(bases) == multiset(sobases);
{
sobases := bases;
var c, next:nat := 0, 0;
var g, t:nat := |bases|, |bases|;
while next != g
invariant 0 <= c <= next <= g <= t <= |bases|
invariant |sobases| == |bases|
invariant multiset(bases) == multiset(sobases);
invariant forall i:nat :: 0 <= i < c ==> sobases[i] == A
invariant forall i:nat :: c <= i < next ==> sobases[i] == C
invariant forall i:nat :: g <= i < t ==> sobases[i] == G
invariant forall i:nat :: t <= i < |bases| ==> sobases[i] == T
{
match(sobases[next]) {
case C => next := next + 1;
case A => sobases := Exchanger(sobases, next, c);
c, next:= c + 1, next + 1;
case G => g := g - 1;
sobases := Exchanger(sobases, next, g);
case T => g , t:= g - 1, t - 1;
sobases := Exchanger(sobases, next, t);
if (g != t) {sobases := Exchanger(sobases, next, g);}
}
}
return sobases;
}
method Testerexchange() {
var a:seq<Bases> := [A, C, A, T];
var b:seq<Bases> := Exchanger(a, 2, 3);
assert b == [A, C, T, A];
var c:seq<Bases> := [A, C, A, T, A, T, C];
var d:seq<Bases> := Exchanger(c, 5, 1);
assert d == [A, T, A, T, A, C, C];
var e:seq<Bases> := [A, C, A, T, A, T, C];
var f:seq<Bases> := Exchanger(e, 1, 1);
assert f == [A, C, A, T, A, T, C];
var g:seq<Bases> := [A, C];
var h:seq<Bases> := Exchanger(g, 0, 1);
assert h == [C, A];
}
method Testsort() {
var a:seq<Bases> := [G,A,T];
assert a == [G,A,T];
var b:seq<Bases> := Sorter(a);
assert bordered(b);
assert multiset(b) == multiset(a);
var c:seq<Bases> := [G, A, T, T, A, C, G, C, T, A, C, G, T, T, G];
assert c == [G, A, T, T, A, C, G, C, T, A, C, G, T, T, G];
var d:seq<Bases> := Sorter(c);
assert bordered(d);
assert multiset(c) == multiset(d);
var e:seq<Bases> := [A];
assert e == [A];
var f:seq<Bases> := Sorter(e);
assert bordered(b);
assert multiset(e) == multiset(f);
var g:seq<Bases> := [A, C, G, T];
assert g == [A, C, G, T];
var h:seq<Bases> := Sorter(g);
assert bordered(b);
assert multiset(g) == multiset(h);
var i:seq<Bases> := [A, T, C, T, T];
assert i[0]==A && i[1]==T && i[2]==C && i[3]==T && i[4]==T;
assert !bordered(i);
}
 | // see pdf 'ex6 & 7 documentation' for exercise question
datatype Bases = A | C | G | T
//swaps two sequence indexes
method Exchanger(s: seq<Bases>, x:nat, y:nat) returns (t: seq<Bases>)
requires 0 < |s| && x < |s| && y < |s|
ensures |t| == |s|
ensures forall b:nat :: 0 <= b < |s| && b != x && b != y ==> t[b] == s[b]
ensures t[x] == s[y] && s[x] == t[y]
ensures multiset(s) == multiset(t)
{
t := s;
t := t[ x := s[y]];
t := t[ y := s[x] ];
return t;
}
//idea from Rustan Leino video "Basics of specification and verification: Lecture 3, the Dutch National Flag algorithm"
//modified for 4 elements
predicate below(first: Bases, second: Bases)
{
first == second ||
first == A ||
(first == C && (second == G || second == T)) ||
(first == G && second == T) ||
second == T
}
//checks if a sequence is in base order
predicate bordered(s:seq<Bases>)
{
forall j, k :: 0 <= j < k < |s| ==> below(s[j], s[k])
}
method Sorter(bases: seq<Bases>) returns (sobases:seq<Bases>)
requires 0 < |bases|
ensures |sobases| == |bases|
ensures bordered(sobases)
ensures multiset(bases) == multiset(sobases);
{
sobases := bases;
var c, next:nat := 0, 0;
var g, t:nat := |bases|, |bases|;
while next != g
{
match(sobases[next]) {
case C => next := next + 1;
case A => sobases := Exchanger(sobases, next, c);
c, next:= c + 1, next + 1;
case G => g := g - 1;
sobases := Exchanger(sobases, next, g);
case T => g , t:= g - 1, t - 1;
sobases := Exchanger(sobases, next, t);
if (g != t) {sobases := Exchanger(sobases, next, g);}
}
}
return sobases;
}
method Testerexchange() {
var a:seq<Bases> := [A, C, A, T];
var b:seq<Bases> := Exchanger(a, 2, 3);
var c:seq<Bases> := [A, C, A, T, A, T, C];
var d:seq<Bases> := Exchanger(c, 5, 1);
var e:seq<Bases> := [A, C, A, T, A, T, C];
var f:seq<Bases> := Exchanger(e, 1, 1);
var g:seq<Bases> := [A, C];
var h:seq<Bases> := Exchanger(g, 0, 1);
}
method Testsort() {
var a:seq<Bases> := [G,A,T];
var b:seq<Bases> := Sorter(a);
var c:seq<Bases> := [G, A, T, T, A, C, G, C, T, A, C, G, T, T, G];
var d:seq<Bases> := Sorter(c);
var e:seq<Bases> := [A];
var f:seq<Bases> := Sorter(e);
var g:seq<Bases> := [A, C, G, T];
var h:seq<Bases> := Sorter(g);
var i:seq<Bases> := [A, T, C, T, T];
}
|
691 | fv2020-tms_tmp_tmpnp85b47l_modeling_concurrency_safety.dfy | /*
* Model of the ticket system and correctness theorem
* Parts 4 and 5 in the paper
*/
type Process(==) = int // Philosopher
datatype CState = Thinking | Hungry | Eating // Control states
// A class can have state, with multiple fields, methods, a constructor, and can declare functions and lemmas
class TicketSystem
{
var ticket: int // Ticket dispenser
var serving: int // Serving display
const P: set<Process> // Fixed set of processes
// State for each process
var cs: map<Process, CState> // (Partial) Map from process to state
var t: map<Process, int> // (Partial) Map from process to ticket number
// Invariant of the system
// Checks that P is a subset of the domain/keys of each map
predicate Valid()
reads this // Depends on the fields on the current class
{
&& cs.Keys == t.Keys == P // Alt. P <= cs.Keys && P <= t.Keys
&& serving <= ticket
    && (forall p :: // ticket held is in the range [serving, ticket)
p in P && cs[p] != Thinking
==> serving <= t[p] < ticket
)
    && (forall p, q :: // No two distinct non-Thinking processes hold the same ticket number
p in P && q in P && p != q && cs[p] != Thinking && cs[q] != Thinking
==> t[p] != t[q]
)
&& (forall p :: // We are serving the correct ticket number
p in P && cs[p] == Eating
==> t[p] == serving
)
}
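  // Worked example: with processes == {0, 1}, right after the constructor a single
  // Request(0) gives t[0] == 0, ticket == 1, serving == 0 and cs[0] == Hungry, so
  // serving <= t[0] < ticket holds and Valid() is preserved; the Eating conjunct is
  // vacuous and the distinct-tickets conjunct has no applicable pair yet.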
// Initialize the ticket system
constructor (processes: set<Process>)
ensures Valid() // Postcondition
ensures P == processes // Connection between processes and ts.P
{
P := processes;
ticket, serving := 0, 0; // Alt. ticket := serving;
// The two following use map comprehension
cs := map p | p in processes :: Thinking; // The map from p, where p in processes, takes value Thinking
t := map p | p in processes :: 0;
}
// The next three methods are our atomic events
// A Philosopher is Thinking and gets Hungry
method Request(p: Process)
requires Valid() && p in P && cs[p] == Thinking // Control process precondition
modifies this // Depends on the fields on the current class
ensures Valid() // Postcondition
{
t, ticket := t[p := ticket], ticket + 1; // Philosopher gets current ticket, next ticket's number increases
cs := cs[p := Hungry]; // Philosopher's state changes to Hungry
}
// A Philosopher is Hungry and enters the kitchen
method Enter(p: Process)
requires Valid() && p in P && cs[p] == Hungry // Control process precondition
modifies this // Depends on the fields on the current class
ensures Valid() // Postcondition
{
if t[p] == serving // The kitchen is available for this Philosopher
{
cs := cs[p := Eating]; // Philosopher's state changes to Eating
}
}
// A Philosopher is done Eating and leaves the kitchen
method Leave(p: Process)
requires Valid() && p in P && cs[p] == Eating // Control process precondition
modifies this // Depends on the fields on the current class
ensures Valid() // Postcondition
{
//assert t[p] == serving; // Ticket held by p is equal to serving
serving := serving + 1; // Kitchen is ready to serve the next ticket holder
cs := cs[p := Thinking]; // Philosopher's state changes to Thinking
}
// Ensures that no two processes are in the same state
lemma MutualExclusion(p: Process, q: Process)
// Antecedents
requires Valid() && p in P && q in P
requires cs[p] == Eating && cs[q] == Eating
// Conclusion/Proof goal
ensures p == q
{
}
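  // Proof sketch: if both p and q are Eating, the last conjunct of Valid() gives
  // t[p] == serving == t[q]; if p != q, the distinct-tickets conjunct would force
  // t[p] != t[q], a contradiction, so p == q follows from Valid() alone.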
}
/*
* Event scheduler
* Part 6 in the paper
* Part 6.1 for alternatives
*/
method Run(processes: set<Process>)
requires processes != {} // Cannot schedule no processes
decreases * // Needed so that the loop omits termination checks
{
var ts := new TicketSystem(processes);
var schedule := []; // Scheduling choices
var trace := [(ts.ticket, ts.serving, ts.cs, ts.t)]; // Record sequence of states
while true
invariant ts.Valid()
decreases * // Omits termination checks
{
var p :| p in ts.P; // p exists such that p is in ts.P
match ts.cs[p] {
case Thinking => ts.Request(p);
case Hungry => ts.Enter(p);
case Eating => ts.Leave(p);
}
schedule := schedule + [p];
trace:=trace + [(ts.ticket, ts.serving, ts.cs, ts.t)];
}
}
/*
 * Event scheduler with a pre-planned schedule
* Part 6.2
*/
method RunFromSchedule(processes: set<Process>, schedule: nat -> Process)
requires processes != {}
requires forall n :: schedule(n) in processes
decreases *
{
var ts := new TicketSystem(processes);
var n := 0;
while true
invariant ts.Valid()
decreases * // Omits termination checks
{
var p := schedule(n);
match ts.cs[p] {
case Thinking => ts.Request(p);
case Hungry => ts.Enter(p);
case Eating => ts.Leave(p);
}
n := n + 1;
}
}
| /*
* Model of the ticket system and correctness theorem
* Parts 4 and 5 in the paper
*/
type Process(==) = int // Philosopher
datatype CState = Thinking | Hungry | Eating // Control states
// A class can have state, with multiple fields, methods, a constructor, and can declare functions and lemmas
class TicketSystem
{
var ticket: int // Ticket dispenser
var serving: int // Serving display
const P: set<Process> // Fixed set of processes
// State for each process
var cs: map<Process, CState> // (Partial) Map from process to state
var t: map<Process, int> // (Partial) Map from process to ticket number
// Invariant of the system
// Checks that P is a subset of the domain/keys of each map
predicate Valid()
reads this // Depends on the fields on the current class
{
&& cs.Keys == t.Keys == P // Alt. P <= cs.Keys && P <= t.Keys
&& serving <= ticket
    && (forall p :: // ticket held is in the range [serving, ticket)
p in P && cs[p] != Thinking
==> serving <= t[p] < ticket
)
    && (forall p, q :: // No two distinct non-Thinking processes hold the same ticket number
p in P && q in P && p != q && cs[p] != Thinking && cs[q] != Thinking
==> t[p] != t[q]
)
&& (forall p :: // We are serving the correct ticket number
p in P && cs[p] == Eating
==> t[p] == serving
)
}
// Initialize the ticket system
constructor (processes: set<Process>)
ensures Valid() // Postcondition
ensures P == processes // Connection between processes and ts.P
{
P := processes;
ticket, serving := 0, 0; // Alt. ticket := serving;
// The two following use map comprehension
cs := map p | p in processes :: Thinking; // The map from p, where p in processes, takes value Thinking
t := map p | p in processes :: 0;
}
// The next three methods are our atomic events
// A Philosopher is Thinking and gets Hungry
method Request(p: Process)
requires Valid() && p in P && cs[p] == Thinking // Control process precondition
modifies this // Depends on the fields on the current class
ensures Valid() // Postcondition
{
t, ticket := t[p := ticket], ticket + 1; // Philosopher gets current ticket, next ticket's number increases
cs := cs[p := Hungry]; // Philosopher's state changes to Hungry
}
// A Philosopher is Hungry and enters the kitchen
method Enter(p: Process)
requires Valid() && p in P && cs[p] == Hungry // Control process precondition
modifies this // Depends on the fields on the current class
ensures Valid() // Postcondition
{
if t[p] == serving // The kitchen is available for this Philosopher
{
cs := cs[p := Eating]; // Philosopher's state changes to Eating
}
}
// A Philosopher is done Eating and leaves the kitchen
method Leave(p: Process)
requires Valid() && p in P && cs[p] == Eating // Control process precondition
modifies this // Depends on the fields on the current class
ensures Valid() // Postcondition
{
//assert t[p] == serving; // Ticket held by p is equal to serving
serving := serving + 1; // Kitchen is ready to serve the next ticket holder
cs := cs[p := Thinking]; // Philosopher's state changes to Thinking
}
// Ensures that no two processes are in the same state
lemma MutualExclusion(p: Process, q: Process)
// Antecedents
requires Valid() && p in P && q in P
requires cs[p] == Eating && cs[q] == Eating
// Conclusion/Proof goal
ensures p == q
{
}
}
/*
* Event scheduler
* Part 6 in the paper
* Part 6.1 for alternatives
*/
method Run(processes: set<Process>)
requires processes != {} // Cannot schedule no processes
{
var ts := new TicketSystem(processes);
var schedule := []; // Scheduling choices
var trace := [(ts.ticket, ts.serving, ts.cs, ts.t)]; // Record sequence of states
while true
{
var p :| p in ts.P; // p exists such that p is in ts.P
match ts.cs[p] {
case Thinking => ts.Request(p);
case Hungry => ts.Enter(p);
case Eating => ts.Leave(p);
}
schedule := schedule + [p];
trace:=trace + [(ts.ticket, ts.serving, ts.cs, ts.t)];
}
}
/*
 * Event scheduler with a pre-planned schedule
* Part 6.2
*/
method RunFromSchedule(processes: set<Process>, schedule: nat -> Process)
requires processes != {}
requires forall n :: schedule(n) in processes
{
var ts := new TicketSystem(processes);
var n := 0;
while true
{
var p := schedule(n);
match ts.cs[p] {
case Thinking => ts.Request(p);
case Hungry => ts.Enter(p);
case Eating => ts.Leave(p);
}
n := n + 1;
}
}
|
692 | fv2020-tms_tmp_tmpnp85b47l_simple_tm.dfy | module ModelingTM {
type ProcessId = nat
type MemoryObject = nat
type TimeStamp = nat
class Operation {
const isWrite: bool
const memObject: MemoryObject
}
class Transaction {
const ops: seq<Operation>
}
// Process state : transaction progress and process memory.
class ProcessState {
// currentTx : id of tx being processed. txs.size() means done.
const currentTx: nat
// currentOp :
// - tx.ops.size() represents tryCommit operation.
// - -1 represents abort operation
// - values in between represent read and write operations
const currentOp: int
// sub-operations of the operation, see the step function
const currentSubOp: nat
// Set of read objects with original observed timestamp.
const readSet: map<MemoryObject, TimeStamp>
// Set of written objects.
const writeSet: set<MemoryObject>
constructor () {
currentTx := 0;
currentOp := 0;
currentSubOp := 0;
readSet := map[];
writeSet := {};
}
constructor nextSubOp(that: ProcessState)
ensures this.currentTx == that.currentTx
ensures this.currentOp == that.currentOp
ensures this.currentSubOp == that.currentSubOp + 1
ensures this.readSet == that.readSet
ensures this.writeSet == that.writeSet
{
currentTx := that.currentTx;
currentOp := that.currentOp;
currentSubOp := that.currentSubOp + 1;
readSet := that.readSet;
writeSet := that.writeSet;
}
constructor nextOp(that: ProcessState)
ensures this.currentTx == that.currentTx
ensures this.currentOp == that.currentOp + 1
ensures this.currentSubOp == 0
ensures this.readSet == that.readSet
ensures this.writeSet == that.writeSet
{
currentTx := that.currentTx;
currentOp := that.currentOp + 1;
currentSubOp := 0;
readSet := that.readSet;
writeSet := that.writeSet;
}
constructor abortTx(that: ProcessState)
ensures this.currentTx == that.currentTx
ensures this.currentOp == -1
ensures this.currentSubOp == 0
ensures this.readSet == that.readSet
ensures this.writeSet == that.writeSet
{
currentTx := that.currentTx;
currentOp := -1;
currentSubOp := 0;
readSet := that.readSet;
writeSet := that.writeSet;
}
constructor restartTx(that: ProcessState)
ensures this.currentTx == that.currentTx
ensures this.currentOp == 0
ensures this.currentSubOp == 0
ensures this.readSet == map[]
ensures this.writeSet == {}
{
currentTx := that.currentTx;
currentOp := 0;
currentSubOp := 0;
readSet := map[];
writeSet := {};
}
constructor nextTx(that: ProcessState)
ensures this.currentTx == that.currentTx + 1
ensures this.currentOp == 0
ensures this.currentSubOp == 0
ensures this.readSet == map[]
ensures this.writeSet == {}
{
currentTx := that.currentTx + 1;
currentOp := 0;
currentSubOp := 0;
readSet := map[];
writeSet := {};
}
constructor addToReadSet(that: ProcessState, obj: MemoryObject, ts: TimeStamp)
ensures currentTx == that.currentTx
ensures currentOp == that.currentOp
ensures currentSubOp == that.currentSubOp
ensures readSet.Keys == that.readSet.Keys + {obj}
&& readSet[obj] == ts
&& forall o :: o in readSet && o != obj ==> readSet[o] == that.readSet[o]
ensures writeSet == that.writeSet
{
currentTx := that.currentTx;
currentOp := that.currentOp;
currentSubOp := that.currentSubOp;
readSet := that.readSet[obj := ts];
writeSet := that.writeSet;
}
constructor addToWriteSet(that: ProcessState, obj: MemoryObject)
ensures this.currentTx == that.currentTx
ensures this.currentOp == that.currentOp
ensures this.currentSubOp == that.currentSubOp
ensures this.readSet == that.readSet
ensures this.writeSet == that.writeSet + {obj}
{
currentTx := that.currentTx;
currentOp := that.currentOp;
currentSubOp := that.currentSubOp;
readSet := that.readSet;
writeSet := that.writeSet + {obj};
}
}
class TMSystem {
    // Ordered list of transactions that each process should process
const txQueues : map<ProcessId, seq<Transaction>>
// State and memory of processes
const procStates : map<ProcessId, ProcessState>
// Dirty objects. (Replaces the object value in a real representation. Used for safety proof)
const dirtyObjs: set<MemoryObject>
// Object lock.
const lockedObjs: set<MemoryObject>
// Object timestamp. (Incremented at the end of any write transaction)
const objTimeStamps: map<MemoryObject, nat>
constructor (q: map<ProcessId, seq<Transaction>>) {
txQueues := q;
procStates := map[];
dirtyObjs := {};
lockedObjs := {};
objTimeStamps := map[];
}
constructor initTimestamp(that: TMSystem, obj: MemoryObject)
ensures txQueues == that.txQueues
ensures procStates == that.procStates
ensures dirtyObjs == that.dirtyObjs
ensures lockedObjs == that.lockedObjs
ensures objTimeStamps.Keys == that.objTimeStamps.Keys + {obj}
&& objTimeStamps[obj] == 0
&& forall o :: o in objTimeStamps && o != obj ==> objTimeStamps[o] == that.objTimeStamps[o]
{
txQueues := that.txQueues;
procStates := that.procStates;
dirtyObjs := that.dirtyObjs;
lockedObjs := that.lockedObjs;
objTimeStamps := that.objTimeStamps[obj := 0];
}
constructor updateState(that: TMSystem, pid: ProcessId, state: ProcessState)
ensures txQueues == that.txQueues
ensures procStates.Keys == that.procStates.Keys + {pid}
&& procStates[pid] == state
&& forall p :: p in procStates && p != pid ==> procStates[p] == that.procStates[p]
ensures dirtyObjs == that.dirtyObjs
ensures lockedObjs == that.lockedObjs
ensures objTimeStamps == that.objTimeStamps
{
txQueues := that.txQueues;
procStates := that.procStates[pid := state];
dirtyObjs := that.dirtyObjs;
lockedObjs := that.lockedObjs;
objTimeStamps := that.objTimeStamps;
}
constructor markDirty(that: TMSystem, obj: MemoryObject)
ensures txQueues == that.txQueues
ensures procStates == that.procStates
ensures dirtyObjs == that.dirtyObjs + {obj}
ensures lockedObjs == that.lockedObjs
ensures objTimeStamps == that.objTimeStamps
{
txQueues := that.txQueues;
procStates := that.procStates;
dirtyObjs := that.dirtyObjs + {obj};
lockedObjs := that.lockedObjs;
objTimeStamps := that.objTimeStamps;
}
constructor clearDirty(that: TMSystem, writeSet: set<MemoryObject>)
ensures txQueues == that.txQueues
ensures procStates == that.procStates
ensures dirtyObjs == that.dirtyObjs - writeSet
ensures lockedObjs == that.lockedObjs
ensures objTimeStamps == that.objTimeStamps
{
txQueues := that.txQueues;
procStates := that.procStates;
dirtyObjs := that.dirtyObjs - writeSet;
lockedObjs := that.lockedObjs;
objTimeStamps := that.objTimeStamps;
}
constructor acquireLock(that: TMSystem, o: MemoryObject)
ensures txQueues == that.txQueues
ensures procStates == that.procStates
ensures dirtyObjs == that.dirtyObjs
ensures lockedObjs == that.lockedObjs + {o}
ensures objTimeStamps == that.objTimeStamps
{
txQueues := that.txQueues;
procStates := that.procStates;
dirtyObjs := that.dirtyObjs;
lockedObjs := that.lockedObjs + {o};
objTimeStamps := that.objTimeStamps;
}
constructor releaseLocks(that: TMSystem, objs: set<MemoryObject>)
ensures txQueues == that.txQueues
ensures procStates == that.procStates
ensures dirtyObjs == that.dirtyObjs
ensures lockedObjs == that.lockedObjs - objs
ensures objTimeStamps == that.objTimeStamps
{
txQueues := that.txQueues;
procStates := that.procStates;
dirtyObjs := that.dirtyObjs;
lockedObjs := that.lockedObjs - objs;
objTimeStamps := that.objTimeStamps;
}
constructor updateTimestamps(that: TMSystem, objs: set<MemoryObject>)
ensures txQueues == that.txQueues
ensures procStates == that.procStates
ensures dirtyObjs == that.dirtyObjs
ensures lockedObjs == that.lockedObjs
ensures objTimeStamps.Keys == that.objTimeStamps.Keys
&& forall o :: o in that.objTimeStamps ==>
if(o in objs) then objTimeStamps[o] != that.objTimeStamps[o] else objTimeStamps[o] == that.objTimeStamps[o]
{
txQueues := that.txQueues;
procStates := that.procStates;
dirtyObjs := that.dirtyObjs;
lockedObjs := that.lockedObjs;
objTimeStamps := map o | o in that.objTimeStamps ::
if(o in objs) then (that.objTimeStamps[o] + 1) else that.objTimeStamps[o];
}
predicate stateValid(pid: ProcessId, state: ProcessState)
requires pid in procStates && state == procStates[pid]
{
&& pid in txQueues
&& state.currentTx <= |txQueues[pid]|
&& if state.currentTx == |txQueues[pid]| then (
// Queue finished
&& state.currentOp == 0
&& state.currentSubOp == 0
&& |state.readSet| == 0
&& |state.writeSet| == 0
) else if state.currentTx < |txQueues[pid]| then (
// Queue unfinished
&& exists tx :: (
&& tx == txQueues[pid][state.currentTx]
&& state.currentOp <= |tx.ops|
&& state.currentOp >= -1
&& if (state.currentOp >= 0 && state.currentOp < |tx.ops|) then (
// Read/Write operations have at most two subOps
state.currentSubOp < 2
) else if state.currentOp == |tx.ops| then (
// tryCommit has 4 subOps
state.currentSubOp < 4
) else if state.currentOp == -1 then (
// abort has 3 subOps
state.currentSubOp < 3
) else false
)
&& state.readSet.Keys <= objTimeStamps.Keys
&& state.writeSet <= lockedObjs
) else false
}
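    // Note: the sub-operation bounds above (2 for read/write, 4 for tryCommit,
    // 3 for abort) mirror the branches of the Step method below, which dispatches
    // on currentOp and currentSubOp within exactly these ranges.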
predicate validSystem()
{
&& procStates.Keys <= txQueues.Keys
&& dirtyObjs <= objTimeStamps.Keys
&& lockedObjs <= objTimeStamps.Keys
&& forall p, s :: p in procStates && s == procStates[p] ==> stateValid(p, s)
}
}
method Step(input: TMSystem, pid: ProcessId) returns (system: TMSystem)
requires pid in input.txQueues
requires pid in input.procStates
requires input.validSystem()
ensures system.validSystem()
{
system := input;
var state: ProcessState := system.procStates[pid];
assert(system.stateValid(pid, state)); // Given by input.validSystem()
var txs := system.txQueues[pid];
if (state.currentTx >= |txs|) {
// Nothing left to do.
return;
}
var tx := txs[state.currentTx];
if (state.currentOp == |tx.ops|) {
// tryCommit
if(state.currentSubOp == 0) {
// Check locks
if !(forall o :: o in state.readSet ==> o in state.writeSet || o !in system.lockedObjs) {
// Write detected (locked), aborting.
state := new ProcessState.abortTx(state);
system := new TMSystem.updateState(system, pid, state);
assume(system.validSystem()); // TODO : Remove assumption.
return;
}
// Continue to next sub-op.
state := new ProcessState.nextSubOp(state);
} else if (state.currentSubOp == 1) {
// Validate timestamps
if !(forall o :: o in state.readSet ==> state.readSet[o] == system.objTimeStamps[o]) {
// Write detected (timestamp changed), aborting.
state := new ProcessState.abortTx(state);
system := new TMSystem.updateState(system, pid, state);
assume(system.validSystem()); // TODO : Remove assumption.
return;
}
// Can (and will) commit !
// The writeset can now be read safely by others so we can remove the dirty mark.
system := new TMSystem.clearDirty(system, state.writeSet);
// Continue to next sub-op.
state := new ProcessState.nextSubOp(state);
} else if (state.currentSubOp == 2) {
// Update timestamps
system := new TMSystem.updateTimestamps(system, state.writeSet);
// Continue to next sub-op.
state := new ProcessState.nextSubOp(state);
} else if (state.currentSubOp == 3) {
// Release locks
system := new TMSystem.releaseLocks(system, state.writeSet);
      // Committed. Continue to next transaction.
state := new ProcessState.nextTx(state);
} else {
assert(false);
}
} else if (state.currentOp == -1) {
// Abort
if(state.currentSubOp == 0) {
assert(state.currentTx < |system.txQueues[pid]|);
// Restore written values (equivalent to removing dirty marks here).
system := new TMSystem.clearDirty(system, state.writeSet);
// Continue to next sub-op.
state := new ProcessState.nextSubOp(state);
} else if (state.currentSubOp == 1) {
// Update timestamps
system := new TMSystem.updateTimestamps(system, state.writeSet);
// Continue to next sub-op.
state := new ProcessState.nextSubOp(state);
} else if (state.currentSubOp == 2) {
// Release locks
system := new TMSystem.releaseLocks(system, state.writeSet);
// Restart transaction.
state := new ProcessState.restartTx(state);
} else {
assert(false);
}
} else if (state.currentOp >= 0 && state.currentOp < |tx.ops|) {
// Read/Write op
var op := tx.ops[state.currentOp];
var o := op.memObject;
// Init object timestamp if not present
if(o !in system.objTimeStamps) {
system := new TMSystem.initTimestamp(system, o);
}
assert(o in system.objTimeStamps);
if(op.isWrite) {
// Write
if(state.currentSubOp == 0) {
if(!(op.memObject in state.writeSet)) {
// trylock
if(o in system.lockedObjs) {
// Failed locking, aborting.
state := new ProcessState.abortTx(state);
} else {
          // Acquire lock. Continue to next sub-op.
system := new TMSystem.acquireLock(system, o);
state := new ProcessState.addToWriteSet(state, o);
state := new ProcessState.nextSubOp(state);
}
} else {
// Already in writeset, continue to next subOp.
state := new ProcessState.nextSubOp(state);
}
} else if (state.currentSubOp == 1) {
// Do the write (equivalent to marking as dirty). Continue to next op.
system := new TMSystem.markDirty(system, o);
state := new ProcessState.nextOp(state);
} else {
assert(false);
}
} else {
// Read operation
if(state.currentSubOp == 0) {
if(o in state.writeSet || o in state.readSet) {
// Already in writeSet or readSet, fast-skip to next op.
state := new ProcessState.nextOp(state);
} else {
// Read timestamp and add to readSet. Continue to next sub-op.
state := new ProcessState.addToReadSet(state, o, system.objTimeStamps[o]);
state := new ProcessState.nextSubOp(state);
}
} else if (state.currentSubOp == 1) {
if(o in system.lockedObjs) {
// Object is locked, aborting.
state := new ProcessState.abortTx(state);
} else {
// All good. Continue to next op.
state := new ProcessState.nextOp(state);
}
} else {
assert(false);
}
}
} else {
assert(false);
}
// Save the new state.
system := new TMSystem.updateState(system, pid, state);
assume(system.validSystem()); // TODO : Remove assumption.
}
}
| module ModelingTM {
type ProcessId = nat
type MemoryObject = nat
type TimeStamp = nat
class Operation {
const isWrite: bool
const memObject: MemoryObject
}
class Transaction {
const ops: seq<Operation>
}
// Process state : transaction progress and process memory.
class ProcessState {
// currentTx : id of tx being processed. txs.size() means done.
const currentTx: nat
// currentOp :
// - tx.ops.size() represents tryCommit operation.
// - -1 represents abort operation
// - values in between represent read and write operations
const currentOp: int
// sub-operations of the operation, see the step function
const currentSubOp: nat
// Set of read objects with original observed timestamp.
const readSet: map<MemoryObject, TimeStamp>
// Set of written objects.
const writeSet: set<MemoryObject>
constructor () {
currentTx := 0;
currentOp := 0;
currentSubOp := 0;
readSet := map[];
writeSet := {};
}
constructor nextSubOp(that: ProcessState)
ensures this.currentTx == that.currentTx
ensures this.currentOp == that.currentOp
ensures this.currentSubOp == that.currentSubOp + 1
ensures this.readSet == that.readSet
ensures this.writeSet == that.writeSet
{
currentTx := that.currentTx;
currentOp := that.currentOp;
currentSubOp := that.currentSubOp + 1;
readSet := that.readSet;
writeSet := that.writeSet;
}
constructor nextOp(that: ProcessState)
ensures this.currentTx == that.currentTx
ensures this.currentOp == that.currentOp + 1
ensures this.currentSubOp == 0
ensures this.readSet == that.readSet
ensures this.writeSet == that.writeSet
{
currentTx := that.currentTx;
currentOp := that.currentOp + 1;
currentSubOp := 0;
readSet := that.readSet;
writeSet := that.writeSet;
}
constructor abortTx(that: ProcessState)
ensures this.currentTx == that.currentTx
ensures this.currentOp == -1
ensures this.currentSubOp == 0
ensures this.readSet == that.readSet
ensures this.writeSet == that.writeSet
{
currentTx := that.currentTx;
currentOp := -1;
currentSubOp := 0;
readSet := that.readSet;
writeSet := that.writeSet;
}
constructor restartTx(that: ProcessState)
ensures this.currentTx == that.currentTx
ensures this.currentOp == 0
ensures this.currentSubOp == 0
ensures this.readSet == map[]
ensures this.writeSet == {}
{
currentTx := that.currentTx;
currentOp := 0;
currentSubOp := 0;
readSet := map[];
writeSet := {};
}
constructor nextTx(that: ProcessState)
ensures this.currentTx == that.currentTx + 1
ensures this.currentOp == 0
ensures this.currentSubOp == 0
ensures this.readSet == map[]
ensures this.writeSet == {}
{
currentTx := that.currentTx + 1;
currentOp := 0;
currentSubOp := 0;
readSet := map[];
writeSet := {};
}
constructor addToReadSet(that: ProcessState, obj: MemoryObject, ts: TimeStamp)
ensures currentTx == that.currentTx
ensures currentOp == that.currentOp
ensures currentSubOp == that.currentSubOp
ensures readSet.Keys == that.readSet.Keys + {obj}
&& readSet[obj] == ts
&& forall o :: o in readSet && o != obj ==> readSet[o] == that.readSet[o]
ensures writeSet == that.writeSet
{
currentTx := that.currentTx;
currentOp := that.currentOp;
currentSubOp := that.currentSubOp;
readSet := that.readSet[obj := ts];
writeSet := that.writeSet;
}
constructor addToWriteSet(that: ProcessState, obj: MemoryObject)
ensures this.currentTx == that.currentTx
ensures this.currentOp == that.currentOp
ensures this.currentSubOp == that.currentSubOp
ensures this.readSet == that.readSet
ensures this.writeSet == that.writeSet + {obj}
{
currentTx := that.currentTx;
currentOp := that.currentOp;
currentSubOp := that.currentSubOp;
readSet := that.readSet;
writeSet := that.writeSet + {obj};
}
}
class TMSystem {
    // Ordered list of transactions that each process should process
const txQueues : map<ProcessId, seq<Transaction>>
// State and memory of processes
const procStates : map<ProcessId, ProcessState>
// Dirty objects. (Replaces the object value in a real representation. Used for safety proof)
const dirtyObjs: set<MemoryObject>
// Object lock.
const lockedObjs: set<MemoryObject>
// Object timestamp. (Incremented at the end of any write transaction)
const objTimeStamps: map<MemoryObject, nat>
constructor (q: map<ProcessId, seq<Transaction>>) {
txQueues := q;
procStates := map[];
dirtyObjs := {};
lockedObjs := {};
objTimeStamps := map[];
}
constructor initTimestamp(that: TMSystem, obj: MemoryObject)
ensures txQueues == that.txQueues
ensures procStates == that.procStates
ensures dirtyObjs == that.dirtyObjs
ensures lockedObjs == that.lockedObjs
ensures objTimeStamps.Keys == that.objTimeStamps.Keys + {obj}
&& objTimeStamps[obj] == 0
&& forall o :: o in objTimeStamps && o != obj ==> objTimeStamps[o] == that.objTimeStamps[o]
{
txQueues := that.txQueues;
procStates := that.procStates;
dirtyObjs := that.dirtyObjs;
lockedObjs := that.lockedObjs;
objTimeStamps := that.objTimeStamps[obj := 0];
}
constructor updateState(that: TMSystem, pid: ProcessId, state: ProcessState)
ensures txQueues == that.txQueues
ensures procStates.Keys == that.procStates.Keys + {pid}
&& procStates[pid] == state
&& forall p :: p in procStates && p != pid ==> procStates[p] == that.procStates[p]
ensures dirtyObjs == that.dirtyObjs
ensures lockedObjs == that.lockedObjs
ensures objTimeStamps == that.objTimeStamps
{
txQueues := that.txQueues;
procStates := that.procStates[pid := state];
dirtyObjs := that.dirtyObjs;
lockedObjs := that.lockedObjs;
objTimeStamps := that.objTimeStamps;
}
constructor markDirty(that: TMSystem, obj: MemoryObject)
ensures txQueues == that.txQueues
ensures procStates == that.procStates
ensures dirtyObjs == that.dirtyObjs + {obj}
ensures lockedObjs == that.lockedObjs
ensures objTimeStamps == that.objTimeStamps
{
txQueues := that.txQueues;
procStates := that.procStates;
dirtyObjs := that.dirtyObjs + {obj};
lockedObjs := that.lockedObjs;
objTimeStamps := that.objTimeStamps;
}
constructor clearDirty(that: TMSystem, writeSet: set<MemoryObject>)
ensures txQueues == that.txQueues
ensures procStates == that.procStates
ensures dirtyObjs == that.dirtyObjs - writeSet
ensures lockedObjs == that.lockedObjs
ensures objTimeStamps == that.objTimeStamps
{
txQueues := that.txQueues;
procStates := that.procStates;
dirtyObjs := that.dirtyObjs - writeSet;
lockedObjs := that.lockedObjs;
objTimeStamps := that.objTimeStamps;
}
constructor acquireLock(that: TMSystem, o: MemoryObject)
ensures txQueues == that.txQueues
ensures procStates == that.procStates
ensures dirtyObjs == that.dirtyObjs
ensures lockedObjs == that.lockedObjs + {o}
ensures objTimeStamps == that.objTimeStamps
{
txQueues := that.txQueues;
procStates := that.procStates;
dirtyObjs := that.dirtyObjs;
lockedObjs := that.lockedObjs + {o};
objTimeStamps := that.objTimeStamps;
}
constructor releaseLocks(that: TMSystem, objs: set<MemoryObject>)
ensures txQueues == that.txQueues
ensures procStates == that.procStates
ensures dirtyObjs == that.dirtyObjs
ensures lockedObjs == that.lockedObjs - objs
ensures objTimeStamps == that.objTimeStamps
{
txQueues := that.txQueues;
procStates := that.procStates;
dirtyObjs := that.dirtyObjs;
lockedObjs := that.lockedObjs - objs;
objTimeStamps := that.objTimeStamps;
}
constructor updateTimestamps(that: TMSystem, objs: set<MemoryObject>)
ensures txQueues == that.txQueues
ensures procStates == that.procStates
ensures dirtyObjs == that.dirtyObjs
ensures lockedObjs == that.lockedObjs
ensures objTimeStamps.Keys == that.objTimeStamps.Keys
&& forall o :: o in that.objTimeStamps ==>
if(o in objs) then objTimeStamps[o] != that.objTimeStamps[o] else objTimeStamps[o] == that.objTimeStamps[o]
{
txQueues := that.txQueues;
procStates := that.procStates;
dirtyObjs := that.dirtyObjs;
lockedObjs := that.lockedObjs;
objTimeStamps := map o | o in that.objTimeStamps ::
if(o in objs) then (that.objTimeStamps[o] + 1) else that.objTimeStamps[o];
}
predicate stateValid(pid: ProcessId, state: ProcessState)
requires pid in procStates && state == procStates[pid]
{
&& pid in txQueues
&& state.currentTx <= |txQueues[pid]|
&& if state.currentTx == |txQueues[pid]| then (
// Queue finished
&& state.currentOp == 0
&& state.currentSubOp == 0
&& |state.readSet| == 0
&& |state.writeSet| == 0
) else if state.currentTx < |txQueues[pid]| then (
// Queue unfinished
&& exists tx :: (
&& tx == txQueues[pid][state.currentTx]
&& state.currentOp <= |tx.ops|
&& state.currentOp >= -1
&& if (state.currentOp >= 0 && state.currentOp < |tx.ops|) then (
// Read/Write operations have at most two subOps
state.currentSubOp < 2
) else if state.currentOp == |tx.ops| then (
// tryCommit has 4 subOps
state.currentSubOp < 4
) else if state.currentOp == -1 then (
// abort has 3 subOps
state.currentSubOp < 3
) else false
)
&& state.readSet.Keys <= objTimeStamps.Keys
&& state.writeSet <= lockedObjs
) else false
}
predicate validSystem()
{
&& procStates.Keys <= txQueues.Keys
&& dirtyObjs <= objTimeStamps.Keys
&& lockedObjs <= objTimeStamps.Keys
&& forall p, s :: p in procStates && s == procStates[p] ==> stateValid(p, s)
}
}
method Step(input: TMSystem, pid: ProcessId) returns (system: TMSystem)
requires pid in input.txQueues
requires pid in input.procStates
requires input.validSystem()
ensures system.validSystem()
{
system := input;
var state: ProcessState := system.procStates[pid];
var txs := system.txQueues[pid];
if (state.currentTx >= |txs|) {
// Nothing left to do.
return;
}
var tx := txs[state.currentTx];
if (state.currentOp == |tx.ops|) {
// tryCommit
if(state.currentSubOp == 0) {
// Check locks
if !(forall o :: o in state.readSet ==> o in state.writeSet || o !in system.lockedObjs) {
// Write detected (locked), aborting.
state := new ProcessState.abortTx(state);
system := new TMSystem.updateState(system, pid, state);
assume(system.validSystem()); // TODO : Remove assumption.
return;
}
// Continue to next sub-op.
state := new ProcessState.nextSubOp(state);
} else if (state.currentSubOp == 1) {
// Validate timestamps
if !(forall o :: o in state.readSet ==> state.readSet[o] == system.objTimeStamps[o]) {
// Write detected (timestamp changed), aborting.
state := new ProcessState.abortTx(state);
system := new TMSystem.updateState(system, pid, state);
assume(system.validSystem()); // TODO : Remove assumption.
return;
}
// Can (and will) commit !
// The writeset can now be read safely by others so we can remove the dirty mark.
system := new TMSystem.clearDirty(system, state.writeSet);
// Continue to next sub-op.
state := new ProcessState.nextSubOp(state);
} else if (state.currentSubOp == 2) {
// Update timestamps
system := new TMSystem.updateTimestamps(system, state.writeSet);
// Continue to next sub-op.
state := new ProcessState.nextSubOp(state);
} else if (state.currentSubOp == 3) {
// Release locks
system := new TMSystem.releaseLocks(system, state.writeSet);
      // Committed. Continue to next transaction.
state := new ProcessState.nextTx(state);
} else {
}
} else if (state.currentOp == -1) {
// Abort
if(state.currentSubOp == 0) {
// Restore written values (equivalent to removing dirty marks here).
system := new TMSystem.clearDirty(system, state.writeSet);
// Continue to next sub-op.
state := new ProcessState.nextSubOp(state);
} else if (state.currentSubOp == 1) {
// Update timestamps
system := new TMSystem.updateTimestamps(system, state.writeSet);
// Continue to next sub-op.
state := new ProcessState.nextSubOp(state);
} else if (state.currentSubOp == 2) {
// Release locks
system := new TMSystem.releaseLocks(system, state.writeSet);
// Restart transaction.
state := new ProcessState.restartTx(state);
} else {
}
} else if (state.currentOp >= 0 && state.currentOp < |tx.ops|) {
// Read/Write op
var op := tx.ops[state.currentOp];
var o := op.memObject;
// Init object timestamp if not present
if(o !in system.objTimeStamps) {
system := new TMSystem.initTimestamp(system, o);
}
if(op.isWrite) {
// Write
if(state.currentSubOp == 0) {
if(!(op.memObject in state.writeSet)) {
// trylock
if(o in system.lockedObjs) {
// Failed locking, aborting.
state := new ProcessState.abortTx(state);
} else {
          // Acquire lock. Continue to next sub-op.
system := new TMSystem.acquireLock(system, o);
state := new ProcessState.addToWriteSet(state, o);
state := new ProcessState.nextSubOp(state);
}
} else {
// Already in writeset, continue to next subOp.
state := new ProcessState.nextSubOp(state);
}
} else if (state.currentSubOp == 1) {
// Do the write (equivalent to marking as dirty). Continue to next op.
system := new TMSystem.markDirty(system, o);
state := new ProcessState.nextOp(state);
} else {
}
} else {
// Read operation
if(state.currentSubOp == 0) {
if(o in state.writeSet || o in state.readSet) {
// Already in writeSet or readSet, fast-skip to next op.
state := new ProcessState.nextOp(state);
} else {
// Read timestamp and add to readSet. Continue to next sub-op.
state := new ProcessState.addToReadSet(state, o, system.objTimeStamps[o]);
state := new ProcessState.nextSubOp(state);
}
} else if (state.currentSubOp == 1) {
if(o in system.lockedObjs) {
// Object is locked, aborting.
state := new ProcessState.abortTx(state);
} else {
// All good. Continue to next op.
state := new ProcessState.nextOp(state);
}
} else {
}
}
} else {
}
// Save the new state.
system := new TMSystem.updateState(system, pid, state);
assume(system.validSystem()); // TODO : Remove assumption.
}
}
|
693 | groupTheory_tmp_tmppmmxvu8h_assignment1.dfy | /*
Student name: Mark Valman
Id: 342439593
*/
/* Question/Exercise 1 of 4 */
lemma Q1_logical_equivalence_as_a_conjunction_of_two_implications__PROOF_BY_TRUTH_TABLE__in_a_comment(L: bool, R: bool)
ensures (L <==> R) <==> (L ==> R) && (!L ==> !R)
{
/*
This lemma states that logical equivalence (L <==> R) can be proved in two steps:
(1) that L implies R, and that (2) the negation of L implies the negation of R.
As can be seen here (by the curly braces "{" on line 4 and "}" below this comment), Dafny accepts this claim with no problem.
Your goal in this exercise is to use the truth tables we've learned for conjunction and negation in lecture01.dfy,
for logical implication in lecture02.dfy, and for logical equivalence (bi-directional implication) in lecture03.dfy,
to prove correctness of this claim (such that the final column will have T on each line).
See as an example for this kind of exercise the truth table in lines 13-21 of tutorial03.dfy;
there, however, the stated property was not correct (as we ended with the truth value T only on 6 of the 8 lines)
YOUR_SOLUTION_SHOULD_BE_WRITTEN_HERE (inside this comment, to the human reader, not to Dafny):
L R !L !R "L ==> R" "!L ==> !R" "(L ==> R) && (!L ==> !R)" "(L <==> R)" "(L <==> R) <==> (L ==> R) && (!L ==> !R)"
F F T T T T T T T
F T T F T F F F T
T F F T F T F F T
T T F F T T T T T
*/
}
/* Question/Exercise 2 of 4 */
lemma Q2_DistributivityOfSetUnionOverSetIntersection(A: set, B: set, C: set)
ensures A+(B*C) == (A+B)*(A+C)
/*
In this exercise you are expected to write a *full* proof for the lemma;
as an example, see the proof of "DistributivityOfSetIntersectionOverSetUnion"
starting on line 167 of lecture04.dfy and continuing on lines 3-44 of tutorial04.dfy;
note that the proof must be fully justified for the human reader,
with labels to assertions and the relevant reveal statements where needed,
as can be seen in the "Distributivity2a" lemma from the tutorial
(in contrast to the lemma "Distributivity1a" from the lecture, where we did not add labels);
in case of syntax errors, your solution will NOT be checked.
YOUR_SOLUTION_SHOULD_BE_WRITTEN_BELOW_THIS_LINE, between curly braces "{" and "}" */
{
var L,R:= A+(B*C),(A+B)*(A+C);
forall x| x in L ensures x in R
{
assert 1: x in A+(B*C);
assert 2: x in A||(x in B && x in C) by {reveal 1; }
if x in A
{
assert 3: x in A+(B*C) by {reveal 1; }
assert 4: (x in A || x in B ) && (x in A || x in C) by {reveal 3;}
assert 5: x in (A+B)*(A+C) by {reveal 4;}
}
else
{
assert 6: x in (B*C);
assert 7: (x in A || x in B ) && (x in A || x in C) by {reveal 6,3;}
assert 8: x in (A+B)*(A+C) by {reveal 7; }
assert 9: x in R by {reveal 8;}
}
}
forall x| x in R ensures x in L
{
assert 9: x in (A+B)*(A+C);
assert 10: (x in A|| x in B)&& (x in A|| x in C) by {reveal 9; }
assert 11: x in A || (x in B && x in C) by {reveal 10; }
assert 12: x in A + (B*C) by {reveal 11; }
assert 13: x in L by {reveal 12; }
}
}
/* Question/Exercise 3 of 4 */
lemma Q3_SetUnionIsAssociative(A: iset, B: iset, C: iset)
ensures (A + B) + C == A + (B + C)
/*
when taking the union of three (possibly-infinite) sets, the order of the operations does not matter;
this property is known as associativity;
this is the same in the addition of integers:
assert forall x:int, y: int, z: int :: x+(y+z) == (x+y)+z;
(whereas for subtraction it does not hold: assert 10-(4-1) == 10-3 == 7 != 5 == 6-1 == (10-4)-1;)
As in exercise 2 above, you are expected to provide a *full* proof, in Dafny, with no errors.
YOUR_SOLUTION_SHOULD_BE_WRITTEN_BELOW_THIS_LINE, between curly braces "{" and "}" */
{
var L,R := (A + B) + C, A + (B + C);
forall x | x in L ensures x in R
{
assert 1: x in (A + B) + C;
assert 2: (x in A || x in B) || x in C by {reveal 1; }
assert 3: x in A || (x in B || x in C) by {reveal 2; }
assert 4: x in A + (B + C) by {reveal 3; }
assert 5: x in R by {reveal 4; }
}
forall x | x in R ensures x in L
{
assert 6: x in A + (B + C);
assert 7: x in A || (x in B || x in C ) by {reveal 6; }
assert 8: (x in A|| x in B) || x in C by {reveal 7; }
assert 9: x in (A + B) + C by {reveal 8; }
assert 10: x in L by {reveal 9; }
}
}
/* Question/Exercise 4 of 4 */
/*
Recall from "SquareOfIntegersIsNotMonotonic" in lecture05.dfy how a lemma that returns results
can be used to disprove a claim by providing evidence for its negation;
similarly, your goal here is to choose values for A,B,C and demonstrate (using assertions or the "calc" construct)
how when performing the set difference operation twice, the order of operations DOES matter!
YOUR_SOLUTION_SHOULD_BE_WRITTEN_BELOW_THIS_LINE, between curly braces "{" and "}" */
lemma preparation_for_Q4_SetDifferenceIs_NOT_Associative()
ensures !forall A: set<int>, B: set<int>, C: set<int> :: (A - B) - C == A - (B - C)
{
assert exists A: set<int>, B: set<int>, C: set<int> :: (A - B) - C != A - (B - C) by {
var A, B, C := Q4_Evidence_That_SetDifferenceIs_NOT_Associative();
assert (A - B) - C != A - (B - C);
}
}
lemma Q4_Evidence_That_SetDifferenceIs_NOT_Associative() returns (A: set<int>, B: set<int>, C: set<int>)
ensures (A - B) - C != A - (B - C)
{
A:= {6,3,7};
B:= {1,6};
C:= {3,2,5};
assert (A - B) - C != A - (B - C);
calc
{
(A - B) - C != A - (B - C);
==
({6,3,7} - {1,6}) - {3,2,5} != {6,3,7} - ({1,6} - {3,2,5});
==
( {7} != {3,7} );
==
true;
}
}
| /*
Student name: Mark Valman
Id: 342439593
*/
/* Question/Exercise 1 of 4 */
lemma Q1_logical_equivalence_as_a_conjunction_of_two_implications__PROOF_BY_TRUTH_TABLE__in_a_comment(L: bool, R: bool)
ensures (L <==> R) <==> (L ==> R) && (!L ==> !R)
{
/*
This lemma states that logical equivalence (L <==> R) can be proved in two steps:
(1) that L implies R, and that (2) the negation of L implies the negation of R.
As can be seen here (by the curly braces "{" on line 4 and "}" below this comment), Dafny accepts this claim with no problem.
Your goal in this exercise is to use the truth tables we've learned for conjunction and negation in lecture01.dfy,
for logical implication in lecture02.dfy, and for logical equivalence (bi-directional implication) in lecture03.dfy,
to prove correctness of this claim (such that the final column will have T on each line).
See as an example for this kind of exercise the truth table in lines 13-21 of tutorial03.dfy;
there, however, the stated property was not correct (as we ended with the truth value T only on 6 of the 8 lines)
YOUR_SOLUTION_SHOULD_BE_WRITTEN_HERE (inside this comment, to the human reader, not to Dafny):
L R !L !R "L ==> R" "!L ==> !R" "(L ==> R) && (!L ==> !R)" "(L <==> R)" "(L <==> R) <==> (L ==> R) && (!L ==> !R)"
F F T T T T T T T
F T T F T F F F T
T F F T F T F F T
T T F F T T T T T
*/
}
/* Question/Exercise 2 of 4 */
lemma Q2_DistributivityOfSetUnionOverSetIntersection(A: set, B: set, C: set)
ensures A+(B*C) == (A+B)*(A+C)
/*
In this exercise you are expected to write a *full* proof for the lemma;
as an example, see the proof of "DistributivityOfSetIntersectionOverSetUnion"
starting on line 167 of lecture04.dfy and continuing on lines 3-44 of tutorial04.dfy;
note that the proof must be fully justified for the human reader,
with labels to assertions and the relevant reveal statements where needed,
as can be seen in the "Distributivity2a" lemma from the tutorial
(in contrast to the lemma "Distributivity1a" from the lecture, where we did not add labels);
in case of syntax errors, your solution will NOT be checked.
YOUR_SOLUTION_SHOULD_BE_WRITTEN_BELOW_THIS_LINE, between curly braces "{" and "}" */
{
var L,R:= A+(B*C),(A+B)*(A+C);
forall x| x in L ensures x in R
{
if x in A
{
}
else
{
}
}
forall x| x in R ensures x in L
{
}
}
/* Question/Exercise 3 of 4 */
lemma Q3_SetUnionIsAssociative(A: iset, B: iset, C: iset)
ensures (A + B) + C == A + (B + C)
/*
when taking the union of three (possibly-infinite) sets, the order of the operations does not matter;
this property is known as associativity;
this is the same in the addition of integers:
(whereas for subtraction it does not hold: assert 10-(4-1) == 10-3 == 7 != 5 == 6-1 == (10-4)-1;)
As in exercise 2 above, you are expected to provide a *full* proof, in Dafny, with no errors.
YOUR_SOLUTION_SHOULD_BE_WRITTEN_BELOW_THIS_LINE, between curly braces "{" and "}" */
{
var L,R := (A + B) + C, A + (B + C);
forall x | x in L ensures x in R
{
}
forall x | x in R ensures x in L
{
}
}
/* Question/Exercise 4 of 4 */
/*
Recall from "SquareOfIntegersIsNotMonotonic" in lecture05.dfy how a lemma that returns results
can be used to disprove a claim by providing evidence for its negation;
similarly, your goal here is to choose values for A,B,C and demonstrate (using assertions or the "calc" construct)
how when performing the set difference operation twice, the order of operations DOES matter!
YOUR_SOLUTION_SHOULD_BE_WRITTEN_BELOW_THIS_LINE, between curly braces "{" and "}" */
lemma preparation_for_Q4_SetDifferenceIs_NOT_Associative()
ensures !forall A: set<int>, B: set<int>, C: set<int> :: (A - B) - C == A - (B - C)
{
var A, B, C := Q4_Evidence_That_SetDifferenceIs_NOT_Associative();
}
lemma Q4_Evidence_That_SetDifferenceIs_NOT_Associative() returns (A: set<int>, B: set<int>, C: set<int>)
ensures (A - B) - C != A - (B - C)
{
A:= {6,3,7};
B:= {1,6};
C:= {3,2,5};
calc
{
(A - B) - C != A - (B - C);
==
({6,3,7} - {1,6}) - {3,2,5} != {6,3,7} - ({1,6} - {3,2,5});
==
( {7} != {3,7} );
==
true;
}
}
|
694 | groupTheory_tmp_tmppmmxvu8h_tutorial2.dfy | ghost method M1()
{
assert 1 != 3;
// assert 1 == 2;
assume 1 == 2;
assert 1 == 2;
}
lemma IntersectionIsSubsetOfBoth(A: set, B: set, C: set)
requires C == A*B
ensures C <= A && C <= B
{}
lemma BothSetsAreSubsetsOfTheirUnion(A: set, B: set, C: set)
requires C == A+B
ensures A <= C && B <= C
{}
const s0 := {3,8,1}
//var s2 := {4,5}
lemma M2()
{
var s1 := {2,4,6,8};
assert |s1| == 4;
//s0 := {4,1,2};
s1 := {};
assert |s1| == 0;
assert s1 <= s0;
}
lemma TheEmptySetIsASubsetOfAnySet(A: set, B: set)
requires A == {}
ensures A <= B // same as writing: B >= A
{}
lemma AnySetIsASubsetOfItself(A: set)
ensures A <= A
{}
lemma TheIntersectionOfTwoSetsIsASubsetOfTheirUnion(A: set, B: set, C: set, D: set)
requires C == A*B && D == A+B
ensures C <= D
{
assert C <= A by { assert C == A*B; IntersectionIsSubsetOfBoth(A, B, C); }
assert A <= D by { assert D == A+B; BothSetsAreSubsetsOfTheirUnion(A, B, D); }
}
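// Note: the two assertions establish C <= A and A <= D; the postcondition C <= D
// then follows by transitivity of the subset relation.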
| ghost method M1()
{
// assert 1 == 2;
assume 1 == 2;
}
lemma IntersectionIsSubsetOfBoth(A: set, B: set, C: set)
requires C == A*B
ensures C <= A && C <= B
{}
lemma BothSetsAreSubsetsOfTheirUnion(A: set, B: set, C: set)
requires C == A+B
ensures A <= C && B <= C
{}
const s0 := {3,8,1}
//var s2 := {4,5}
lemma M2()
{
var s1 := {2,4,6,8};
//s0 := {4,1,2};
s1 := {};
}
lemma TheEmptySetIsASubsetOfAnySet(A: set, B: set)
requires A == {}
ensures A <= B // same as writing: B >= A
{}
lemma AnySetIsASubsetOfItself(A: set)
ensures A <= A
{}
lemma TheIntersectionOfTwoSetsIsASubsetOfTheirUnion(A: set, B: set, C: set, D: set)
requires C == A*B && D == A+B
ensures C <= D
{
}
|
695 | groupTheory_tmp_tmppmmxvu8h_yair_yair2.dfy |
///////////////////////////
// Lemma to prove Transitive
// Got A<B, B<C.
// Prove A<C
///////////////////////////
predicate IsSubset(A: set, B: set) // <=
{
forall n :: n in A ==> n in B // same as the next line
//forall n :: if n in A then n in B else true // same as "A <= B"
}
// lemma - a theorem
// subsetIsTransitive - lemma name.
// (A: set, B: set, C: set) - parameters used in the lemma.
// "A" - parameter name, ": set " - parameter type (set = group).
lemma subsetIsTransitive(A: set, B: set, C: set)
// requires - the given / the premise of the claim
// "Pre1" - a label; the label of the require clause
// "IsSubset" - function name. "(A, B)" function parameters
requires Pre1 : IsSubset(A, B)
requires Pre2 : IsSubset(B, C)
// ensures - "promises me" - what has to be proved
ensures IsSubset(A, C)
// Start of ensure - the start of the proof
{
// forall - for every x
// "x in A" - such that x belongs to A,
// ensures x in C - promises that x belongs to C
forall x | x in A ensures x in C {
// assert - a claim + label "3"
assert 3: x in A;
// can't just state x in B, we prove it by "by"
// "reveal" - לחסוף. To reveal why we used this assert.
// reveal by: "3" - x in A. "Pre1" - IsSubset(A, B)
assert 4: x in B by { reveal 3, Pre1; }
assert x in C by { reveal 4, Pre2; }
}
}
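// Example use: with A == {1}, B == {1, 2} and C == {1, 2, 3} both preconditions
// hold, so a caller of subsetIsTransitive(A, B, C) may conclude IsSubset(A, C).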
|
///////////////////////////
// Lemma to prove Transitive
// Got A<B, B<C.
// Prove A<C
///////////////////////////
predicate IsSubset(A: set, B: set) // <=
{
forall n :: n in A ==> n in B // same as the next line
//forall n :: if n in A then n in B else true // same as "A <= B"
}
// lemma - a theorem
// subsetIsTransitive - lemma name.
// (A: set, B: set, C: set) - parameters used in the lemma.
// "A" - parameter name, ": set " - parameter type (set = group).
lemma subsetIsTransitive(A: set, B: set, C: set)
// requires - the given / the premise of the claim
// "Pre1" - a label; the label of the require clause
// "IsSubset" - function name. "(A, B)" function parameters
requires Pre1 : IsSubset(A, B)
requires Pre2 : IsSubset(B, C)
// ensures - "promises me" - what has to be proved
ensures IsSubset(A, C)
// Start of ensure - the start of the proof
{
// forall - for every x
// "x in A" - such that x belongs to A,
// ensures x in C - promises that x belongs to C
forall x | x in A ensures x in C {
// assert - a claim + label "3"
// can't just state x in B, we prove it by "by"
// "reveal" - לחסוף. To reveal why we used this assert.
// reveal by: "3" - x in A. "Pre1" - IsSubset(A, B)
}
}
|
696 | iron-sync_tmp_tmps49o3tyz_Impl_CommitterCommitModel.dfy | // include "IOModel.i.dfy"
// include "../lib/DataStructures/LinearMutableMap.i.dfy"
// module CommitterCommitModel {
// import opened NativeTypes
// import opened Options
// import opened DiskLayout
// import opened InterpretationDiskOps
// import opened ViewOp
// import JC = JournalCache
// import opened Journal
// import opened JournalBytes
// import opened DiskOpModel
// import SectorType
// import LinearMutableMap
// // import opened StateModel
// import opened IOModel
// function SyncReqs2to1Iterate(
// m: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>,
// it: LinearMutableMap.Iterator<JC.SyncReqStatus>,
// m0: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// : (m' : LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// requires LinearMutableMap.Inv(m)
// requires LinearMutableMap.WFIter(m, it)
// requires LinearMutableMap.Inv(m0)
// requires m0.contents.Keys == it.s
// ensures LinearMutableMap.Inv(m')
// decreases it.decreaser
// {
// if it.next.Done? then
// m0
// else (
// LinearMutableMap.LemmaIterIndexLtCount(m, it);
// LinearMutableMap.CountBound(m);
// SyncReqs2to1Iterate(
// m,
// LinearMutableMap.IterInc(m, it),
// LinearMutableMap.Insert(m0, it.next.key,
// (if it.next.value == JC.State2 then JC.State1 else it.next.value))
// )
// )
// }
// function {:opaque} SyncReqs2to1(m: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// : (m' : LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// requires LinearMutableMap.Inv(m)
// ensures LinearMutableMap.Inv(m')
// {
// SyncReqs2to1Iterate(m,
// LinearMutableMap.IterStart(m),
// LinearMutableMap.Constructor(128))
// }
// lemma SyncReqs2to1Correct(m: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// requires LinearMutableMap.Inv(m)
// ensures SyncReqs2to1(m).contents == JC.syncReqs2to1(m.contents)
// {
// reveal_SyncReqs2to1();
// var it := LinearMutableMap.IterStart(m);
// var m0 := LinearMutableMap.Constructor(128);
// while !it.next.Done?
// invariant LinearMutableMap.Inv(m)
// invariant LinearMutableMap.WFIter(m, it)
// invariant LinearMutableMap.Inv(m0)
// invariant m0.contents.Keys == it.s
// invariant forall id | id in it.s ::
// m0.contents[id] == (if m.contents[id] == JC.State2 then JC.State1 else m.contents[id])
// invariant SyncReqs2to1(m) == SyncReqs2to1Iterate(m, it, m0)
// decreases it.decreaser
// {
// LinearMutableMap.LemmaIterIndexLtCount(m, it);
// LinearMutableMap.CountBound(m);
// m0 := LinearMutableMap.Insert(m0, it.next.key,
// (if it.next.value == JC.State2 then JC.State1 else it.next.value));
// it := LinearMutableMap.IterInc(m, it);
// }
// }
// function SyncReqs3to2Iterate(
// m: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>,
// it: LinearMutableMap.Iterator<JC.SyncReqStatus>,
// m0: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// : (m' : LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// requires LinearMutableMap.Inv(m)
// requires LinearMutableMap.WFIter(m, it)
// requires LinearMutableMap.Inv(m0)
// requires m0.contents.Keys == it.s
// ensures LinearMutableMap.Inv(m')
// decreases it.decreaser
// {
// if it.next.Done? then
// m0
// else (
// LinearMutableMap.LemmaIterIndexLtCount(m, it);
// LinearMutableMap.CountBound(m);
// SyncReqs3to2Iterate(
// m,
// LinearMutableMap.IterInc(m, it),
// LinearMutableMap.Insert(m0, it.next.key,
// (if it.next.value == JC.State3 then JC.State2 else it.next.value))
// )
// )
// }
// function {:opaque} SyncReqs3to2(m: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// : (m' : LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// requires LinearMutableMap.Inv(m)
// ensures LinearMutableMap.Inv(m')
// {
// SyncReqs3to2Iterate(m,
// LinearMutableMap.IterStart(m),
// LinearMutableMap.Constructor(128))
// }
// lemma SyncReqs3to2Correct(m: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// requires LinearMutableMap.Inv(m)
// ensures SyncReqs3to2(m).contents == JC.syncReqs3to2(m.contents)
// {
// reveal_SyncReqs3to2();
// var it := LinearMutableMap.IterStart(m);
// var m0 := LinearMutableMap.Constructor(128);
// while !it.next.Done?
// invariant LinearMutableMap.Inv(m)
// invariant LinearMutableMap.WFIter(m, it)
// invariant LinearMutableMap.Inv(m0)
// invariant m0.contents.Keys == it.s
// invariant forall id | id in it.s ::
// m0.contents[id] == (if m.contents[id] == JC.State3 then JC.State2 else m.contents[id])
// invariant SyncReqs3to2(m) == SyncReqs3to2Iterate(m, it, m0)
// decreases it.decreaser
// {
// LinearMutableMap.LemmaIterIndexLtCount(m, it);
// LinearMutableMap.CountBound(m);
// m0 := LinearMutableMap.Insert(m0, it.next.key,
// (if it.next.value == JC.State3 then JC.State2 else it.next.value));
// it := LinearMutableMap.IterInc(m, it);
// }
// }
// function {:opaque} WriteOutJournal(cm: CM, io: IO)
// : (res : (CM, IO))
// requires io.IOInit?
// requires CommitterModel.WF(cm)
// requires JournalistModel.I(cm.journalist).inMemoryJournalFrozen != []
// || JournalistModel.I(cm.journalist).inMemoryJournal != []
// {
// var writtenJournalLen :=
// JournalistModel.getWrittenJournalLen(cm.journalist);
// var doingFrozen :=
// JournalistModel.hasFrozenJournal(cm.journalist);
// var (journalist', j) :=
// if doingFrozen then
// JournalistModel.packageFrozenJournal(cm.journalist)
// else
// JournalistModel.packageInMemoryJournal(cm.journalist);
// var start := start_pos_add(
// cm.superblock.journalStart,
// writtenJournalLen);
// var len := |j| as uint64 / 4096;
// var contiguous := start + len <= NumJournalBlocks();
// var io' := if contiguous then
// IOReqWrite(io.id, D.ReqWrite(JournalPoint(start), j))
// else (
// var cut := (NumJournalBlocks() - start) * 4096;
// IOReqWrite2(io.id, io.id2,
// D.ReqWrite(JournalPoint(start), j[..cut]),
// D.ReqWrite(JournalPoint(0), j[cut..]))
// );
// var outstandingJournalWrites' := if contiguous
// then cm.outstandingJournalWrites + {io.id}
// else cm.outstandingJournalWrites + {io.id, io.id2};
// var frozenJournalPosition' := if doingFrozen
// then JournalistModel.getWrittenJournalLen(journalist')
// else cm.frozenJournalPosition;
// var syncReqs' := if doingFrozen
// then cm.syncReqs
// else SyncReqs3to2(cm.syncReqs);
// var cm' := cm
// .(outstandingJournalWrites := outstandingJournalWrites')
// .(journalist := journalist')
// .(frozenJournalPosition := frozenJournalPosition')
// .(syncReqs := syncReqs');
// (cm', io')
// }
// lemma WriteOutJournalCorrect(cm: CM, io: IO)
// requires WriteOutJournal.requires(cm, io)
// requires cm.superblockWrite.None?
// ensures var (cm', io') := WriteOutJournal(cm, io);
// && CommitterModel.WF(cm')
// && ValidDiskOp(diskOp(io'))
// && IDiskOp(diskOp(io')).bdop.NoDiskOp?
// && JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp)
// {
// var (cm', io') := WriteOutJournal(cm, io);
// reveal_WriteOutJournal();
// var writtenJournalLen :=
// JournalistModel.getWrittenJournalLen(cm.journalist);
// var doingFrozen :=
// JournalistModel.hasFrozenJournal(cm.journalist);
// var (journalist', j) :=
// if doingFrozen then
// JournalistModel.packageFrozenJournal(cm.journalist)
// else
// JournalistModel.packageInMemoryJournal(cm.journalist);
// var start := start_pos_add(
// cm.superblock.journalStart,
// writtenJournalLen);
// var jr := JournalRangeOfByteSeq(j).value;
// var len := |j| as uint64 / 4096;
// var contiguous := start + len <= NumJournalBlocks();
// assert |jr| == len as int;
// if contiguous {
// assert LocOfReqWrite(diskOp(io').reqWrite)
// == JournalRangeLocation(start, len);
// assert ValidDiskOp(diskOp(io'));
// } else {
// assert LocOfReqWrite(diskOp(io').reqWrite1)
// == JournalRangeLocation(start, NumJournalBlocks() - start);
// assert LocOfReqWrite(diskOp(io').reqWrite2)
// == JournalRangeLocation(0, len - (NumJournalBlocks() - start));
// JournalBytesSplit(j, len as int,
// NumJournalBlocks() as int - start as int);
// assert ValidDiskOp(diskOp(io'));
// }
// SyncReqs3to2Correct(cm.syncReqs);
// assert JC.WriteBackJournalReq(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp,
// jr);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp,
// JC.WriteBackJournalReqStep(jr));
// }
// predicate writeOutSuperblockAdvanceLog(cm: CM, io: IO,
// cm': CM, io': IO)
// requires io.IOInit?
// requires CommitterModel.WF(cm)
// {
// var writtenJournalLen :=
// JournalistModel.getWrittenJournalLen(cm.journalist);
// var newSuperblock := SectorType.Superblock(
// JC.IncrementSuperblockCounter(cm.superblock.counter),
// cm.superblock.journalStart,
// writtenJournalLen,
// cm.superblock.indirectionTableLoc
// );
// var loc := if cm.whichSuperblock == 0 then Superblock2Location() else Superblock1Location();
// && cm'.superblockWrite.Some?
// && var id := cm'.superblockWrite.value;
// && RequestWrite(io, loc, SSM.SectorSuperblock(newSuperblock),
// id, io')
// && cm' == cm
// .(newSuperblock := Some(newSuperblock))
// .(superblockWrite := Some(id))
// .(commitStatus := JC.CommitAdvanceLog)
// }
// lemma writeOutSuperblockAdvanceLogCorrect(cm: CM, io: IO,
// cm': CM, io': IO)
// requires io.IOInit?
// requires CommitterModel.WF(cm)
// requires writeOutSuperblockAdvanceLog(cm, io, cm', io')
// requires cm.status == StatusReady
// requires cm.commitStatus.CommitNone?
// requires cm.outstandingJournalWrites == {}
// requires JournalistModel.I(cm.journalist).inMemoryJournalFrozen == []
// ensures CommitterModel.WF(cm')
// ensures ValidDiskOp(diskOp(io'))
// ensures IDiskOp(diskOp(io')).bdop.NoDiskOp?
// ensures JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp)
// {
// var writtenJournalLen :=
// JournalistModel.getWrittenJournalLen(cm.journalist);
// var newSuperblock := SectorType.Superblock(
// JC.IncrementSuperblockCounter(cm.superblock.counter),
// cm.superblock.journalStart,
// writtenJournalLen,
// cm.superblock.indirectionTableLoc
// );
// assert JC.WFSuperblock(newSuperblock);
// var loc := if cm.whichSuperblock == 0 then Superblock2Location() else Superblock1Location();
// var id := cm'.superblockWrite.value;
// RequestWriteCorrect(io, loc, SSM.SectorSuperblock(newSuperblock),
// id, io');
// assert ValidDiskOp(diskOp(io'));
// assert JC.WriteBackSuperblockReq_AdvanceLog(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp,
// JC.WriteBackSuperblockReq_AdvanceLog_Step);
// }
// predicate {:opaque} writeOutSuperblockAdvanceLocation(cm: CM, io: IO,
// cm': CM, io': IO)
// requires io.IOInit?
// requires CommitterModel.Inv(cm)
// requires cm.status == StatusReady
// requires cm.frozenLoc.Some?
// {
// var writtenJournalLen :=
// JournalistModel.getWrittenJournalLen(cm.journalist);
// var newSuperblock := SectorType.Superblock(
// JC.IncrementSuperblockCounter(cm.superblock.counter),
// start_pos_add(
// cm.superblock.journalStart,
// cm.frozenJournalPosition),
// writtenJournalLen - cm.frozenJournalPosition,
// cm.frozenLoc.value
// );
// var loc := if cm.whichSuperblock == 0 then Superblock2Location() else Superblock1Location();
// && cm'.superblockWrite.Some?
// && var id := cm'.superblockWrite.value;
// && RequestWrite(io, loc, SSM.SectorSuperblock(newSuperblock),
// id, io')
// && cm' == cm
// .(newSuperblock := Some(newSuperblock))
// .(superblockWrite := Some(id))
// .(commitStatus := JC.CommitAdvanceLocation)
// }
// lemma writeOutSuperblockAdvanceLocationCorrect(cm: CM, io: IO,
// cm': CM, io': IO)
// requires io.IOInit?
// requires CommitterModel.Inv(cm)
// requires cm.status == StatusReady
// requires cm.frozenLoc.Some?
// requires cm.commitStatus.CommitNone?
// requires cm.outstandingJournalWrites == {}
// requires writeOutSuperblockAdvanceLocation(cm, io, cm', io')
// requires JournalistModel.I(cm.journalist).inMemoryJournalFrozen == []
// ensures CommitterModel.WF(cm')
// ensures ValidDiskOp(diskOp(io'))
// ensures IDiskOp(diskOp(io')).bdop.NoDiskOp?
// ensures JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp)
// {
// reveal_writeOutSuperblockAdvanceLocation();
// var writtenJournalLen :=
// JournalistModel.getWrittenJournalLen(cm.journalist);
// var newSuperblock := SectorType.Superblock(
// JC.IncrementSuperblockCounter(cm.superblock.counter),
// start_pos_add(
// cm.superblock.journalStart,
// cm.frozenJournalPosition) as uint64,
// (writtenJournalLen - cm.frozenJournalPosition) as uint64,
// cm.frozenLoc.value
// );
// assert JC.WFSuperblock(newSuperblock);
// var loc := if cm.whichSuperblock == 0 then Superblock2Location() else Superblock1Location();
// var id := cm'.superblockWrite.value;
// RequestWriteCorrect(io, loc, SSM.SectorSuperblock(newSuperblock),
// id, io');
// assert ValidDiskOp(diskOp(io'));
// assert JC.WriteBackSuperblockReq_AdvanceLocation(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp,
// JC.WriteBackSuperblockReq_AdvanceLocation_Step);
// }
// function {:opaque} freeze(cm: CM) : (cm': CM)
// requires CommitterModel.WF(cm)
// {
// var writtenJournalLen :=
// JournalistModel.getWrittenJournalLen(cm.journalist);
// cm.(frozenLoc := None)
// .(journalist := JournalistModel.freeze(cm.journalist))
// .(frozenJournalPosition := writtenJournalLen)
// .(isFrozen := true)
// .(syncReqs := SyncReqs3to2(cm.syncReqs))
// }
// lemma freezeCorrect(cm: CM)
// requires CommitterModel.WF(cm)
// requires cm.superblockWrite.None?
// // Mostly we'll probably just do this with cm.frozenLoc == None
// // but more generally we can do it whenever we have:
// requires cm.status == StatusReady
// requires cm.frozenLoc != Some(cm.superblock.indirectionTableLoc)
// requires JournalistModel.I(cm.journalist).replayJournal == []
// ensures var cm' := freeze(cm);
// && CommitterModel.WF(cm')
// && JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// FreezeOp)
// {
// reveal_freeze();
// var cm' := freeze(cm);
// SyncReqs3to2Correct(cm.syncReqs);
// assert JC.Freeze(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// FreezeOp);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// FreezeOp,
// JC.FreezeStep);
// }
// function {:opaque} receiveFrozenLoc(
// cm: CM, loc: Location) : (cm': CM)
// {
// cm.(frozenLoc := Some(loc))
// }
// lemma receiveFrozenLocCorrect(cm: CM, loc: Location)
// requires CommitterModel.WF(cm)
// requires cm.status == StatusReady
// requires cm.isFrozen
// requires !cm.frozenLoc.Some?
// requires ValidIndirectionTableLocation(loc)
// ensures var cm' := receiveFrozenLoc(cm, loc);
// && CommitterModel.WF(cm')
// && JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// SendFrozenLocOp(loc))
// {
// reveal_receiveFrozenLoc();
// var cm' := receiveFrozenLoc(cm, loc);
// assert JC.ReceiveFrozenLoc(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// SendFrozenLocOp(loc));
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// SendFrozenLocOp(loc),
// JC.ReceiveFrozenLocStep);
// }
// // == pushSync ==
// function {:opaque} freeId<A>(syncReqs: LinearMutableMap.LinearHashMap<A>) : (id: uint64)
// requires LinearMutableMap.Inv(syncReqs)
// ensures id != 0 ==> id !in syncReqs.contents
// {
// var maxId := LinearMutableMap.MaxKey(syncReqs);
// if maxId == 0xffff_ffff_ffff_ffff then (
// 0
// ) else (
// maxId + 1
// )
// }
// function pushSync(cm: CM) : (CM, uint64)
// requires CommitterModel.WF(cm)
// {
// var id := freeId(cm.syncReqs);
// if id == 0 || cm.syncReqs.count as int >= 0x1_0000_0000_0000_0000 / 8 then (
// (cm, 0)
// ) else (
// var cm' := cm.(syncReqs := LinearMutableMap.Insert(cm.syncReqs, id, JC.State3));
// (cm', id)
// )
// }
// lemma pushSyncCorrect(cm: CM)
// requires CommitterModel.WF(cm)
// ensures var (cm', id) := pushSync(cm);
// && CommitterModel.WF(cm')
// && JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// if id == 0 then JournalInternalOp else PushSyncOp(id as int))
// {
// var (cm', id) := pushSync(cm);
// if id == 0 || cm.syncReqs.count as int >= 0x1_0000_0000_0000_0000 / 8 {
// assert JC.NoOp(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// JournalInternalOp);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// JournalInternalOp,
// JC.NoOpStep);
// } else {
// assert JC.PushSyncReq(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// PushSyncOp(id as int), id);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// PushSyncOp(id as int),
// JC.PushSyncReqStep(id));
// }
// }
// // == popSync ==
// function {:opaque} popSync(cm: CM, id: uint64) : (cm' : CM)
// requires CommitterModel.WF(cm)
// {
// cm.(syncReqs := LinearMutableMap.Remove(cm.syncReqs, id))
// }
// lemma popSyncCorrect(cm: CM, id: uint64)
// requires CommitterModel.WF(cm)
// requires id in cm.syncReqs.contents
// requires cm.syncReqs.contents[id] == JC.State1
// ensures var cm' := popSync(cm, id);
// && CommitterModel.WF(cm')
// && JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// PopSyncOp(id as int))
// {
// var cm' := popSync(cm, id);
// reveal_popSync();
// assert JC.PopSyncReq(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// PopSyncOp(id as int), id);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// PopSyncOp(id as int),
// JC.PopSyncReqStep(id));
// }
// // == AdvanceLog ==
// predicate {:opaque} tryAdvanceLog(cm: CM, io: IO,
// cm': CM, io': IO)
// requires CommitterModel.WF(cm)
// requires io.IOInit?
// {
// var hasFrozen := JournalistModel.hasFrozenJournal(cm.journalist);
// var hasInMem := JournalistModel.hasInMemoryJournal(cm.journalist);
// if cm.superblockWrite.None? then (
// if hasFrozen || hasInMem then (
// (cm', io') == WriteOutJournal(cm, io)
// ) else if cm.outstandingJournalWrites == {} then (
// writeOutSuperblockAdvanceLog(cm, io, cm', io')
// ) else (
// && cm' == cm
// && io' == io
// )
// ) else (
// && cm' == cm
// && io' == io
// )
// }
// lemma tryAdvanceLogCorrect(cm: CM, io: IO,
// cm': CM, io': IO)
// requires CommitterModel.Inv(cm)
// requires io.IOInit?
// requires cm.status.StatusReady?
// requires tryAdvanceLog(cm, io, cm', io')
// ensures CommitterModel.WF(cm')
// ensures ValidDiskOp(diskOp(io'))
// ensures IDiskOp(diskOp(io')).bdop.NoDiskOp?
// ensures JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp)
// {
// reveal_tryAdvanceLog();
// var hasFrozen := JournalistModel.hasFrozenJournal(cm.journalist);
// var hasInMem := JournalistModel.hasInMemoryJournal(cm.journalist);
// if cm.superblockWrite.None? {
// if hasFrozen || hasInMem {
// WriteOutJournalCorrect(cm, io);
// } else if (cm.outstandingJournalWrites == {}) {
// writeOutSuperblockAdvanceLogCorrect(cm, io, cm', io');
// } else {
// assert JC.NoOp( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp);
// assert JC.NextStep( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp, JC.NoOpStep);
// }
// } else {
// assert JC.NoOp( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp);
// assert JC.NextStep( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp, JC.NoOpStep);
// }
// }
// predicate {:opaque} tryAdvanceLocation(cm: CM, io: IO,
// cm': CM, io': IO)
// requires CommitterModel.Inv(cm)
// requires io.IOInit?
// requires cm.status == StatusReady
// requires cm.frozenLoc.Some?
// {
// var hasFrozen := JournalistModel.hasFrozenJournal(cm.journalist);
// var hasInMem := JournalistModel.hasInMemoryJournal(cm.journalist);
// if cm.superblockWrite.None? then (
// if hasFrozen || hasInMem then (
// (cm', io') == WriteOutJournal(cm, io)
// ) else if cm.outstandingJournalWrites == {} then (
// writeOutSuperblockAdvanceLocation(cm, io, cm', io')
// ) else (
// && cm' == cm
// && io' == io
// )
// ) else (
// && cm' == cm
// && io' == io
// )
// }
// lemma tryAdvanceLocationCorrect(cm: CM, io: IO,
// cm': CM, io': IO)
// requires CommitterModel.Inv(cm)
// requires io.IOInit?
// requires cm.status.StatusReady?
// requires cm.frozenLoc.Some?
// requires tryAdvanceLocation(cm, io, cm', io')
// ensures CommitterModel.WF(cm')
// ensures ValidDiskOp(diskOp(io'))
// ensures IDiskOp(diskOp(io')).bdop.NoDiskOp?
// ensures JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp)
// {
// reveal_tryAdvanceLocation();
// var hasFrozen := JournalistModel.hasFrozenJournal(cm.journalist);
// var hasInMem := JournalistModel.hasInMemoryJournal(cm.journalist);
// if cm.superblockWrite.None? {
// if hasFrozen || hasInMem {
// WriteOutJournalCorrect(cm, io);
// } else if (cm.outstandingJournalWrites == {}) {
// writeOutSuperblockAdvanceLocationCorrect(cm, io, cm', io');
// } else {
// assert JC.NoOp( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp);
// assert JC.NextStep( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp, JC.NoOpStep);
// }
// } else {
// assert JC.NoOp( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp);
// assert JC.NextStep( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp, JC.NoOpStep);
// }
// }
// function {:opaque} writeBackSuperblockResp(
// cm: CommitterModel.CM) : CommitterModel.CM
// requires CommitterModel.Inv(cm)
// {
// if cm.status.StatusReady? &&
// cm.commitStatus.CommitAdvanceLocation? then (
// cm.(superblockWrite := None)
// .(superblock := cm.newSuperblock.value)
// .(newSuperblock := None)
// .(whichSuperblock := if cm.whichSuperblock == 0 then 1 else 0)
// .(syncReqs := SyncReqs2to1(cm.syncReqs))
// .(journalist :=
// JournalistModel.updateWrittenJournalLen(
// cm.journalist,
// JournalistModel.getWrittenJournalLen(cm.journalist)
// - cm.frozenJournalPosition
// )
// )
// .(frozenJournalPosition := 0)
// .(frozenLoc := None)
// .(isFrozen := false)
// .(commitStatus := JC.CommitNone)
// )
// else if cm.status.StatusReady? &&
// cm.commitStatus.CommitAdvanceLog? then (
// cm.(superblockWrite := None)
// .(superblock := cm.newSuperblock.value)
// .(newSuperblock := None)
// .(whichSuperblock := if cm.whichSuperblock == 0 then 1 else 0)
// .(syncReqs := SyncReqs2to1(cm.syncReqs))
// .(commitStatus := JC.CommitNone)
// )
// else (
// cm
// )
// }
// lemma writeBackSuperblockRespCorrect(
// cm: CommitterModel.CM, io: IO)
// requires CommitterModel.Inv(cm)
// requires ValidDiskOp(diskOp(io))
// requires IDiskOp(diskOp(io)).jdop.RespWriteSuperblockOp?
// requires Some(io.id) == cm.superblockWrite
// ensures var cm' := writeBackSuperblockResp(cm);
// && CommitterModel.WF(cm')
// && JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io)).jdop,
// if cm.status.StatusReady? && cm.commitStatus.CommitAdvanceLocation? then CleanUpOp else JournalInternalOp
// )
// {
// reveal_writeBackSuperblockResp();
// var cm' := writeBackSuperblockResp(cm);
// SyncReqs2to1Correct(cm.syncReqs);
// if cm.status.StatusReady? &&
// cm.commitStatus.CommitAdvanceLocation? {
// assert JC.WriteBackSuperblockResp(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io)).jdop,
// CleanUpOp);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io)).jdop,
// CleanUpOp,
// JC.WriteBackSuperblockRespStep);
// }
// else if cm.status.StatusReady? &&
// cm.commitStatus.CommitAdvanceLog? {
// assert JC.WriteBackSuperblockResp(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io)).jdop,
// JournalInternalOp);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io)).jdop,
// JournalInternalOp,
// JC.WriteBackSuperblockRespStep);
// }
// else {
// assert JC.NoOp(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io)).jdop,
// JournalInternalOp);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io)).jdop,
// JournalInternalOp,
// JC.NoOpStep);
// }
// }
// }
| // include "IOModel.i.dfy"
// include "../lib/DataStructures/LinearMutableMap.i.dfy"
// module CommitterCommitModel {
// import opened NativeTypes
// import opened Options
// import opened DiskLayout
// import opened InterpretationDiskOps
// import opened ViewOp
// import JC = JournalCache
// import opened Journal
// import opened JournalBytes
// import opened DiskOpModel
// import SectorType
// import LinearMutableMap
// // import opened StateModel
// import opened IOModel
// function SyncReqs2to1Iterate(
// m: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>,
// it: LinearMutableMap.Iterator<JC.SyncReqStatus>,
// m0: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// : (m' : LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// requires LinearMutableMap.Inv(m)
// requires LinearMutableMap.WFIter(m, it)
// requires LinearMutableMap.Inv(m0)
// requires m0.contents.Keys == it.s
// ensures LinearMutableMap.Inv(m')
// decreases it.decreaser
// {
// if it.next.Done? then
// m0
// else (
// LinearMutableMap.LemmaIterIndexLtCount(m, it);
// LinearMutableMap.CountBound(m);
// SyncReqs2to1Iterate(
// m,
// LinearMutableMap.IterInc(m, it),
// LinearMutableMap.Insert(m0, it.next.key,
// (if it.next.value == JC.State2 then JC.State1 else it.next.value))
// )
// )
// }
// function {:opaque} SyncReqs2to1(m: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// : (m' : LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// requires LinearMutableMap.Inv(m)
// ensures LinearMutableMap.Inv(m')
// {
// SyncReqs2to1Iterate(m,
// LinearMutableMap.IterStart(m),
// LinearMutableMap.Constructor(128))
// }
// lemma SyncReqs2to1Correct(m: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// requires LinearMutableMap.Inv(m)
// ensures SyncReqs2to1(m).contents == JC.syncReqs2to1(m.contents)
// {
// reveal_SyncReqs2to1();
// var it := LinearMutableMap.IterStart(m);
// var m0 := LinearMutableMap.Constructor(128);
// while !it.next.Done?
// invariant LinearMutableMap.Inv(m)
// invariant LinearMutableMap.WFIter(m, it)
// invariant LinearMutableMap.Inv(m0)
// invariant m0.contents.Keys == it.s
// invariant forall id | id in it.s ::
// m0.contents[id] == (if m.contents[id] == JC.State2 then JC.State1 else m.contents[id])
// invariant SyncReqs2to1(m) == SyncReqs2to1Iterate(m, it, m0)
// decreases it.decreaser
// {
// LinearMutableMap.LemmaIterIndexLtCount(m, it);
// LinearMutableMap.CountBound(m);
// m0 := LinearMutableMap.Insert(m0, it.next.key,
// (if it.next.value == JC.State2 then JC.State1 else it.next.value));
// it := LinearMutableMap.IterInc(m, it);
// }
// }
// function SyncReqs3to2Iterate(
// m: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>,
// it: LinearMutableMap.Iterator<JC.SyncReqStatus>,
// m0: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// : (m' : LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// requires LinearMutableMap.Inv(m)
// requires LinearMutableMap.WFIter(m, it)
// requires LinearMutableMap.Inv(m0)
// requires m0.contents.Keys == it.s
// ensures LinearMutableMap.Inv(m')
// decreases it.decreaser
// {
// if it.next.Done? then
// m0
// else (
// LinearMutableMap.LemmaIterIndexLtCount(m, it);
// LinearMutableMap.CountBound(m);
// SyncReqs3to2Iterate(
// m,
// LinearMutableMap.IterInc(m, it),
// LinearMutableMap.Insert(m0, it.next.key,
// (if it.next.value == JC.State3 then JC.State2 else it.next.value))
// )
// )
// }
// function {:opaque} SyncReqs3to2(m: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// : (m' : LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// requires LinearMutableMap.Inv(m)
// ensures LinearMutableMap.Inv(m')
// {
// SyncReqs3to2Iterate(m,
// LinearMutableMap.IterStart(m),
// LinearMutableMap.Constructor(128))
// }
// lemma SyncReqs3to2Correct(m: LinearMutableMap.LinearHashMap<JC.SyncReqStatus>)
// requires LinearMutableMap.Inv(m)
// ensures SyncReqs3to2(m).contents == JC.syncReqs3to2(m.contents)
// {
// reveal_SyncReqs3to2();
// var it := LinearMutableMap.IterStart(m);
// var m0 := LinearMutableMap.Constructor(128);
// while !it.next.Done?
// invariant LinearMutableMap.Inv(m)
// invariant LinearMutableMap.WFIter(m, it)
// invariant LinearMutableMap.Inv(m0)
// invariant m0.contents.Keys == it.s
// invariant forall id | id in it.s ::
// m0.contents[id] == (if m.contents[id] == JC.State3 then JC.State2 else m.contents[id])
// invariant SyncReqs3to2(m) == SyncReqs3to2Iterate(m, it, m0)
// decreases it.decreaser
// {
// LinearMutableMap.LemmaIterIndexLtCount(m, it);
// LinearMutableMap.CountBound(m);
// m0 := LinearMutableMap.Insert(m0, it.next.key,
// (if it.next.value == JC.State3 then JC.State2 else it.next.value));
// it := LinearMutableMap.IterInc(m, it);
// }
// }
// function {:opaque} WriteOutJournal(cm: CM, io: IO)
// : (res : (CM, IO))
// requires io.IOInit?
// requires CommitterModel.WF(cm)
// requires JournalistModel.I(cm.journalist).inMemoryJournalFrozen != []
// || JournalistModel.I(cm.journalist).inMemoryJournal != []
// {
// var writtenJournalLen :=
// JournalistModel.getWrittenJournalLen(cm.journalist);
// var doingFrozen :=
// JournalistModel.hasFrozenJournal(cm.journalist);
// var (journalist', j) :=
// if doingFrozen then
// JournalistModel.packageFrozenJournal(cm.journalist)
// else
// JournalistModel.packageInMemoryJournal(cm.journalist);
// var start := start_pos_add(
// cm.superblock.journalStart,
// writtenJournalLen);
// var len := |j| as uint64 / 4096;
// var contiguous := start + len <= NumJournalBlocks();
// var io' := if contiguous then
// IOReqWrite(io.id, D.ReqWrite(JournalPoint(start), j))
// else (
// var cut := (NumJournalBlocks() - start) * 4096;
// IOReqWrite2(io.id, io.id2,
// D.ReqWrite(JournalPoint(start), j[..cut]),
// D.ReqWrite(JournalPoint(0), j[cut..]))
// );
// var outstandingJournalWrites' := if contiguous
// then cm.outstandingJournalWrites + {io.id}
// else cm.outstandingJournalWrites + {io.id, io.id2};
// var frozenJournalPosition' := if doingFrozen
// then JournalistModel.getWrittenJournalLen(journalist')
// else cm.frozenJournalPosition;
// var syncReqs' := if doingFrozen
// then cm.syncReqs
// else SyncReqs3to2(cm.syncReqs);
// var cm' := cm
// .(outstandingJournalWrites := outstandingJournalWrites')
// .(journalist := journalist')
// .(frozenJournalPosition := frozenJournalPosition')
// .(syncReqs := syncReqs');
// (cm', io')
// }
// lemma WriteOutJournalCorrect(cm: CM, io: IO)
// requires WriteOutJournal.requires(cm, io)
// requires cm.superblockWrite.None?
// ensures var (cm', io') := WriteOutJournal(cm, io);
// && CommitterModel.WF(cm')
// && ValidDiskOp(diskOp(io'))
// && IDiskOp(diskOp(io')).bdop.NoDiskOp?
// && JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp)
// {
// var (cm', io') := WriteOutJournal(cm, io);
// reveal_WriteOutJournal();
// var writtenJournalLen :=
// JournalistModel.getWrittenJournalLen(cm.journalist);
// var doingFrozen :=
// JournalistModel.hasFrozenJournal(cm.journalist);
// var (journalist', j) :=
// if doingFrozen then
// JournalistModel.packageFrozenJournal(cm.journalist)
// else
// JournalistModel.packageInMemoryJournal(cm.journalist);
// var start := start_pos_add(
// cm.superblock.journalStart,
// writtenJournalLen);
// var jr := JournalRangeOfByteSeq(j).value;
// var len := |j| as uint64 / 4096;
// var contiguous := start + len <= NumJournalBlocks();
// assert |jr| == len as int;
// if contiguous {
// assert LocOfReqWrite(diskOp(io').reqWrite)
// == JournalRangeLocation(start, len);
// assert ValidDiskOp(diskOp(io'));
// } else {
// assert LocOfReqWrite(diskOp(io').reqWrite1)
// == JournalRangeLocation(start, NumJournalBlocks() - start);
// assert LocOfReqWrite(diskOp(io').reqWrite2)
// == JournalRangeLocation(0, len - (NumJournalBlocks() - start));
// JournalBytesSplit(j, len as int,
// NumJournalBlocks() as int - start as int);
// assert ValidDiskOp(diskOp(io'));
// }
// SyncReqs3to2Correct(cm.syncReqs);
// assert JC.WriteBackJournalReq(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp,
// jr);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp,
// JC.WriteBackJournalReqStep(jr));
// }
// predicate writeOutSuperblockAdvanceLog(cm: CM, io: IO,
// cm': CM, io': IO)
// requires io.IOInit?
// requires CommitterModel.WF(cm)
// {
// var writtenJournalLen :=
// JournalistModel.getWrittenJournalLen(cm.journalist);
// var newSuperblock := SectorType.Superblock(
// JC.IncrementSuperblockCounter(cm.superblock.counter),
// cm.superblock.journalStart,
// writtenJournalLen,
// cm.superblock.indirectionTableLoc
// );
// var loc := if cm.whichSuperblock == 0 then Superblock2Location() else Superblock1Location();
// && cm'.superblockWrite.Some?
// && var id := cm'.superblockWrite.value;
// && RequestWrite(io, loc, SSM.SectorSuperblock(newSuperblock),
// id, io')
// && cm' == cm
// .(newSuperblock := Some(newSuperblock))
// .(superblockWrite := Some(id))
// .(commitStatus := JC.CommitAdvanceLog)
// }
// lemma writeOutSuperblockAdvanceLogCorrect(cm: CM, io: IO,
// cm': CM, io': IO)
// requires io.IOInit?
// requires CommitterModel.WF(cm)
// requires writeOutSuperblockAdvanceLog(cm, io, cm', io')
// requires cm.status == StatusReady
// requires cm.commitStatus.CommitNone?
// requires cm.outstandingJournalWrites == {}
// requires JournalistModel.I(cm.journalist).inMemoryJournalFrozen == []
// ensures CommitterModel.WF(cm')
// ensures ValidDiskOp(diskOp(io'))
// ensures IDiskOp(diskOp(io')).bdop.NoDiskOp?
// ensures JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp)
// {
// var writtenJournalLen :=
// JournalistModel.getWrittenJournalLen(cm.journalist);
// var newSuperblock := SectorType.Superblock(
// JC.IncrementSuperblockCounter(cm.superblock.counter),
// cm.superblock.journalStart,
// writtenJournalLen,
// cm.superblock.indirectionTableLoc
// );
// assert JC.WFSuperblock(newSuperblock);
// var loc := if cm.whichSuperblock == 0 then Superblock2Location() else Superblock1Location();
// var id := cm'.superblockWrite.value;
// RequestWriteCorrect(io, loc, SSM.SectorSuperblock(newSuperblock),
// id, io');
// assert ValidDiskOp(diskOp(io'));
// assert JC.WriteBackSuperblockReq_AdvanceLog(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp,
// JC.WriteBackSuperblockReq_AdvanceLog_Step);
// }
// predicate {:opaque} writeOutSuperblockAdvanceLocation(cm: CM, io: IO,
// cm': CM, io': IO)
// requires io.IOInit?
// requires CommitterModel.Inv(cm)
// requires cm.status == StatusReady
// requires cm.frozenLoc.Some?
// {
// var writtenJournalLen :=
// JournalistModel.getWrittenJournalLen(cm.journalist);
// var newSuperblock := SectorType.Superblock(
// JC.IncrementSuperblockCounter(cm.superblock.counter),
// start_pos_add(
// cm.superblock.journalStart,
// cm.frozenJournalPosition),
// writtenJournalLen - cm.frozenJournalPosition,
// cm.frozenLoc.value
// );
// var loc := if cm.whichSuperblock == 0 then Superblock2Location() else Superblock1Location();
// && cm'.superblockWrite.Some?
// && var id := cm'.superblockWrite.value;
// && RequestWrite(io, loc, SSM.SectorSuperblock(newSuperblock),
// id, io')
// && cm' == cm
// .(newSuperblock := Some(newSuperblock))
// .(superblockWrite := Some(id))
// .(commitStatus := JC.CommitAdvanceLocation)
// }
// lemma writeOutSuperblockAdvanceLocationCorrect(cm: CM, io: IO,
// cm': CM, io': IO)
// requires io.IOInit?
// requires CommitterModel.Inv(cm)
// requires cm.status == StatusReady
// requires cm.frozenLoc.Some?
// requires cm.commitStatus.CommitNone?
// requires cm.outstandingJournalWrites == {}
// requires writeOutSuperblockAdvanceLocation(cm, io, cm', io')
// requires JournalistModel.I(cm.journalist).inMemoryJournalFrozen == []
// ensures CommitterModel.WF(cm')
// ensures ValidDiskOp(diskOp(io'))
// ensures IDiskOp(diskOp(io')).bdop.NoDiskOp?
// ensures JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp)
// {
// reveal_writeOutSuperblockAdvanceLocation();
// var writtenJournalLen :=
// JournalistModel.getWrittenJournalLen(cm.journalist);
// var newSuperblock := SectorType.Superblock(
// JC.IncrementSuperblockCounter(cm.superblock.counter),
// start_pos_add(
// cm.superblock.journalStart,
// cm.frozenJournalPosition) as uint64,
// (writtenJournalLen - cm.frozenJournalPosition) as uint64,
// cm.frozenLoc.value
// );
// assert JC.WFSuperblock(newSuperblock);
// var loc := if cm.whichSuperblock == 0 then Superblock2Location() else Superblock1Location();
// var id := cm'.superblockWrite.value;
// RequestWriteCorrect(io, loc, SSM.SectorSuperblock(newSuperblock),
// id, io');
// assert ValidDiskOp(diskOp(io'));
// assert JC.WriteBackSuperblockReq_AdvanceLocation(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp,
// JC.WriteBackSuperblockReq_AdvanceLocation_Step);
// }
// function {:opaque} freeze(cm: CM) : (cm': CM)
// requires CommitterModel.WF(cm)
// {
// var writtenJournalLen :=
// JournalistModel.getWrittenJournalLen(cm.journalist);
// cm.(frozenLoc := None)
// .(journalist := JournalistModel.freeze(cm.journalist))
// .(frozenJournalPosition := writtenJournalLen)
// .(isFrozen := true)
// .(syncReqs := SyncReqs3to2(cm.syncReqs))
// }
// lemma freezeCorrect(cm: CM)
// requires CommitterModel.WF(cm)
// requires cm.superblockWrite.None?
// // Mostly we'll probably just do this with cm.frozenLoc == None
// // but more generally we can do it whenever we have:
// requires cm.status == StatusReady
// requires cm.frozenLoc != Some(cm.superblock.indirectionTableLoc)
// requires JournalistModel.I(cm.journalist).replayJournal == []
// ensures var cm' := freeze(cm);
// && CommitterModel.WF(cm')
// && JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// FreezeOp)
// {
// reveal_freeze();
// var cm' := freeze(cm);
// SyncReqs3to2Correct(cm.syncReqs);
// assert JC.Freeze(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// FreezeOp);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// FreezeOp,
// JC.FreezeStep);
// }
// function {:opaque} receiveFrozenLoc(
// cm: CM, loc: Location) : (cm': CM)
// {
// cm.(frozenLoc := Some(loc))
// }
// lemma receiveFrozenLocCorrect(cm: CM, loc: Location)
// requires CommitterModel.WF(cm)
// requires cm.status == StatusReady
// requires cm.isFrozen
// requires !cm.frozenLoc.Some?
// requires ValidIndirectionTableLocation(loc)
// ensures var cm' := receiveFrozenLoc(cm, loc);
// && CommitterModel.WF(cm')
// && JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// SendFrozenLocOp(loc))
// {
// reveal_receiveFrozenLoc();
// var cm' := receiveFrozenLoc(cm, loc);
// assert JC.ReceiveFrozenLoc(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// SendFrozenLocOp(loc));
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// SendFrozenLocOp(loc),
// JC.ReceiveFrozenLocStep);
// }
// // == pushSync ==
// function {:opaque} freeId<A>(syncReqs: LinearMutableMap.LinearHashMap<A>) : (id: uint64)
// requires LinearMutableMap.Inv(syncReqs)
// ensures id != 0 ==> id !in syncReqs.contents
// {
// var maxId := LinearMutableMap.MaxKey(syncReqs);
// if maxId == 0xffff_ffff_ffff_ffff then (
// 0
// ) else (
// maxId + 1
// )
// }
// function pushSync(cm: CM) : (CM, uint64)
// requires CommitterModel.WF(cm)
// {
// var id := freeId(cm.syncReqs);
// if id == 0 || cm.syncReqs.count as int >= 0x1_0000_0000_0000_0000 / 8 then (
// (cm, 0)
// ) else (
// var cm' := cm.(syncReqs := LinearMutableMap.Insert(cm.syncReqs, id, JC.State3));
// (cm', id)
// )
// }
// lemma pushSyncCorrect(cm: CM)
// requires CommitterModel.WF(cm)
// ensures var (cm', id) := pushSync(cm);
// && CommitterModel.WF(cm')
// && JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// if id == 0 then JournalInternalOp else PushSyncOp(id as int))
// {
// var (cm', id) := pushSync(cm);
// if id == 0 || cm.syncReqs.count as int >= 0x1_0000_0000_0000_0000 / 8 {
// assert JC.NoOp(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// JournalInternalOp);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// JournalInternalOp,
// JC.NoOpStep);
// } else {
// assert JC.PushSyncReq(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// PushSyncOp(id as int), id);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// PushSyncOp(id as int),
// JC.PushSyncReqStep(id));
// }
// }
// // == popSync ==
// function {:opaque} popSync(cm: CM, id: uint64) : (cm' : CM)
// requires CommitterModel.WF(cm)
// {
// cm.(syncReqs := LinearMutableMap.Remove(cm.syncReqs, id))
// }
// lemma popSyncCorrect(cm: CM, id: uint64)
// requires CommitterModel.WF(cm)
// requires id in cm.syncReqs.contents
// requires cm.syncReqs.contents[id] == JC.State1
// ensures var cm' := popSync(cm, id);
// && CommitterModel.WF(cm')
// && JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// PopSyncOp(id as int))
// {
// var cm' := popSync(cm, id);
// reveal_popSync();
// assert JC.PopSyncReq(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// PopSyncOp(id as int), id);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// JournalDisk.NoDiskOp,
// PopSyncOp(id as int),
// JC.PopSyncReqStep(id));
// }
// // == AdvanceLog ==
// predicate {:opaque} tryAdvanceLog(cm: CM, io: IO,
// cm': CM, io': IO)
// requires CommitterModel.WF(cm)
// requires io.IOInit?
// {
// var hasFrozen := JournalistModel.hasFrozenJournal(cm.journalist);
// var hasInMem := JournalistModel.hasInMemoryJournal(cm.journalist);
// if cm.superblockWrite.None? then (
// if hasFrozen || hasInMem then (
// (cm', io') == WriteOutJournal(cm, io)
// ) else if cm.outstandingJournalWrites == {} then (
// writeOutSuperblockAdvanceLog(cm, io, cm', io')
// ) else (
// && cm' == cm
// && io' == io
// )
// ) else (
// && cm' == cm
// && io' == io
// )
// }
// lemma tryAdvanceLogCorrect(cm: CM, io: IO,
// cm': CM, io': IO)
// requires CommitterModel.Inv(cm)
// requires io.IOInit?
// requires cm.status.StatusReady?
// requires tryAdvanceLog(cm, io, cm', io')
// ensures CommitterModel.WF(cm')
// ensures ValidDiskOp(diskOp(io'))
// ensures IDiskOp(diskOp(io')).bdop.NoDiskOp?
// ensures JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp)
// {
// reveal_tryAdvanceLog();
// var hasFrozen := JournalistModel.hasFrozenJournal(cm.journalist);
// var hasInMem := JournalistModel.hasInMemoryJournal(cm.journalist);
// if cm.superblockWrite.None? {
// if hasFrozen || hasInMem {
// WriteOutJournalCorrect(cm, io);
// } else if (cm.outstandingJournalWrites == {}) {
// writeOutSuperblockAdvanceLogCorrect(cm, io, cm', io');
// } else {
// assert JC.NoOp( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp);
// assert JC.NextStep( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp, JC.NoOpStep);
// }
// } else {
// assert JC.NoOp( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp);
// assert JC.NextStep( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp, JC.NoOpStep);
// }
// }
// predicate {:opaque} tryAdvanceLocation(cm: CM, io: IO,
// cm': CM, io': IO)
// requires CommitterModel.Inv(cm)
// requires io.IOInit?
// requires cm.status == StatusReady
// requires cm.frozenLoc.Some?
// {
// var hasFrozen := JournalistModel.hasFrozenJournal(cm.journalist);
// var hasInMem := JournalistModel.hasInMemoryJournal(cm.journalist);
// if cm.superblockWrite.None? then (
// if hasFrozen || hasInMem then (
// (cm', io') == WriteOutJournal(cm, io)
// ) else if cm.outstandingJournalWrites == {} then (
// writeOutSuperblockAdvanceLocation(cm, io, cm', io')
// ) else (
// && cm' == cm
// && io' == io
// )
// ) else (
// && cm' == cm
// && io' == io
// )
// }
// lemma tryAdvanceLocationCorrect(cm: CM, io: IO,
// cm': CM, io': IO)
// requires CommitterModel.Inv(cm)
// requires io.IOInit?
// requires cm.status.StatusReady?
// requires cm.frozenLoc.Some?
// requires tryAdvanceLocation(cm, io, cm', io')
// ensures CommitterModel.WF(cm')
// ensures ValidDiskOp(diskOp(io'))
// ensures IDiskOp(diskOp(io')).bdop.NoDiskOp?
// ensures JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io')).jdop,
// JournalInternalOp)
// {
// reveal_tryAdvanceLocation();
// var hasFrozen := JournalistModel.hasFrozenJournal(cm.journalist);
// var hasInMem := JournalistModel.hasInMemoryJournal(cm.journalist);
// if cm.superblockWrite.None? {
// if hasFrozen || hasInMem {
// WriteOutJournalCorrect(cm, io);
// } else if (cm.outstandingJournalWrites == {}) {
// writeOutSuperblockAdvanceLocationCorrect(cm, io, cm', io');
// } else {
// assert JC.NoOp( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp);
// assert JC.NextStep( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp, JC.NoOpStep);
// }
// } else {
// assert JC.NoOp( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp);
// assert JC.NextStep( CommitterModel.I(cm), CommitterModel.I(cm'), JournalDisk.NoDiskOp, JournalInternalOp, JC.NoOpStep);
// }
// }
// function {:opaque} writeBackSuperblockResp(
// cm: CommitterModel.CM) : CommitterModel.CM
// requires CommitterModel.Inv(cm)
// {
// if cm.status.StatusReady? &&
// cm.commitStatus.CommitAdvanceLocation? then (
// cm.(superblockWrite := None)
// .(superblock := cm.newSuperblock.value)
// .(newSuperblock := None)
// .(whichSuperblock := if cm.whichSuperblock == 0 then 1 else 0)
// .(syncReqs := SyncReqs2to1(cm.syncReqs))
// .(journalist :=
// JournalistModel.updateWrittenJournalLen(
// cm.journalist,
// JournalistModel.getWrittenJournalLen(cm.journalist)
// - cm.frozenJournalPosition
// )
// )
// .(frozenJournalPosition := 0)
// .(frozenLoc := None)
// .(isFrozen := false)
// .(commitStatus := JC.CommitNone)
// )
// else if cm.status.StatusReady? &&
// cm.commitStatus.CommitAdvanceLog? then (
// cm.(superblockWrite := None)
// .(superblock := cm.newSuperblock.value)
// .(newSuperblock := None)
// .(whichSuperblock := if cm.whichSuperblock == 0 then 1 else 0)
// .(syncReqs := SyncReqs2to1(cm.syncReqs))
// .(commitStatus := JC.CommitNone)
// )
// else (
// cm
// )
// }
// lemma writeBackSuperblockRespCorrect(
// cm: CommitterModel.CM, io: IO)
// requires CommitterModel.Inv(cm)
// requires ValidDiskOp(diskOp(io))
// requires IDiskOp(diskOp(io)).jdop.RespWriteSuperblockOp?
// requires Some(io.id) == cm.superblockWrite
// ensures var cm' := writeBackSuperblockResp(cm);
// && CommitterModel.WF(cm')
// && JC.Next(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io)).jdop,
// if cm.status.StatusReady? && cm.commitStatus.CommitAdvanceLocation? then CleanUpOp else JournalInternalOp
// )
// {
// reveal_writeBackSuperblockResp();
// var cm' := writeBackSuperblockResp(cm);
// SyncReqs2to1Correct(cm.syncReqs);
// if cm.status.StatusReady? &&
// cm.commitStatus.CommitAdvanceLocation? {
// assert JC.WriteBackSuperblockResp(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io)).jdop,
// CleanUpOp);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io)).jdop,
// CleanUpOp,
// JC.WriteBackSuperblockRespStep);
// }
// else if cm.status.StatusReady? &&
// cm.commitStatus.CommitAdvanceLog? {
// assert JC.WriteBackSuperblockResp(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io)).jdop,
// JournalInternalOp);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io)).jdop,
// JournalInternalOp,
// JC.WriteBackSuperblockRespStep);
// }
// else {
// assert JC.NoOp(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io)).jdop,
// JournalInternalOp);
// assert JC.NextStep(
// CommitterModel.I(cm),
// CommitterModel.I(cm'),
// IDiskOp(diskOp(io)).jdop,
// JournalInternalOp,
// JC.NoOpStep);
// }
// }
// }
|
697 | iron-sync_tmp_tmps49o3tyz_concurrency_docs_code_ShardedStateMachine.dfy | // General form of a ShardedStateMachine
// To instantiate one, fill in the 'Shard' type and the 'glue' function,
// provide the 'Next' predicate and the invariant 'Inv',
// and then meet various proof obligations in the form of lemmas.
abstract module ShardedStateMachine {
/*
* A ShardedStateMachine contains a 'Shard' type that represents
* a shard of the state machine.
*/
type Shard
predicate valid_shard(a: Shard)
/*
* There must be some notion that lets us put two shards together.
*/
function glue(a: Shard, b: Shard) : Shard
/*
* The 'glue' operation must respect monoidal laws.
*/
lemma glue_commutative(a: Shard, b: Shard)
ensures glue(a, b) == glue(b, a)
lemma glue_associative(a: Shard, b: Shard, c: Shard)
ensures glue(glue(a, b), c) == glue(a, glue(b, c))
function unit() : Shard
ensures valid_shard(unit())
lemma glue_unit(a: Shard)
ensures glue(a, unit()) == a
/*
* The invariant is meant to be a predicate over a 'whole' shard,
* that is, all the pieces glued together at once.
*/
predicate Inv(s: Shard)
/*
* 'Next' predicate of our state machine.
*/
predicate Next(shard: Shard, shard': Shard)
lemma NextPreservesValid(s: Shard, s': Shard)
requires valid_shard(s)
requires Next(s, s')
ensures valid_shard(s')
lemma NextAdditive(s: Shard, s': Shard, t: Shard)
requires Next(s, s')
requires valid_shard(glue(s, t))
requires Next(glue(s, t), glue(s', t))
/*
* The operation must preserve the state machine invariant.
*/
lemma NextPreservesInv(s: Shard, s': Shard)
requires Inv(s)
requires Next(s, s')
ensures Inv(s')
}
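// A minimal sketch (not part of the original file; every name below is
// invented for illustration) of the shape an instantiation takes: shards are
// multisets of nat-valued "tickets", 'glue' is multiset union, a step adds
// the ticket 0, and the trivial invariant makes each obligation immediate.
// It is written as a standalone module rather than as a refinement of the
// abstract module above, just to keep the sketch self-contained.
module TicketShardExample {
  type Shard = multiset<nat>

  predicate valid_shard(a: Shard) { true }
  function glue(a: Shard, b: Shard) : Shard { a + b }
  function unit() : Shard { multiset{} }
  predicate Inv(s: Shard) { true }
  predicate Next(shard: Shard, shard': Shard) { shard' == shard + multiset{0} }

  lemma glue_commutative(a: Shard, b: Shard)
    ensures glue(a, b) == glue(b, a)
  { }

  lemma glue_associative(a: Shard, b: Shard, c: Shard)
    ensures glue(glue(a, b), c) == glue(a, glue(b, c))
  { }

  lemma glue_unit(a: Shard)
    ensures glue(a, unit()) == a
  { }

  lemma NextPreservesValid(s: Shard, s': Shard)
    requires valid_shard(s)
    requires Next(s, s')
    ensures valid_shard(s')
  { }

  lemma NextPreservesInv(s: Shard, s': Shard)
    requires Inv(s)
    requires Next(s, s')
    ensures Inv(s')
  { }
}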
| // General form of a ShardedStateMachine
// To instantiate one, fill in the 'Shard' type and the 'glue' function,
// provide the 'Next' predicate and the invariant 'Inv',
// and then meet various proof obligations in the form of lemmas.
abstract module ShardedStateMachine {
/*
* A ShardedStateMachine contains a 'Shard' type that represents
* a shard of the state machine.
*/
type Shard
predicate valid_shard(a: Shard)
/*
* There must be some notion that lets us put two shards together.
*/
function glue(a: Shard, b: Shard) : Shard
/*
* The 'glue' operation must respect monoidal laws.
*/
lemma glue_commutative(a: Shard, b: Shard)
ensures glue(a, b) == glue(b, a)
lemma glue_associative(a: Shard, b: Shard, c: Shard)
ensures glue(glue(a, b), c) == glue(a, glue(b, c))
function unit() : Shard
ensures valid_shard(unit())
lemma glue_unit(a: Shard)
ensures glue(a, unit()) == a
/*
* The invariant is meant to be a predicate over a 'whole' shard,
* that is, all the pieces glued together at once.
*/
predicate Inv(s: Shard)
/*
* 'Next' predicate of our state machine.
*/
predicate Next(shard: Shard, shard': Shard)
lemma NextPreservesValid(s: Shard, s': Shard)
requires valid_shard(s)
requires Next(s, s')
ensures valid_shard(s')
lemma NextAdditive(s: Shard, s': Shard, t: Shard)
requires Next(s, s')
requires valid_shard(glue(s, t))
requires Next(glue(s, t), glue(s', t))
/*
* The operation must preserve the state machine invariant.
*/
lemma NextPreservesInv(s: Shard, s': Shard)
requires Inv(s)
requires Next(s, s')
ensures Inv(s')
}
|
698 | iron-sync_tmp_tmps49o3tyz_lib_Base_MapRemove.dfy | // Defines a MapRemove1 operation for removing a key from a
// the built-in map<K,V> type, and declares a trusted, compilable
// version.
//
// TODO In principle, it'd be nice to remove our dependence
// on compiling the built-in map<K, V> entirely, and just
// replace them with our own hash tables. There are only
// a few minor usages left.
module {:extern} MapRemove_s {
function {:opaque} MapRemove1<K,V>(m:map<K,V>, k:K) : (m':map<K,V>)
ensures forall j :: j in m && j != k ==> j in m'
ensures forall j :: j in m' ==> j in m && j != k
ensures forall j :: j in m' ==> m'[j] == m[j]
ensures |m'.Keys| <= |m.Keys|
ensures k in m ==> |m'| == |m| - 1
ensures k !in m ==> |m'| == |m|
{
var m' := map j | j in m && j != k :: m[j];
assert m'.Keys == m.Keys - {k};
m'
}
method {:extern "MapRemove__s_Compile", "ComputeMapRemove1"}
ComputeMapRemove1<K,V>(m: map<K,V>, k:K)
returns (m' : map<K,V>)
ensures m' == MapRemove1(m, k)
}
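// A small usage sketch (not part of the original file; the module and method
// names are invented for illustration): the postconditions of MapRemove1 are
// enough to reason about the result even though the body is opaque.
module MapRemoveExample {
  import MapRemove_s

  method Example() {
    var m := map[1 := 10, 2 := 20];
    var m' := MapRemove_s.MapRemove1(m, 1);
    assert 1 !in m';                  // the removed key is gone
    assert 2 in m' && m'[2] == 20;    // other keys and values are unchanged
  }
}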
// Defines a MapRemove1 operation for removing a key from
// the built-in map<K,V> type, and declares a trusted, compilable
// version.
//
// TODO In principle, it'd be nice to remove our dependence
// on compiling the built-in map<K, V> entirely, and just
// replace them with our own hash tables. There are only
// a few minor usages left.
module {:extern} MapRemove_s {
function {:opaque} MapRemove1<K,V>(m:map<K,V>, k:K) : (m':map<K,V>)
ensures forall j :: j in m && j != k ==> j in m'
ensures forall j :: j in m' ==> j in m && j != k
ensures forall j :: j in m' ==> m'[j] == m[j]
ensures |m'.Keys| <= |m.Keys|
ensures k in m ==> |m'| == |m| - 1
ensures k !in m ==> |m'| == |m|
{
var m' := map j | j in m && j != k :: m[j];
m'
}
method {:extern "MapRemove__s_Compile", "ComputeMapRemove1"}
ComputeMapRemove1<K,V>(m: map<K,V>, k:K)
returns (m' : map<K,V>)
ensures m' == MapRemove1(m, k)
}
|
699 | ironsync-osdi2023_tmp_tmpx80antoe_lib_Marshalling_Math.dfy | // Based on IronFleet's math library.
// I pulled out only the functions we need for the marshalling code,
// and in a few cases rewrote the proof from scratch to avoid pulling in
// a lot of dependencies.
module Math {
function {:opaque} power2(exp: nat) : nat
ensures power2(exp) > 0;
{
if (exp==0) then
1
else
2*power2(exp-1)
}
lemma lemma_2toX()
ensures power2(8) == 256;
ensures power2(16) == 65536;
ensures power2(19) == 524288;
ensures power2(24) == 16777216;
ensures power2(32) == 4294967296;
ensures power2(60) == 1152921504606846976;
ensures power2(64) == 18446744073709551616;
{
reveal_power2();
}
lemma lemma_power2_adds(e1:nat, e2:nat)
decreases e2;
ensures power2(e1 + e2) == power2(e1) * power2(e2);
{
reveal_power2();
if (e2 == 0) {
} else {
lemma_power2_adds(e1, e2-1);
}
}
lemma lemma_2toX32()
ensures power2(0) == 0x1;
ensures power2(1) == 0x2;
ensures power2(2) == 0x4;
ensures power2(3) == 0x8;
ensures power2(4) == 0x10;
ensures power2(5) == 0x20;
ensures power2(6) == 0x40;
ensures power2(7) == 0x80;
ensures power2(8) == 0x100;
ensures power2(9) == 0x200;
ensures power2(10) == 0x400;
ensures power2(11) == 0x800;
ensures power2(12) == 0x1000;
ensures power2(13) == 0x2000;
ensures power2(14) == 0x4000;
ensures power2(15) == 0x8000;
ensures power2(16) == 0x10000;
ensures power2(17) == 0x20000;
ensures power2(18) == 0x40000;
ensures power2(19) == 0x80000;
ensures power2(20) == 0x100000;
ensures power2(21) == 0x200000;
ensures power2(22) == 0x400000;
ensures power2(23) == 0x800000;
ensures power2(24) == 0x1000000;
ensures power2(25) == 0x2000000;
ensures power2(26) == 0x4000000;
ensures power2(27) == 0x8000000;
ensures power2(28) == 0x10000000;
ensures power2(29) == 0x20000000;
ensures power2(30) == 0x40000000;
ensures power2(31) == 0x80000000;
ensures power2(32) == 0x100000000;
{
reveal_power2();
}
lemma bounded_mul_eq_0(x: int, m: int)
requires -m < m*x < m
ensures x == 0
{
}
// This is often used as part of the axiomatic definition of division
// in a lot of formalizations of mathematics. Oddly, it isn't built into Dafny
// and we have to prove it in sort of a roundabout way.
lemma lemma_div_ind(x: int, d: int)
requires d > 0
ensures x / d + 1 == (x + d) / d
{
assert d * (x / d + 1)
== (x/d)*d + d
== x - (x % d) + d;
assert d * ((x + d) / d)
== (x + d) - ((x + d) % d);
assert 0 <= x % d < d;
assert 0 <= (x + d) % d < d;
assert d * (x / d + 1) - d * ((x + d) / d)
== ((x + d) % d) - (x % d);
assert -d < d * (x / d + 1) - d * ((x + d) / d) < d;
assert -d < d * ((x / d + 1) - ((x + d) / d)) < d;
bounded_mul_eq_0((x / d + 1) - ((x + d) / d), d);
}
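  // A tiny concrete sketch (not part of the original file; the lemma name is
  // invented for illustration): instantiating lemma_div_ind at x == 17, d == 5
  // relates the quotients 17 / 5 == 3 and 22 / 5 == 4.
  lemma lemma_div_ind_example()
    ensures 17 / 5 + 1 == (17 + 5) / 5
  {
    lemma_div_ind(17, 5);
  }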
lemma lemma_add_mul_div(a: int, b: int, d: int)
requires d > 0
ensures (a + b*d) / d == a/d + b
decreases if b > 0 then b else -b
{
if (b == 0) {
} else if (b > 0) {
lemma_add_mul_div(a, b-1, d);
lemma_div_ind(a + (b-1)*d, d);
} else {
lemma_add_mul_div(a, b+1, d);
lemma_div_ind(a + b*d, d);
}
}
lemma lemma_div_multiples_vanish_fancy(x:int, b:int, d:int)
requires 0<d;
requires 0<=b<d;
ensures (d*x + b)/d == x;
decreases if x > 0 then x else -x
{
if (x == 0) {
} else if (x > 0) {
lemma_div_multiples_vanish_fancy(x-1, b, d);
lemma_div_ind(d*(x-1) + b, d);
} else {
lemma_div_multiples_vanish_fancy(x+1, b, d);
lemma_div_ind(d*x + b, d);
}
}
lemma lemma_div_by_multiple(b:int, d:int)
requires 0 < d;
ensures (b*d) / d == b;
{
lemma_div_multiples_vanish_fancy(b, 0, d);
}
lemma lemma_mod_multiples_basic(x:int, m:int)
requires m > 0;
ensures (x * m) % m == 0;
{
assert (x*m)%m == x*m - ((x*m)/m)*m;
lemma_div_by_multiple(x, m);
assert (x*m)/m == x;
assert x*m - ((x*m)/m)*m == x*m - x*m
== 0;
}
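  // Strict inequality survives division by z when the larger value y is an exact multiple of z.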
lemma lemma_div_by_multiple_is_strongly_ordered(x:int, y:int, m:int, z:int)
requires x < y;
requires y == m * z;
requires z > 0;
ensures x / z < y / z;
{
lemma_mod_multiples_basic(m, z);
if (x / z <= m-1) {
} else {
lemma_div_by_multiple_is_strongly_ordered(x, y-z, m-1, z);
}
}
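  // Dual of lemma_power2_adds: subtracting exponents corresponds to dividing powers of two.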
lemma lemma_power2_div_is_sub(x:int, y:int)
requires 0 <= x <= y;
    ensures power2(y - x) == power2(y) / power2(x) >= 0;
{
calc {
power2(y) / power2(x);
{ lemma_power2_adds(y-x, x); }
(power2(y-x)*power2(x)) / power2(x);
{ lemma_div_by_multiple(power2(y-x), power2(x)); }
power2(y-x);
}
}
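  // Dividing by c and then by d equals dividing by c*d in a single step, for non-negative x.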
lemma lemma_div_denominator(x:int,c:nat,d:nat)
requires 0 <= x;
requires 0<c;
requires 0<d;
ensures c * d != 0;
ensures (x/c)/d == x / (c*d);
{
if (x < c*d) {
assert x/(c*d) == 0;
assert x/c < d;
assert (x/c)/d == 0;
} else {
calc {
(x / c) / d;
((x - c*d + c*d) / c) / d;
{
lemma_add_mul_div(x-c*d, d, c);
}
((x - c*d) / c + d) / d;
{
lemma_div_ind((x - c*d) / c, d);
}
((x - c*d) / c) / d + 1;
{
lemma_div_denominator(x - c*d, c, d);
}
((x - c*d) / (c*d)) + 1;
{
lemma_div_ind(x - c*d, c*d);
}
x / (c*d);
}
}
}
}
| // Based on IronFleet's math library.
// I pulled out only the functions we need for the marshalling code,
// and in a few cases rewrote the proof from scratch to avoid pulling in
// a lot of dependencies.
module Math {
function {:opaque} power2(exp: nat) : nat
ensures power2(exp) > 0;
{
if (exp==0) then
1
else
2*power2(exp-1)
}
lemma lemma_2toX()
ensures power2(8) == 256;
ensures power2(16) == 65536;
ensures power2(19) == 524288;
ensures power2(24) == 16777216;
ensures power2(32) == 4294967296;
ensures power2(60) == 1152921504606846976;
ensures power2(64) == 18446744073709551616;
{
reveal_power2();
}
lemma lemma_power2_adds(e1:nat, e2:nat)
ensures power2(e1 + e2) == power2(e1) * power2(e2);
{
reveal_power2();
if (e2 == 0) {
} else {
lemma_power2_adds(e1, e2-1);
}
}
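  // Example use (not part of the original IronFleet library): instantiating
  // lemma_power2_adds at concrete exponents.
  lemma example_power2_adds_40()
    ensures power2(40) == power2(32) * power2(8);
  {
    lemma_power2_adds(32, 8);
  }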
lemma lemma_2toX32()
ensures power2(0) == 0x1;
ensures power2(1) == 0x2;
ensures power2(2) == 0x4;
ensures power2(3) == 0x8;
ensures power2(4) == 0x10;
ensures power2(5) == 0x20;
ensures power2(6) == 0x40;
ensures power2(7) == 0x80;
ensures power2(8) == 0x100;
ensures power2(9) == 0x200;
ensures power2(10) == 0x400;
ensures power2(11) == 0x800;
ensures power2(12) == 0x1000;
ensures power2(13) == 0x2000;
ensures power2(14) == 0x4000;
ensures power2(15) == 0x8000;
ensures power2(16) == 0x10000;
ensures power2(17) == 0x20000;
ensures power2(18) == 0x40000;
ensures power2(19) == 0x80000;
ensures power2(20) == 0x100000;
ensures power2(21) == 0x200000;
ensures power2(22) == 0x400000;
ensures power2(23) == 0x800000;
ensures power2(24) == 0x1000000;
ensures power2(25) == 0x2000000;
ensures power2(26) == 0x4000000;
ensures power2(27) == 0x8000000;
ensures power2(28) == 0x10000000;
ensures power2(29) == 0x20000000;
ensures power2(30) == 0x40000000;
ensures power2(31) == 0x80000000;
ensures power2(32) == 0x100000000;
{
reveal_power2();
}
lemma bounded_mul_eq_0(x: int, m: int)
requires -m < m*x < m
ensures x == 0
{
}
  // This is often used as part of the axiomatic definition of division
  // in a lot of formalizations of mathematics. Oddly, it isn't built into Dafny,
  // and we have to prove it in a somewhat roundabout way.
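  // For example, with d == 3: 7/3 + 1 == 2 + 1 == 3 == 10/3, and for a negative
  // numerator, -7/3 + 1 == -3 + 1 == -2 == -4/3 (Dafny's integer division keeps
  // the remainder non-negative).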
lemma lemma_div_ind(x: int, d: int)
requires d > 0
ensures x / d + 1 == (x + d) / d
{
bounded_mul_eq_0((x / d + 1) - ((x + d) / d), d);
}
lemma lemma_add_mul_div(a: int, b: int, d: int)
requires d > 0
ensures (a + b*d) / d == a/d + b
{
if (b == 0) {
} else if (b > 0) {
lemma_add_mul_div(a, b-1, d);
lemma_div_ind(a + (b-1)*d, d);
} else {
lemma_add_mul_div(a, b+1, d);
lemma_div_ind(a + b*d, d);
}
}
lemma lemma_div_multiples_vanish_fancy(x:int, b:int, d:int)
requires 0<d;
requires 0<=b<d;
ensures (d*x + b)/d == x;
{
if (x == 0) {
} else if (x > 0) {
lemma_div_multiples_vanish_fancy(x-1, b, d);
lemma_div_ind(d*(x-1) + b, d);
} else {
lemma_div_multiples_vanish_fancy(x+1, b, d);
lemma_div_ind(d*x + b, d);
}
}
lemma lemma_div_by_multiple(b:int, d:int)
requires 0 < d;
ensures (b*d) / d == b;
{
lemma_div_multiples_vanish_fancy(b, 0, d);
}
lemma lemma_mod_multiples_basic(x:int, m:int)
requires m > 0;
ensures (x * m) % m == 0;
{
lemma_div_by_multiple(x, m);
}
lemma lemma_div_by_multiple_is_strongly_ordered(x:int, y:int, m:int, z:int)
requires x < y;
requires y == m * z;
requires z > 0;
ensures x / z < y / z;
{
lemma_mod_multiples_basic(m, z);
if (x / z <= m-1) {
} else {
lemma_div_by_multiple_is_strongly_ordered(x, y-z, m-1, z);
}
}
lemma lemma_power2_div_is_sub(x:int, y:int)
requires 0 <= x <= y;
    ensures power2(y - x) == power2(y) / power2(x) >= 0;
{
calc {
power2(y) / power2(x);
{ lemma_power2_adds(y-x, x); }
(power2(y-x)*power2(x)) / power2(x);
{ lemma_div_by_multiple(power2(y-x), power2(x)); }
power2(y-x);
}
}
lemma lemma_div_denominator(x:int,c:nat,d:nat)
requires 0 <= x;
requires 0<c;
requires 0<d;
ensures c * d != 0;
ensures (x/c)/d == x / (c*d);
{
if (x < c*d) {
} else {
calc {
(x / c) / d;
((x - c*d + c*d) / c) / d;
{
lemma_add_mul_div(x-c*d, d, c);
}
((x - c*d) / c + d) / d;
{
lemma_div_ind((x - c*d) / c, d);
}
((x - c*d) / c) / d + 1;
{
lemma_div_denominator(x - c*d, c, d);
}
((x - c*d) / (c*d)) + 1;
{
lemma_div_ind(x - c*d, c*d);
}
x / (c*d);
}
}
}
}
|