diff --git a/DynamoDbEncryption/dafny/DynamoDbEncryption/src/CompoundBeacon.dfy b/DynamoDbEncryption/dafny/DynamoDbEncryption/src/CompoundBeacon.dfy index e8fc00e35..bb1dd0883 100644 --- a/DynamoDbEncryption/dafny/DynamoDbEncryption/src/CompoundBeacon.dfy +++ b/DynamoDbEncryption/dafny/DynamoDbEncryption/src/CompoundBeacon.dfy @@ -20,6 +20,7 @@ module CompoundBeacon { import UTF8 import Seq import SortedSets + import StandardLibrary.Sequence type Prefix = x : string | 0 < |x| witness * @@ -289,7 +290,7 @@ module CompoundBeacon { // return all the fields involved in this beacon function method GetFields(virtualFields : VirtualFieldMap) : seq { - Seq.Flatten(Seq.Map((p : BeaconPart) => p.GetFields(virtualFields), parts)) + Sequence.Flatten(Seq.Map((p : BeaconPart) => p.GetFields(virtualFields), parts)) } // calculate value for a single piece of a compound beacon query string @@ -315,9 +316,9 @@ module CompoundBeacon { Failure(E("CompoundBeacon " + base.name + " can only be queried as a string, not as " + AttrTypeToStr(value))) else var parts := Split(value.S, split); - var partsUsed :- Seq.MapWithResult(s => getPartFromPrefix(s), parts); + var partsUsed :- Sequence.MapWithResult(s => getPartFromPrefix(s), parts); var _ :- ValidatePartOrder(partsUsed, value.S); - var beaconParts :- Seq.MapWithResult(s => FindAndCalcPart(s, keys), parts); + var beaconParts :- Sequence.MapWithResult(s => FindAndCalcPart(s, keys), parts); var lastIsPrefix :- justPrefix(Seq.Last(parts)); if !forEquality && lastIsPrefix then var result := Join(beaconParts[..|parts|-1] + [Seq.Last(parts)], [split]); @@ -534,7 +535,7 @@ module CompoundBeacon { requires pos < |parts| { var partNumbers : seq := seq(|parts|, (i : nat) => i as nat); - var _ :- Seq.MapWithResult((p : int) requires 0 <= p < |parts| => CheckOnePrefixPart(pos, p), seq(|parts|, i => i)); + var _ :- Sequence.MapWithResult((p : int) requires 0 <= p < |parts| => CheckOnePrefixPart(pos, p), seq(|parts|, i => i)); Success(true) } diff --git a/DynamoDbEncryption/dafny/DynamoDbEncryption/src/DynamoToStruct.dfy b/DynamoDbEncryption/dafny/DynamoDbEncryption/src/DynamoToStruct.dfy index 616dfe404..4306dfb2a 100644 --- a/DynamoDbEncryption/dafny/DynamoDbEncryption/src/DynamoToStruct.dfy +++ b/DynamoDbEncryption/dafny/DynamoDbEncryption/src/DynamoToStruct.dfy @@ -18,6 +18,7 @@ module DynamoToStruct { import Seq import Norm = DynamoDbNormalizeNumber import SE = StructuredEncryptionUtil + import StandardLibrary.Sequence type Error = AwsCryptographyDbEncryptionSdkDynamoDbTypes.Error @@ -437,7 +438,7 @@ module DynamoToStruct { :- Need(|asSet| == |ns|, "Number Set had duplicate values"); Seq.LemmaNoDuplicatesCardinalityOfSet(ns); - var normList :- Seq.MapWithResult(n => Norm.NormalizeNumber(n), ns); + var normList :- Sequence.MapWithResult(n => Norm.NormalizeNumber(n), ns); var asSet := Seq.ToSet(normList); :- Need(|asSet| == |normList|, "Number Set had duplicate values after normalization."); @@ -454,6 +455,10 @@ module DynamoToStruct { function method BinarySetAttrToBytes(bs: BinarySetAttributeValue): (ret: Result, string>) ensures ret.Success? ==> Seq.HasNoDuplicates(bs) + ensures ret.Success? ==> + && U32ToBigEndian(|bs|).Success? 
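Note on the new import: `StandardLibrary.Sequence` supplies variants of `Seq.Flatten` and `Seq.MapWithResult` that avoid re-slicing the input on every recursive call, which is what this change swaps in throughout the beacon and query code. A minimal sketch of the accumulator-and-index shape such a helper can take (illustrative only; the name and the `Wrappers.Result` plumbing are assumptions, not the real `StandardLibrary.Sequence` signature):

  function method MapWithResultAt<T, R, E>(
    f : T -> Result<R, E>,
    xs : seq<T>,
    pos : nat := 0,
    acc : seq<R> := []
  ) : Result<seq<R>, E>
    requires pos <= |xs|
    decreases |xs| - pos
  {
    if pos == |xs| then
      Success(acc)
    else
      var r :- f(xs[pos]);  // stop at the first Failure, as Seq.MapWithResult does
      MapWithResultAt(f, xs, pos + 1, acc + [r])
  }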
+ && LENGTH_LEN <= |ret.value| + && ret.value[..LENGTH_LEN] == U32ToBigEndian(|bs|).value { var asSet := Seq.ToSet(bs); :- Need(|asSet| == |bs|, "Binary Set had duplicate values"); @@ -561,14 +566,17 @@ module DynamoToStruct { // String Set or Number Set to Bytes function method {:tailrecursion} {:opaque} CollectString( setToSerialize : StringSetAttributeValue, + pos : nat := 0, serialized : seq := []) : Result, string> + requires pos <= |setToSerialize| + decreases |setToSerialize| - pos { - if |setToSerialize| == 0 then + if |setToSerialize| == pos then Success(serialized) else - var entry :- EncodeString(setToSerialize[0]); - CollectString(setToSerialize[1..], serialized + entry) + var entry :- EncodeString(setToSerialize[pos]); + CollectString(setToSerialize, pos+1, serialized + entry) } @@ -596,13 +604,19 @@ module DynamoToStruct { } // Binary Set to Bytes - function method {:tailrecursion} CollectBinary(setToSerialize : BinarySetAttributeValue, serialized : seq := []) : Result, string> + function method {:tailrecursion} CollectBinary( + setToSerialize : BinarySetAttributeValue, + pos : nat := 0, + serialized : seq := [] + ) : Result, string> + requires pos <= |setToSerialize| + decreases |setToSerialize| - pos { - if |setToSerialize| == 0 then + if |setToSerialize| == pos then Success(serialized) else - var item :- SerializeBinaryValue(setToSerialize[0]); - CollectBinary(setToSerialize[1..], serialized + item) + var item :- SerializeBinaryValue(setToSerialize[pos]); + CollectBinary(setToSerialize, pos+1, serialized + item) } // List to Bytes @@ -641,6 +655,7 @@ module DynamoToStruct { if |listToSerialize| == 0 then Success(serialized) else + // Can't do the `pos` optimization, because I can't appease the `decreases` var val :- AttrToBytes(listToSerialize[0], true, depth+1); CollectList(listToSerialize[1..], depth, serialized + val) } @@ -705,24 +720,27 @@ module DynamoToStruct { //# Entries in a serialized Map MUST be ordered by key value, //# ordered in ascending [UTF-16 binary order](./string-ordering.md#utf-16-binary-order). var keys := SortedSets.ComputeSetToOrderedSequence2(mapToSerialize.Keys, CharLess); - CollectOrderedMapSubset(keys, mapToSerialize, serialized) + CollectOrderedMapSubset(keys, mapToSerialize, 0, serialized) } function method {:tailrecursion} {:opaque} CollectOrderedMapSubset( keys : seq, mapToSerialize : map>, + pos : nat := 0, serialized : seq := [] ) : (ret : Result, string>) + requires pos <= |keys| requires forall k <- keys :: k in mapToSerialize ensures (ret.Success? && |keys| == 0) ==> (ret.value == serialized) ensures (ret.Success? 
&& |keys| == 0) ==> (|ret.value| == |serialized|) + decreases |keys| - pos { - if |keys| == 0 then + if |keys| == pos then Success(serialized) else - var data :- SerializeMapItem(keys[0], mapToSerialize[keys[0]]); - CollectOrderedMapSubset(keys[1..], mapToSerialize, serialized + data) + var data :- SerializeMapItem(keys[pos], mapToSerialize[keys[pos]]); + CollectOrderedMapSubset(keys, mapToSerialize, pos+1, serialized + data) } function method BoolToUint8(b : bool) : uint8 diff --git a/DynamoDbEncryption/dafny/DynamoDbEncryption/src/FilterExpr.dfy b/DynamoDbEncryption/dafny/DynamoDbEncryption/src/FilterExpr.dfy index 2f2a2cff9..5a0d25ca0 100644 --- a/DynamoDbEncryption/dafny/DynamoDbEncryption/src/FilterExpr.dfy +++ b/DynamoDbEncryption/dafny/DynamoDbEncryption/src/FilterExpr.dfy @@ -1271,25 +1271,27 @@ module DynamoDBFilterExpr { // If only surrogates are involved, comparison is normal // if one surrogate is involved, the surrogate is larger // results undefined if not valid UTF16 encodings, but the idea of 'less' is also undefined for invalid encodings. - predicate method {:tailrecursion} UnicodeLess(a : string, b : string) + predicate method {:tailrecursion} UnicodeLess(a : string, b : string, pos : nat := 0) + requires pos <= |a| + requires pos <= |b| + decreases |a| - pos { - if |a| == 0 && |b| == 0 then + if |a| == pos && |b| == pos then false - else if |a| == 0 then + else if |a| == pos then true - else if |b| == 0 then + else if |b| == pos then false + else if a[pos] == b[pos] then + UnicodeLess(a, b, pos+1) // correct independent of surrogate status else - if a[0] == b[0] then - UnicodeLess(a[1..], b[1..]) // correct independent of surrogate status - else - var aIsHighSurrogate := IsHighSurrogate(a[0]); - var bIsHighSurrogate := IsHighSurrogate(b[0]); + var aIsHighSurrogate := IsHighSurrogate(a[pos]); + var bIsHighSurrogate := IsHighSurrogate(b[pos]); if aIsHighSurrogate == bIsHighSurrogate then - a[0] < b[0] + a[pos] < b[pos] else bIsHighSurrogate - // we know aIsHighSurrogate != bIsHighSurrogate and a[0] != b[0] + // we know aIsHighSurrogate != bIsHighSurrogate and a[pos] != b[pos] // so if bIsHighSurrogate then a is less // and if aIsHighSurrogate then a is greater } diff --git a/DynamoDbEncryption/dafny/DynamoDbEncryption/src/Virtual.dfy b/DynamoDbEncryption/dafny/DynamoDbEncryption/src/Virtual.dfy index c3cdbf5f6..1dae70d08 100644 --- a/DynamoDbEncryption/dafny/DynamoDbEncryption/src/Virtual.dfy +++ b/DynamoDbEncryption/dafny/DynamoDbEncryption/src/Virtual.dfy @@ -23,11 +23,12 @@ module DdbVirtualFields { import DDB = ComAmazonawsDynamodbTypes import Seq import TermLoc + import StandardLibrary.Sequence function method ParseVirtualFieldConfig(vf : VirtualField) : Result { - var parts :- Seq.MapWithResult((p : VirtualPart) => ParseVirtualPartConfig(p), vf.parts); + var parts :- Sequence.MapWithResult((p : VirtualPart) => ParseVirtualPartConfig(p), vf.parts); Success(VirtField(vf.name, parts)) } diff --git a/DynamoDbEncryption/dafny/DynamoDbEncryption/test/Beacon.dfy b/DynamoDbEncryption/dafny/DynamoDbEncryption/test/Beacon.dfy index a812cdc0e..e0579c635 100644 --- a/DynamoDbEncryption/dafny/DynamoDbEncryption/test/Beacon.dfy +++ b/DynamoDbEncryption/dafny/DynamoDbEncryption/test/Beacon.dfy @@ -18,6 +18,11 @@ module TestBaseBeacon { import DDB = ComAmazonawsDynamodbTypes import SE = AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes + const x123 : UTF8.ValidUTF8Bytes := + var s := [0x31, 0x32, 0x33]; + assert s == UTF8.EncodeAscii("123"); + s + method {:test} TestBeacon() { 
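The recurring transformation above (CollectString, CollectBinary, CollectOrderedMapSubset, UnicodeLess) and in the files below is the same: instead of recursing on `s[1..]`, which the compiled targets realize as a copy of the tail on every step, the function keeps the whole sequence and advances an index, with `decreases |s| - pos` carrying the termination argument. A minimal before/after sketch on a made-up function (not from this codebase):

  // Before: each recursive call copies the tail of the sequence.
  function method Sum(s : seq<nat>) : nat
  {
    if |s| == 0 then 0 else s[0] + Sum(s[1..])
  }

  // After: no copying; with an accumulator the call is genuinely tail recursive.
  function method {:tailrecursion} SumAt(s : seq<nat>, pos : nat := 0, acc : nat := 0) : nat
    requires pos <= |s|
    decreases |s| - pos
  {
    if pos == |s| then acc else SumAt(s, pos + 1, acc + s[pos])
  }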
var primitives :- expect Primitives.AtomicPrimitives(); @@ -31,7 +36,7 @@ module TestBaseBeacon { expect bytes[7] == 0x80; str :- expect b.hash([], key := [1,2]); expect str == "80"; - bytes :- expect bb.getHmac(UTF8.EncodeAscii("123"), key := [1,2]); + bytes :- expect bb.getHmac(x123, key := [1,2]); expect bytes[7] == 0x61; } diff --git a/DynamoDbEncryption/dafny/DynamoDbEncryption/test/DynamoDbEncryptionBranchKeyIdSupplierTest.dfy b/DynamoDbEncryption/dafny/DynamoDbEncryption/test/DynamoDbEncryptionBranchKeyIdSupplierTest.dfy index 74e12264f..63cb0b9c3 100644 --- a/DynamoDbEncryption/dafny/DynamoDbEncryption/test/DynamoDbEncryptionBranchKeyIdSupplierTest.dfy +++ b/DynamoDbEncryption/dafny/DynamoDbEncryption/test/DynamoDbEncryptionBranchKeyIdSupplierTest.dfy @@ -27,7 +27,12 @@ module DynamoDbEncryptionBranchKeyIdSupplierTest { // These tests require a keystore populated with a key with this Id const BRANCH_KEY_ID := "75789115-1deb-4fe3-a2ec-be9e885d1945" - const BRANCH_KEY_ID_UTF8 := UTF8.EncodeAscii(BRANCH_KEY_ID) + + const BRANCH_KEY_ID_UTF8 : UTF8.ValidUTF8Bytes := + var s := [0x37, 0x35, 0x37, 0x38, 0x39, 0x31, 0x31, 0x35, 0x2d, 0x31, 0x64, 0x65, 0x62, 0x2d, 0x34, 0x66, 0x65, 0x33, 0x2d, 0x61, 0x32, 0x65, 0x63, 0x2d, 0x62, 0x65, 0x39, 0x65, 0x38, 0x38, 0x35, 0x64, 0x31, 0x39, 0x34, 0x35]; + assert s == UTF8.EncodeAscii(BRANCH_KEY_ID); + s + const ALTERNATE_BRANCH_KEY_ID := "4bb57643-07c1-419e-92ad-0df0df149d7c" // Constants for TestBranchKeySupplier @@ -41,10 +46,23 @@ module DynamoDbEncryptionBranchKeyIdSupplierTest { const CASE_B_BYTES: seq := STRING_TYPE_ID + [0x43, 0x41, 0x53, 0x45, 0x5f, 0x42] const BRANCH_KEY_ID_A := BRANCH_KEY_ID const BRANCH_KEY_ID_B := ALTERNATE_BRANCH_KEY_ID - const EC_PARTITION_NAME := UTF8.EncodeAscii("aws-crypto-partition-name") + + const EC_PARTITION_NAME : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x77, 0x73, 0x2d, 0x63, 0x72, 0x79, 0x70, 0x74, 0x6f, 0x2d, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2d, 0x6e, 0x61, 0x6d, 0x65]; + assert s == UTF8.EncodeAscii("aws-crypto-partition-name"); + s + const RESERVED_PREFIX := "aws-crypto-attr." 
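The constants in these files that used to call `UTF8.EncodeAscii(...)` are now written out as byte literals, with an `assert` tying each literal back to its string; the encoding is checked once by the verifier instead of being recomputed when the compiled module initializes. The same three-line shape works for any new constant; a small illustrative example (the name and string here are made up):

  const HELLO_UTF8 : UTF8.ValidUTF8Bytes :=
    var s := [0x68, 0x65, 0x6c, 0x6c, 0x6f]; // "hello"
    assert s == UTF8.EncodeAscii("hello");   // discharged statically, free at runtime
    s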
- const KEY_ATTR_NAME := UTF8.EncodeAscii(RESERVED_PREFIX + BRANCH_KEY) - const BRANCH_KEY_NAME := UTF8.EncodeAscii(BRANCH_KEY) + + const KEY_ATTR_NAME : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x77, 0x73, 0x2d, 0x63, 0x72, 0x79, 0x70, 0x74, 0x6f, 0x2d, 0x61, 0x74, 0x74, 0x72, 0x2e, 0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x4b, 0x65, 0x79]; + assert s == UTF8.EncodeAscii(RESERVED_PREFIX + BRANCH_KEY); + s + + const BRANCH_KEY_NAME : UTF8.ValidUTF8Bytes := + var s := [0x62, 0x72, 0x61, 0x6e, 0x63, 0x68, 0x4b, 0x65, 0x79]; + assert s == UTF8.EncodeAscii(BRANCH_KEY); + s method {:test} {:vcs_split_on_every_assert} TestHappyCase() { diff --git a/DynamoDbEncryption/dafny/DynamoDbEncryption/test/DynamoDbGetEncryptedDataKeyDescriptionTest.dfy b/DynamoDbEncryption/dafny/DynamoDbEncryption/test/DynamoDbGetEncryptedDataKeyDescriptionTest.dfy index 2f5cb02a8..91633e3db 100644 --- a/DynamoDbEncryption/dafny/DynamoDbEncryption/test/DynamoDbGetEncryptedDataKeyDescriptionTest.dfy +++ b/DynamoDbEncryption/dafny/DynamoDbEncryption/test/DynamoDbGetEncryptedDataKeyDescriptionTest.dfy @@ -14,20 +14,55 @@ module DynamoDbGetEncryptedDataKeyDescriptionTest { import EdkWrapping import AlgorithmSuites + const abc : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x62, 0x63]; + assert s == UTF8.EncodeAscii("abc"); + s + + const def : UTF8.ValidUTF8Bytes := + var s := [0x64, 0x65, 0x66]; + assert s == UTF8.EncodeAscii("def"); + s + + const awskms : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x77, 0x73, 0x2d, 0x6b, 0x6d, 0x73]; + assert s == UTF8.EncodeAscii("aws-kms"); + s + + const keyproviderInfo : UTF8.ValidUTF8Bytes := + var s := [0x6b, 0x65, 0x79, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x49, 0x6e, 0x66, 0x6f]; + assert s == UTF8.EncodeAscii("keyproviderInfo"); + s + + const aws_kms_hierarchy : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x77, 0x73, 0x2d, 0x6b, 0x6d, 0x73, 0x2d, 0x68, 0x69, 0x65, 0x72, 0x61, 0x72, 0x63, 0x68, 0x79]; + assert s == UTF8.EncodeAscii("aws-kms-hierarchy"); + s + + const raw_rsa : UTF8.ValidUTF8Bytes := + var s := [0x72, 0x61, 0x77, 0x2d, 0x72, 0x73, 0x61]; + assert s == UTF8.EncodeAscii("raw-rsa"); + s + + const aws_kms_rsa : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x77, 0x73, 0x2d, 0x6b, 0x6d, 0x73, 0x2d, 0x72, 0x73, 0x61]; + assert s == UTF8.EncodeAscii("aws-kms-rsa"); + s + // THIS IS A TESTING RESOURCE DO NOT USE IN A PRODUCTION ENVIRONMENT const testVersion : Version := 1 const testFlavor0 : Flavor := 0 const testFlavor1 : Flavor := 1 const testMsgID : MessageID := [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32] const testLegend : Legend := [0x65, 0x73] - const testEncContext : CMPEncryptionContext := map[EncodeAscii("abc") := EncodeAscii("def")] + const testEncContext : CMPEncryptionContext := map[abc := def] const testAwsKmsDataKey := CMP.EncryptedDataKey( - keyProviderId := EncodeAscii("aws-kms") , - keyProviderInfo := EncodeAscii("keyproviderInfo"), + keyProviderId := awskms , + keyProviderInfo := keyproviderInfo, ciphertext := [1, 2, 3, 4, 5]) const testAwsKmsHDataKey := CMP.EncryptedDataKey( - keyProviderId := EncodeAscii("aws-kms-hierarchy") , - keyProviderInfo := EncodeAscii("keyproviderInfo"), + keyProviderId := aws_kms_hierarchy, + keyProviderInfo := keyproviderInfo, ciphertext := [ 64, 92, 115, 7, 85, 121, 112, 79, 69, 12, 82, 25, 67, 34, 11, 66, 93, 45, 40, 23, 90, 61, 16, 28, 59, 114, 52, 122, @@ -40,12 +75,12 @@ module DynamoDbGetEncryptedDataKeyDescriptionTest { 114, 76, 18, 103, 84, 34, 123, 1, 125, 61, 33, 13, 18, 81, 24, 53, 
53, 26, 60, 52, 85, 81, 96, 85, 72]) const testRawRsaDataKey := CMP.EncryptedDataKey( - keyProviderId := EncodeAscii("raw-rsa") , + keyProviderId := raw_rsa, keyProviderInfo := [1, 2, 3, 4, 5], ciphertext := [6, 7, 8, 9]) const testAwsKmsRsaDataKey := CMP.EncryptedDataKey( - keyProviderId := EncodeAscii("aws-kms-rsa") , - keyProviderInfo := EncodeAscii("keyproviderInfo"), + keyProviderId := aws_kms_rsa, + keyProviderInfo := keyproviderInfo, ciphertext := [1, 2, 3, 4, 5]) method CreatePartialHeader(version : Version, flavor : Flavor, msgID : MessageID, legend : Legend, encContext : CMPEncryptionContext, dataKeyList : CMPEncryptedDataKeyList) diff --git a/DynamoDbEncryption/dafny/DynamoDbEncryptionTransforms/src/DynamoDbMiddlewareSupport.dfy b/DynamoDbEncryption/dafny/DynamoDbEncryptionTransforms/src/DynamoDbMiddlewareSupport.dfy index 8d2ce7e19..ab5524224 100644 --- a/DynamoDbEncryption/dafny/DynamoDbEncryptionTransforms/src/DynamoDbMiddlewareSupport.dfy +++ b/DynamoDbEncryption/dafny/DynamoDbEncryptionTransforms/src/DynamoDbMiddlewareSupport.dfy @@ -121,7 +121,10 @@ module DynamoDbMiddlewareSupport { .MapFailure(e => AwsCryptographyDbEncryptionSdkDynamoDb(e)) } - const HierarchicalKeyringId := UTF8.EncodeAscii("aws-kms-hierarchy") + const HierarchicalKeyringId : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x77, 0x73, 0x2d, 0x6b, 0x6d, 0x73, 0x2d, 0x68, 0x69, 0x65, 0x72, 0x61, 0x72, 0x63, 0x68, 0x79]; + assert s == UTF8.EncodeAscii("aws-kms-hierarchy"); + s // Return Beacon Key ID for use in beaconizing records to be written function method GetKeyIdFromHeader(config : ValidTableConfig, output : AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes.EncryptItemOutput) : diff --git a/DynamoDbEncryption/dafny/DynamoDbEncryptionTransforms/src/Index.dfy b/DynamoDbEncryption/dafny/DynamoDbEncryptionTransforms/src/Index.dfy index 77f3b7aab..4d2ca0cbc 100644 --- a/DynamoDbEncryption/dafny/DynamoDbEncryptionTransforms/src/Index.dfy +++ b/DynamoDbEncryption/dafny/DynamoDbEncryptionTransforms/src/Index.dfy @@ -58,13 +58,19 @@ module requires tableName in config.tableEncryptionConfigs ensures SearchModifies(config, tableName) <= TheModifies(config) - function method {:tailrecursion} AddSignedBeaconActions(names : seq, actions : ET.AttributeActions) : ET.AttributeActions + function method {:tailrecursion} AddSignedBeaconActions( + names : seq, + actions : ET.AttributeActions, + pos : nat := 0 + ) : ET.AttributeActions requires forall k <- names :: DDB.IsValid_AttributeName(k) + requires pos <= |names| + decreases |names| - pos { - if |names| == 0 then + if |names| == pos then actions else - AddSignedBeaconActions(names[1..], actions[names[0] := SET.SIGN_ONLY]) + AddSignedBeaconActions(names, actions[names[pos] := SET.SIGN_ONLY], pos+1) } predicate method IsConfigured(config : AwsCryptographyDbEncryptionSdkDynamoDbTypes.DynamoDbTableEncryptionConfig, name : string) @@ -79,11 +85,11 @@ module { forall tableName <- configs :: ValidTableConfig?(configs[tableName]) } - predicate {:opaque} CorrectlyTransferedStructure?( + predicate {:opaque} CorrectlyTransferredStructure?( internalConfigs: map, config: AwsCryptographyDbEncryptionSdkDynamoDbTypes.DynamoDbTablesEncryptionConfig ) - ensures 0 == |internalConfigs| ==> CorrectlyTransferedStructure?(internalConfigs, config) + ensures 0 == |internalConfigs| ==> CorrectlyTransferredStructure?(internalConfigs, config) { forall tableName <- internalConfigs :: @@ -142,7 +148,7 @@ module invariant forall k <- m' :: m'[k] == config.tableEncryptionConfigs[k] invariant forall
internalConfig <- internalConfigs.Values :: internalConfig.logicalTableName in allLogicalTableNames - invariant CorrectlyTransferedStructure?(internalConfigs, config) + invariant CorrectlyTransferredStructure?(internalConfigs, config) invariant AllTableConfigsValid?(internalConfigs) invariant ValidConfig?(Config(internalConfigs)) @@ -223,10 +229,10 @@ module assert internalConfig.physicalTableName == tableName; } - assert CorrectlyTransferedStructure?(internalConfigs, config) by { - reveal CorrectlyTransferedStructure?(); + assert CorrectlyTransferredStructure?(internalConfigs, config) by { + reveal CorrectlyTransferredStructure?(); reveal ConfigsMatch(); - assert CorrectlyTransferedStructure?(internalConfigs - {tableName}, config); + assert CorrectlyTransferredStructure?(internalConfigs - {tableName}, config); assert ConfigsMatch(tableName, internalConfig, inputConfig); } diff --git a/DynamoDbEncryption/dafny/DynamoDbItemEncryptor/src/Util.dfy b/DynamoDbEncryption/dafny/DynamoDbItemEncryptor/src/Util.dfy index 1c1233d71..7bea3d88b 100644 --- a/DynamoDbEncryption/dafny/DynamoDbItemEncryptor/src/Util.dfy +++ b/DynamoDbEncryption/dafny/DynamoDbItemEncryptor/src/Util.dfy @@ -35,9 +35,20 @@ module DynamoDbItemEncryptorUtil { x < y } - const TABLE_NAME : UTF8.ValidUTF8Bytes := UTF8.EncodeAscii("aws-crypto-table-name") - const PARTITION_NAME : UTF8.ValidUTF8Bytes := UTF8.EncodeAscii("aws-crypto-partition-name") - const SORT_NAME : UTF8.ValidUTF8Bytes := UTF8.EncodeAscii("aws-crypto-sort-name") + const TABLE_NAME : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x77, 0x73, 0x2d, 0x63, 0x72, 0x79, 0x70, 0x74, 0x6f, 0x2d, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x2d, 0x6e, 0x61, 0x6d, 0x65]; + assert s == UTF8.EncodeAscii("aws-crypto-table-name"); + s + + const PARTITION_NAME : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x77, 0x73, 0x2d, 0x63, 0x72, 0x79, 0x70, 0x74, 0x6f, 0x2d, 0x70, 0x61, 0x72, 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2d, 0x6e, 0x61, 0x6d, 0x65]; + assert s == UTF8.EncodeAscii("aws-crypto-partition-name"); + s + + const SORT_NAME : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x77, 0x73, 0x2d, 0x63, 0x72, 0x79, 0x70, 0x74, 0x6f, 0x2d, 0x73, 0x6f, 0x72, 0x74, 0x2d, 0x6e, 0x61, 0x6d, 0x65]; + assert s == UTF8.EncodeAscii("aws-crypto-sort-name"); + s const SELECTOR_TABLE_NAME : DDB.AttributeName := "aws_dbe_table_name" const SELECTOR_PARTITION_NAME : DDB.AttributeName := "aws_dbe_partition_name" @@ -180,25 +191,30 @@ module DynamoDbItemEncryptorUtil { keys : seq, context : MPL.EncryptionContext, legend : string, + keyPos : nat := 0, + legendPos : nat := 0, attrMap : DDB.AttributeMap := map[] ) : Result requires forall k <- keys :: k in context + requires keyPos <= |keys| + requires legendPos <= |legend| + decreases |keys| - keyPos { - if |keys| == 0 then - if |legend| == 0 then + if |keys| == keyPos then + if |legend| == legendPos then Success(attrMap) else Failure("Encryption Context Legend is too long.") else - var key : UTF8.ValidUTF8Bytes := keys[0]; + var key : UTF8.ValidUTF8Bytes := keys[keyPos]; if SE.EC_ATTR_PREFIX < key then - :- Need(0 < |legend|, "Encryption Context Legend is too short."); + :- Need(legendPos < |legend|, "Encryption Context Legend is too short."); var attrName :- GetAttributeName(key); - var attrValue :- GetAttrValue(context[key], legend[0]); - GetV2AttrMap(keys[1..], context, legend[1..], attrMap[attrName := attrValue]) + var attrValue :- GetAttrValue(context[key], legend[legendPos]); + GetV2AttrMap(keys, context, legend, keyPos+1, legendPos+1, attrMap[attrName := attrValue]) 
else - GetV2AttrMap(keys[1..], context, legend, attrMap) + GetV2AttrMap(keys, context, legend, keyPos+1, legendPos, attrMap) } function method GetAttributeName(ddbAttrKey: UTF8.ValidUTF8Bytes) diff --git a/DynamoDbEncryption/dafny/DynamoDbItemEncryptor/test/DynamoDBItemEncryptorTest.dfy b/DynamoDbEncryption/dafny/DynamoDbItemEncryptor/test/DynamoDBItemEncryptorTest.dfy index aa4dfaa65..6ec3f8a4c 100644 --- a/DynamoDbEncryption/dafny/DynamoDbItemEncryptor/test/DynamoDBItemEncryptorTest.dfy +++ b/DynamoDbEncryption/dafny/DynamoDbItemEncryptor/test/DynamoDBItemEncryptorTest.dfy @@ -25,7 +25,10 @@ module DynamoDbItemEncryptorTest { // encrypt => encrypted fields changed, others did not // various errors - const PublicKeyUtf8 : UTF8.ValidUTF8Bytes := UTF8.EncodeAscii("aws-crypto-public-key") + const PublicKeyUtf8 : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x77, 0x73, 0x2d, 0x63, 0x72, 0x79, 0x70, 0x74, 0x6f, 0x2d, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x2d, 0x6b, 0x65, 0x79]; + assert s == UTF8.EncodeAscii("aws-crypto-public-key"); + s function method DDBS(x : string) : DDB.AttributeValue { DDB.AttributeValue.S(x) diff --git a/DynamoDbEncryption/dafny/StructuredEncryption/src/AwsCryptographyDbEncryptionSdkStructuredEncryptionOperations.dfy b/DynamoDbEncryption/dafny/StructuredEncryption/src/AwsCryptographyDbEncryptionSdkStructuredEncryptionOperations.dfy index e2aef0b72..a5204fc19 100644 --- a/DynamoDbEncryption/dafny/StructuredEncryption/src/AwsCryptographyDbEncryptionSdkStructuredEncryptionOperations.dfy +++ b/DynamoDbEncryption/dafny/StructuredEncryption/src/AwsCryptographyDbEncryptionSdkStructuredEncryptionOperations.dfy @@ -114,20 +114,20 @@ module AwsCryptographyDbEncryptionSdkStructuredEncryptionOperations refines Abst function method {:opaque} GetBinary(data : AuthList, path : Path): (result: Result) ensures result.Success? ==> exists x :: x in data && x.key == path { - var data := FindAuth(data, path); + var pos := FindAuth(data, path); - if data.None? then + if pos.None? 
then Failure(E("The field name " + Paths.PathToString(path) + " is required.")) - else if data.value.data.typeId != BYTES_TYPE_ID then + else if data[pos.value].data.typeId != BYTES_TYPE_ID then Failure(E(Paths.PathToString(path) + " must be a binary Terminal.")) - else if data.value.action != DO_NOT_SIGN then + else if data[pos.value].action != DO_NOT_SIGN then Failure(E(Paths.PathToString(path) + " must be DO_NOT_SIGN.")) else - Success(data.value.data) + Success(data[pos.value].data) } // Return the sum of the sizes of the given fields - function method {:opaque} SumValueSize(fields : CanonCryptoList) + function {:opaque} SumValueSize(fields : CanonCryptoList) : nat { if |fields| == 0 then @@ -136,6 +136,17 @@ module AwsCryptographyDbEncryptionSdkStructuredEncryptionOperations refines Abst |fields[0].data.value| + SumValueSize(fields[1..]) else SumValueSize(fields[1..]) + } by method { + reveal SumValueSize(); + var sum : nat := 0; + for i := |fields| downto 0 + invariant sum == SumValueSize(fields[i..]) + { + if fields[i].action == ENCRYPT_AND_SIGN { + sum := |fields[i].data.value| + sum; + } + } + return sum; } function method {:opaque} GetAlgorithmSuiteId(alg : Option) @@ -258,28 +269,66 @@ module AwsCryptographyDbEncryptionSdkStructuredEncryptionOperations refines Abst output := GetV2EncryptionContext2(contextAttrs); } - function method {:opaque} Find(haystack : CryptoList, needle : Path) : Result + function {:opaque} Find(haystack : CryptoList, needle : Path, start : nat := 0) : (res : Result) + requires start <= |haystack| + requires forall i | 0 <= i < start :: haystack[i].key != needle + ensures (exists x <- haystack :: x.key == needle) <==> res.Success? + ensures (forall x <- haystack :: x.key != needle) <==> res.Failure? + ensures (forall x <- haystack :: x.key != needle) <==> res == Failure(E("Not Found")) + ensures res.Success? ==> + && 0 <= res.value < |haystack| + && haystack[res.value].key == needle + && (forall i | 0 <= i < res.value :: haystack[i].key != needle) + decreases |haystack| - start { - if |haystack| == 0 then + if |haystack| == start then Failure(E("Not Found")) - else if haystack[0].key == needle - then Success(haystack[0]) + else if haystack[start].key == needle then + Success(start) else - Find(haystack[1..], needle) + Find(haystack, needle, start + 1) + } + by method { + for i := 0 to |haystack| + invariant forall x <- haystack[..i] :: x.key != needle + { + if haystack[i].key == needle { + return Success(i); + } + } + return Failure(E("Not Found")); } - function method {:opaque} FindAuth(haystack : AuthList, needle : Path) : (result : Option) - ensures result.Some? ==> exists x :: x in haystack && x.key == needle + function {:opaque} FindAuth(haystack : AuthList, needle : Path, start : nat := 0) : (res : Option) + requires start <= |haystack| + requires forall i | 0 <= i < start :: haystack[i].key != needle + ensures (exists x <- haystack :: x.key == needle) <==> res.Some? + ensures (forall x <- haystack :: x.key != needle) <==> res == None + ensures res.Some? 
==> + && 0 <= res.value < |haystack| + && haystack[res.value].key == needle + && (forall i | 0 <= i < res.value :: haystack[i].key != needle) + decreases |haystack| - start { - if |haystack| == 0 then + if |haystack| == start then None - else if haystack[0].key == needle - then Some(haystack[0]) + else if haystack[start].key == needle then + Some(start) else - FindAuth(haystack[1..], needle) + FindAuth(haystack, needle, start + 1) + } + by method { + for i := 0 to |haystack| + invariant forall x <- haystack[..i] :: x.key != needle + { + if haystack[i].key == needle { + return Some(i); + } + } + return None; } - function method {:opaque} CountEncrypted(list : CanonCryptoList) : nat + function {:opaque} CountEncrypted(list : CanonCryptoList) : nat { if |list| == 0 then 0 @@ -287,6 +336,17 @@ module AwsCryptographyDbEncryptionSdkStructuredEncryptionOperations refines Abst 1 + CountEncrypted(list[1..]) else CountEncrypted(list[1..]) + } by method { + reveal CountEncrypted(); + var result : nat := 0; + for i := |list| downto 0 + invariant result == CountEncrypted(list[i..]) + { + if list[i].action == ENCRYPT_AND_SIGN { + result := result + 1; + } + } + return result; } method {:vcs_split_on_every_assert} GetV2EncryptionContext2(fields : CryptoList) @@ -344,7 +404,7 @@ module AwsCryptographyDbEncryptionSdkStructuredEncryptionOperations refines Abst var fieldUtf8 := keys[i]; var fieldStr := fieldMap[fieldUtf8]; var item :- Find(fields, fieldMap[fieldUtf8]); - var attr : StructuredDataTerminal := item.data; + var attr : StructuredDataTerminal := fields[item].data; var attrStr : ValidUTF8Bytes; var legendChar : char; if attr.typeId == NULL { @@ -384,19 +444,18 @@ module AwsCryptographyDbEncryptionSdkStructuredEncryptionOperations refines Abst keys : seq, plaintextStructure: StructuredDataMap, cryptoSchema: CryptoSchemaMap, - ghost origKeys : seq := keys, + pos : nat := 0, acc : CryptoList := [] ) : (ret : Result) + requires 0 <= pos <= |keys| + requires |acc| == pos requires forall k <- keys :: k in plaintextStructure requires forall k <- keys :: k in cryptoSchema requires forall k <- acc :: |k.key| == 1 - requires CryptoListHasNoDuplicates(acc) - requires |acc| + |keys| == |origKeys| - requires keys == origKeys[|acc|..] - requires forall i | 0 <= i < |acc| :: acc[i].key == Paths.StringToUniPath(origKeys[i]) + requires forall i | 0 <= i < |acc| :: acc[i].key == Paths.StringToUniPath(keys[i]) requires Seq.HasNoDuplicates(keys) - requires Seq.HasNoDuplicates(origKeys) + decreases |keys| - pos ensures ret.Success? 
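`SumValueSize`, `Find`, `FindAuth`, and `CountEncrypted` above keep their recursive definitions for use in specifications but gain `by method` bodies, so the compiled code runs as a loop; the two finders additionally return an index into the list instead of a copy of the element. In each case the loop invariant relates the state accumulated so far to the recursive function applied to the not-yet-visited suffix. A minimal sketch of that shape on a made-up counting function (nothing here is from this codebase):

  function CountEven(s : seq<int>) : nat
  {
    if |s| == 0 then 0
    else (if s[0] % 2 == 0 then 1 else 0) + CountEven(s[1..])
  } by method {
    var n : nat := 0;
    for i := |s| downto 0
      invariant n == CountEven(s[i..])
    {
      assert s[i..][1..] == s[i+1..]; // lets the invariant unfold one step of the recursion
      if s[i] % 2 == 0 {
        n := n + 1;
      }
    }
    return n;
  }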
==> && (forall k <- ret.value :: |k.key| == 1) @@ -404,14 +463,14 @@ module AwsCryptographyDbEncryptionSdkStructuredEncryptionOperations refines Abst { reveal Seq.HasNoDuplicates(); Paths.StringToUniPathUnique(); - if |keys| == 0 then + if |keys| == pos then Success(acc) else - var key := keys[0]; + var key := keys[pos]; var path := Paths.StringToUniPath(key); var item := CryptoItem(key := path, data := plaintextStructure[key], action := cryptoSchema[key]); var newAcc := acc + [item]; - BuildCryptoMap2(keys[1..], plaintextStructure, cryptoSchema, origKeys, newAcc) + BuildCryptoMap2(keys, plaintextStructure, cryptoSchema, pos+1, newAcc) } function method BuildCryptoMap(plaintextStructure: StructuredDataMap, cryptoSchema: CryptoSchemaMap) : @@ -429,33 +488,33 @@ module AwsCryptographyDbEncryptionSdkStructuredEncryptionOperations refines Abst keys : seq, plaintextStructure: StructuredDataMap, authSchema: AuthenticateSchemaMap, - ghost origKeys : seq := keys, + pos : nat := 0, acc : AuthList := [] ) : (ret : Result) + requires 0 <= pos <= |keys| + requires |acc| == pos requires forall k <- keys :: k in plaintextStructure requires forall k <- keys :: k in authSchema requires forall k <- acc :: |k.key| == 1 + requires forall i | 0 <= i < |acc| :: acc[i].key == Paths.StringToUniPath(keys[i]) requires AuthListHasNoDuplicates(acc) - requires |acc| + |keys| == |origKeys| - requires keys == origKeys[|acc|..] - requires forall i | 0 <= i < |acc| :: acc[i].key == Paths.StringToUniPath(origKeys[i]) requires Seq.HasNoDuplicates(keys) - requires Seq.HasNoDuplicates(origKeys) + decreases |keys| - pos ensures ret.Success? ==> && (forall k <- ret.value :: |k.key| == 1) && AuthListHasNoDuplicates(ret.value) { reveal Seq.HasNoDuplicates(); - if |keys| == 0 then + if |keys| == pos then Success(acc) else - var key := keys[0]; + var key := keys[pos]; var path := Paths.StringToUniPath(key); var item := AuthItem(key := path, data := plaintextStructure[key], action := authSchema[key]); var newAcc := acc + [item]; - BuildAuthMap2(keys[1..], plaintextStructure, authSchema, origKeys, newAcc) + BuildAuthMap2(keys, plaintextStructure, authSchema, pos+1, newAcc) } function method BuildAuthMap(plaintextStructure: StructuredDataMap, authSchema: AuthenticateSchemaMap) @@ -469,24 +528,28 @@ module AwsCryptographyDbEncryptionSdkStructuredEncryptionOperations refines Abst BuildAuthMap2(keys, plaintextStructure, authSchema) } - function method UnBuildCryptoMap(list : CryptoList, dataSoFar : StructuredDataMap := map[], actionsSoFar : CryptoSchemaMap := map[]) : + function method {:tailrecursion} UnBuildCryptoMap(list : CryptoList, pos : nat := 0, dataSoFar : StructuredDataMap := map[], actionsSoFar : CryptoSchemaMap := map[]) : (res : Result<(StructuredDataMap, CryptoSchemaMap), Error>) + requires 0 <= pos <= |list| + requires |dataSoFar| == pos + requires |actionsSoFar| <= pos requires forall k <- actionsSoFar :: k in dataSoFar requires (forall v :: v in actionsSoFar.Values ==> IsAuthAttr(v)) requires forall k <- list :: |k.key| == 1 + decreases |list| - pos ensures res.Success? 
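A side effect of passing the whole `keys` sequence down unchanged is that `BuildCryptoMap2`/`BuildAuthMap2` no longer need the ghost `origKeys` copy: the preconditions can correlate the accumulator directly with the prefix already consumed (`|acc| == pos` together with the pointwise `acc[i].key == Paths.StringToUniPath(keys[i])`). A stripped-down sketch of that bookkeeping on a hypothetical pairing function:

  function method PairWithIndex(
    keys : seq<string>,
    pos : nat := 0,
    acc : seq<(string, nat)> := []
  ) : (ret : seq<(string, nat)>)
    requires pos <= |keys| && |acc| == pos
    requires forall i | 0 <= i < pos :: acc[i].0 == keys[i]
    ensures |ret| == |keys|
    ensures forall i | 0 <= i < |keys| :: ret[i].0 == keys[i]
    decreases |keys| - pos
  {
    if pos == |keys| then acc
    else PairWithIndex(keys, pos + 1, acc + [(keys[pos], pos)])
  }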
==> && (forall k <- res.value.1 :: k in res.value.0) && (forall v :: v in res.value.1.Values ==> IsAuthAttr(v)) { - if |list| == 0 then + if |list| == pos then Success((dataSoFar, actionsSoFar)) else - var key :- Paths.UniPathToString(list[0].key); + var key :- Paths.UniPathToString(list[pos].key); :- Need(key !in dataSoFar, E("Duplicate Key " + key)); - if IsAuthAttr(list[0].action) then - UnBuildCryptoMap(list[1..], dataSoFar[key := list[0].data], actionsSoFar[key := list[0].action]) + if IsAuthAttr(list[pos].action) then + UnBuildCryptoMap(list, pos+1, dataSoFar[key := list[pos].data], actionsSoFar[key := list[pos].action]) else - UnBuildCryptoMap(list[1..], dataSoFar[key := list[0].data], actionsSoFar) + UnBuildCryptoMap(list, pos+1, dataSoFar[key := list[pos].data], actionsSoFar) } lemma EncryptStructureOutputHasSinglePaths(origData : CryptoList, finalData : CryptoList) diff --git a/DynamoDbEncryption/dafny/StructuredEncryption/src/Canonize.dfy b/DynamoDbEncryption/dafny/StructuredEncryption/src/Canonize.dfy index 306f63521..e8b0eb240 100644 --- a/DynamoDbEncryption/dafny/StructuredEncryption/src/Canonize.dfy +++ b/DynamoDbEncryption/dafny/StructuredEncryption/src/Canonize.dfy @@ -139,25 +139,28 @@ module {:options "/functionSyntax:4" } Canonize { opaque function {:tailrecursion} ResolveLegend( fields : CanonAuthList, legend : Header.Legend, - ghost origFields : CanonAuthList, - acc : CanonCryptoList + pos : nat := 0, + legendPos : nat := 0, + acc : CanonCryptoList := [] ) : (ret : Result) - requires |fields| + |acc| == |origFields| - requires forall i | 0 <= i < |acc| :: Same(origFields[i], acc[i]) - requires forall i | |acc| <= i < |origFields| :: origFields[i] == fields[i-|acc|] + requires 0 <= pos <= |fields| + requires 0 <= legendPos <= |legend| + requires pos == |acc| + requires forall i | 0 <= i < pos :: Same(fields[i], acc[i]) ensures ret.Success? 
==> - && |origFields| == |ret.value| - && forall i | 0 <= i < |origFields| :: Same(origFields[i], ret.value[i]) + && |fields| == |ret.value| + && forall i | 0 <= i < |fields| :: Same(fields[i], ret.value[i]) + decreases |fields| - pos { - if |fields| == 0 then - :- Need(|legend| == 0, E("Schema changed : something that was signed is now unsigned.")); + if |fields| == pos then + :- Need(|legend| == legendPos, E("Schema changed : something that was signed is now unsigned.")); Success(acc) - else if fields[0].action == DO_NOT_SIGN then - ResolveLegend(fields[1..], legend, origFields, acc + [MakeCryptoItem(fields[0], DO_NOTHING)]) + else if fields[pos].action == DO_NOT_SIGN then + ResolveLegend(fields, legend, pos+1, legendPos, acc + [MakeCryptoItem(fields[pos], DO_NOTHING)]) else - :- Need(0 < |legend|, E("Schema changed : something that was unsigned is now signed.")); - ResolveLegend(fields[1..], legend[1..], origFields, acc + [MakeCryptoItem(fields[0], LegendToAction(legend[0]))]) + :- Need(legendPos < |legend|, E("Schema changed : something that was unsigned is now signed.")); + ResolveLegend(fields, legend, pos+1, legendPos+1, acc + [MakeCryptoItem(fields[pos], LegendToAction(legend[legendPos]))]) } opaque function ForEncrypt(tableName : GoodString, data : CryptoList) @@ -562,7 +565,7 @@ module {:options "/functionSyntax:4" } Canonize { reveal CanonAuthMatchesAuthList(); reveal CanonCryptoMatchesAuthList(); reveal IsCryptoSorted(); - var canonResolved :- ResolveLegend(canonSorted, legend, canonSorted, []); + var canonResolved :- ResolveLegend(canonSorted, legend); assert (forall k <- data :: AuthExistsInCanonCrypto(k, canonResolved)) by { reveal AuthExistsInCanonCrypto(); @@ -622,17 +625,19 @@ module {:options "/functionSyntax:4" } Canonize { && x.action == y.action } - function UnCanon(input : CanonCryptoList) : (ret : CryptoList) - ensures - && |ret| == |input| - && (forall i | 0 <= i < |input| :: SameUnCanon(input[i], ret[i])) + function {:tailrecursion} UnCanon(input : CanonCryptoList, pos : nat := 0, acc : CryptoList := []) : (ret : CryptoList) + requires 0 <= pos <= |input| + requires pos == |acc| + requires forall i | 0 <= i < |acc| :: SameUnCanon(input[i], acc[i]) + ensures |ret| == |input| + ensures forall i | 0 <= i < |input| :: SameUnCanon(input[i], ret[i]) + decreases |input| - pos { - if |input| == 0 then - [] + if |input| == pos then + acc else - var newItem := CryptoItem(key := input[0].origKey, data := input[0].data, action := input[0].action); - assert SameUnCanon(input[0], newItem); - [newItem] + UnCanon(input[1..]) + var newItem := CryptoItem(key := input[pos].origKey, data := input[pos].data, action := input[pos].action); + UnCanon(input, pos+1, acc + [newItem]) } lemma Update2ImpliesUpdate3() @@ -795,17 +800,34 @@ module {:options "/functionSyntax:4" } Canonize { && CryptoListHasNoDuplicates(finalData) } - opaque function RemoveHeaderPaths(xs : CryptoList) : (ret : CryptoList) + opaque function {:tailrecursion} RemoveHeaderPaths(xs : CryptoList, pos : nat := 0, acc : CryptoList := []) : (ret : CryptoList) requires CryptoListHasNoDuplicates(xs) + requires 0 <= pos <= |xs| + requires !exists x :: x in acc && x.key in [HeaderPath, FooterPath] + requires !exists x :: x in acc && x.key == HeaderPath + requires !exists x :: x in acc && x.key == FooterPath + requires forall x <- acc :: x in xs[..pos] + requires forall i | 0 <= i < pos :: (xs[i].key in [HeaderPath, FooterPath] || xs[i] in acc) + // should file a ticket, because this is FALSE but verifies + // requires forall 
x <- xs :: (x.key in [HeaderPath, FooterPath] || x in acc) + requires CryptoListHasNoDuplicates(acc) + ensures !exists x :: x in ret && x.key in [HeaderPath, FooterPath] ensures !exists x :: x in ret && x.key == HeaderPath ensures !exists x :: x in ret && x.key == FooterPath ensures forall x <- ret :: x in xs - ensures forall x <- xs :: x.key in [HeaderPath, FooterPath] || x in ret + ensures forall x <- xs :: (x.key in [HeaderPath, FooterPath] || x in ret) ensures CryptoListHasNoDuplicates(ret) + + decreases |xs| - pos { - if |xs| == 0 then [] - else (if xs[0].key in [HeaderPath, FooterPath] then [] else [xs[0]]) + RemoveHeaderPaths(xs[1..]) + if |xs| == pos then + acc + else if xs[pos].key in [HeaderPath, FooterPath] then + RemoveHeaderPaths(xs, pos+1, acc) + else + assert xs[pos] !in acc; + RemoveHeaderPaths(xs, pos+1, acc + [xs[pos]]) } opaque function RemoveHeaders(input : CryptoList, ghost origData : AuthList) diff --git a/DynamoDbEncryption/dafny/StructuredEncryption/src/Crypt.dfy b/DynamoDbEncryption/dafny/StructuredEncryption/src/Crypt.dfy index f8c178764..54609b0ec 100644 --- a/DynamoDbEncryption/dafny/StructuredEncryption/src/Crypt.dfy +++ b/DynamoDbEncryption/dafny/StructuredEncryption/src/Crypt.dfy @@ -49,6 +49,11 @@ module StructuredEncryptionCrypt { keyR.MapFailure(e => AwsCryptographyPrimitives(e)) } + const AwsDbeField : UTF8.ValidUTF8Bytes := + var s := [0x41, 0x77, 0x73, 0x44, 0x62, 0x65, 0x46, 0x69, 0x65, 0x6c, 0x64]; + assert s == UTF8.EncodeAscii("AwsDbeField"); + s + function method FieldKeyNonce(offset : uint32) : (ret : Bytes) ensures |ret| == 16 // NOT NonceSize @@ -61,17 +66,24 @@ module StructuredEncryptionCrypt { //# | 0x2c | 1 | 44, the length of the eventual FieldKey | //# | offset | 4 | 32 bit integer representation of offset | ensures ret == - UTF8.EncodeAscii("AwsDbeField") + AwsDbeField + [(KeySize+NonceSize) as uint8] + UInt32ToSeq(offset) { - UTF8.EncodeAscii("AwsDbeField") + AwsDbeField + [(KeySize+NonceSize) as uint8] // length + UInt32ToSeq(offset) } - const LABEL_COMMITMENT_KEY := UTF8.EncodeAscii("AWS_DBE_COMMIT_KEY") - const LABEL_ENCRYPTION_KEY := UTF8.EncodeAscii("AWS_DBE_DERIVE_KEY") + const LABEL_COMMITMENT_KEY : UTF8.ValidUTF8Bytes := + var s := [0x41, 0x57, 0x53, 0x5f, 0x44, 0x42, 0x45, 0x5f, 0x43, 0x4f, 0x4d, 0x4d, 0x49, 0x54, 0x5f, 0x4b, 0x45, 0x59]; + assert s == UTF8.EncodeAscii("AWS_DBE_COMMIT_KEY"); + s + + const LABEL_ENCRYPTION_KEY : UTF8.ValidUTF8Bytes := + var s := [0x41, 0x57, 0x53, 0x5f, 0x44, 0x42, 0x45, 0x5f, 0x44, 0x45, 0x52, 0x49, 0x56, 0x45, 0x5f, 0x4b, 0x45, 0x59]; + assert s == UTF8.EncodeAscii("AWS_DBE_DERIVE_KEY"); + s // suitable for header field method GetCommitKey( diff --git a/DynamoDbEncryption/dafny/StructuredEncryption/src/Footer.dfy b/DynamoDbEncryption/dafny/StructuredEncryption/src/Footer.dfy index 6c7f537ee..e994c382e 100644 --- a/DynamoDbEncryption/dafny/StructuredEncryption/src/Footer.dfy +++ b/DynamoDbEncryption/dafny/StructuredEncryption/src/Footer.dfy @@ -140,16 +140,28 @@ module StructuredEncryptionFooter { } } + const ENCRYPTED : UTF8.ValidUTF8Bytes := + var s := [0x45, 0x4e, 0x43, 0x52, 0x59, 0x50, 0x54, 0x45, 0x44]; + assert s == UTF8.EncodeAscii("ENCRYPTED"); + s + + const PLAINTEXT : UTF8.ValidUTF8Bytes := + var s := [0x50, 0x4c, 0x41, 0x49, 0x4e, 0x54, 0x45, 0x58, 0x54]; + assert s == UTF8.EncodeAscii("PLAINTEXT"); + s + + + // Given a StructuredDataTerminal, return the canonical value for the type, for use in the footer checksum calculations function method GetCanonicalType(value : 
StructuredDataTerminal, isEncrypted : bool) : Result { if isEncrypted then :- Need(2 <= |value.value| < UINT64_LIMIT, E("Bad length.")); - Success(UInt64ToSeq((|value.value| - 2) as uint64) + UTF8.EncodeAscii("ENCRYPTED")) + Success(UInt64ToSeq((|value.value| - 2) as uint64) + ENCRYPTED) else :- Need(|value.value| < UINT64_LIMIT, E("Bad length.")); - Success(UInt64ToSeq((|value.value|) as uint64) + UTF8.EncodeAscii("PLAINTEXT") + value.typeId) + Success(UInt64ToSeq((|value.value|) as uint64) + PLAINTEXT + value.typeId) } function method GetCanonicalEncryptedField(fieldName : CanonicalPath, value : StructuredDataTerminal) @@ -169,14 +181,14 @@ module StructuredEncryptionFooter { && ret.value == fieldName + UInt64ToSeq((|value.value| - 2) as uint64) - + UTF8.EncodeAscii("ENCRYPTED") + + ENCRYPTED + value.value // this is 2 bytes of unencrypted type, followed by encrypted value { :- Need(2 <= |value.value| < UINT64_LIMIT, E("Bad length.")); Success( fieldName + UInt64ToSeq((|value.value| - 2) as uint64) - + UTF8.EncodeAscii("ENCRYPTED") + + ENCRYPTED + value.value ) } @@ -198,7 +210,7 @@ module StructuredEncryptionFooter { && ret.value == fieldName + UInt64ToSeq((|value.value|) as uint64) - + UTF8.EncodeAscii("PLAINTEXT") + + PLAINTEXT + value.typeId + value.value { @@ -206,7 +218,7 @@ module StructuredEncryptionFooter { Success( fieldName + UInt64ToSeq((|value.value|) as uint64) - + UTF8.EncodeAscii("PLAINTEXT") + + PLAINTEXT + value.typeId + value.value ) @@ -222,20 +234,46 @@ module StructuredEncryptionFooter { GetCanonicalPlaintextField(data.key, data.data) } - function method CanonContent ( - data : CanonCryptoList, // remaining fields to be canonized - canonized : Bytes := [] // output - ) : Result + function CanonContent (data : CanonCryptoList) + : Result { if |data| == 0 then - Success(canonized) + Success([]) else if data[0].action == DO_NOTHING then - CanonContent(data[1..], canonized) + CanonContent(data[1..]) else + var tail :- CanonContent(data[1..]); var newPart :- GetCanonicalItem(data[0]); - CanonContent(data[1..], canonized + newPart) + Success(newPart + tail) + } by method { + var i: nat := |data|; + var vectors : Bytes := []; + + while i != 0 + decreases i + invariant Success(vectors) == CanonContent(data[i..]) + { + i := i - 1; + if data[i].action != DO_NOTHING { + var test := GetCanonicalItem(data[i]); + if test.Failure? 
{ + ghost var j := i; + while j != 0 + decreases j + invariant Failure(test.error) == CanonContent(data[j..]) + { + j := j - 1; + } + return Failure(test.error); + } + vectors := test.value + vectors; + } + } + + return Success(vectors); } + function method CanonRecord ( data : CanonCryptoList, header : Bytes, @@ -365,13 +403,21 @@ module StructuredEncryptionFooter { } } - function method SerializeTags(tags : seq) + function SerializeTags(tags : seq) : Bytes { if |tags| == 0 then [] else tags[0] + SerializeTags(tags[1..]) + } by method { + var result : Bytes := []; + for i := |tags| downto 0 + invariant result == SerializeTags(tags[i..]) + { + result := tags[i] + result; + } + return result; } function method SerializeSig(sig : Option) diff --git a/DynamoDbEncryption/dafny/StructuredEncryption/src/Header.dfy b/DynamoDbEncryption/dafny/StructuredEncryption/src/Header.dfy index 66169eea6..abd39ddd3 100644 --- a/DynamoDbEncryption/dafny/StructuredEncryption/src/Header.dfy +++ b/DynamoDbEncryption/dafny/StructuredEncryption/src/Header.dfy @@ -387,18 +387,21 @@ module StructuredEncryptionHeader { // Create a Legend for the given attrs of the Schema function method {:tailrecursion} MakeLegend2( data : CanonCryptoList, + pos : nat := 0, serialized : Legend := EmptyLegend ) : (ret : Result) + requires 0 <= pos <= |data| + decreases |data| - pos { - if |data| == 0 then + if |data| == pos then Success(serialized) - else if IsAuthAttr(data[0].action) then + else if IsAuthAttr(data[pos].action) then :- Need((|serialized| + 1) < UINT16_LIMIT, E("Legend Too Long.")); - var legendChar := GetActionLegend(data[0].action); - MakeLegend2(data[1..], serialized + [legendChar]) + var legendChar := GetActionLegend(data[pos].action); + MakeLegend2(data, pos+1, serialized + [legendChar]) else - MakeLegend2(data[1..], serialized) + MakeLegend2(data, pos+1, serialized) } // CryptoAction to bytes. One byte for signed, zero bytes for unsigned @@ -432,15 +435,17 @@ module StructuredEncryptionHeader { } // How many elements of Schema are included in the signature? - function method CountAuthAttrs(data : CanonCryptoList) + function method CountAuthAttrs(data : CanonCryptoList, pos : nat := 0, acc : nat := 0) : nat + requires 0 <= pos <= |data| + decreases |data| - pos { - if |data| == 0 then - 0 - else if IsAuthAttr(data[0].action) then - 1 + CountAuthAttrs(data[1..]) + if |data| == pos then + acc + else if IsAuthAttr(data[pos].action) then + CountAuthAttrs(data, pos+1, acc+1) else - CountAuthAttrs(data[1..]) + CountAuthAttrs(data, pos+1, acc) } // Legend to Bytes @@ -470,11 +475,12 @@ module StructuredEncryptionHeader { && ret.value.0 == data[2..ret.value.1] { :- Need(2 <= |data|, E("Unexpected end of header data.")); - var len := SeqToUInt16(data[0..2]); + var len := SeqPosToUInt16(data, 0); var size := len as nat + 2; :- Need(size <= |data|, E("Unexpected end of header data.")); - :- Need(forall x <- data[2..size] :: ValidLegendByte(x), E("Invalid byte in stored legend")); - Success((data[2..size], size)) + var legend := data[2..size]; + :- Need(forall x <- legend :: ValidLegendByte(x), E("Invalid byte in stored legend")); + Success((legend, size)) } // Bytes to Encryption Context @@ -484,77 +490,77 @@ module StructuredEncryptionHeader { && ret.value.1 <= |data| ensures ( && 2 <= |data| - && GetContext2(SeqToUInt16(data[0..2]) as nat, data, data[2..], (map[], 2)).Success? + && GetContext2(SeqPosToUInt16(data, 0) as nat, data, (map[], 2)).Success? ) ==> ret.Success? 
{ :- Need(2 <= |data|, E("Unexpected end of header data.")); - var count := SeqToUInt16(data[0..2]) as nat; - var context :- GetContext2(count, data, data[2..], (map[], 2)); + var count := SeqPosToUInt16(data, 0) as nat; + var context :- GetContext2(count, data, (map[], 2)); Success(context) } // Bytes to one Key Value pair - function method GetOneKVPair(data : Bytes) + function method GetOneKVPair(data : Bytes, pos : nat) : (ret : Result<(CMPUtf8Bytes, CMPUtf8Bytes, nat), Error>) ensures ret.Success? ==> - && ret.value.2 <= |data| - && SerializeOneKVPair(ret.value.0, ret.value.1) == data[..ret.value.2] + && ret.value.2 + pos <= |data| ensures ( - && 2 <= |data| - && var keyLen := SeqToUInt16(data[0..2]) as nat; - && keyLen + 4 <= |data| - && UTF8.ValidUTF8Seq(data[2..keyLen+2]) - && var valueLen := SeqToUInt16(data[keyLen+2..keyLen+4]) as nat; - && keyLen + valueLen + 4 <= |data| - && UTF8.ValidUTF8Seq(data[keyLen+4..keyLen + valueLen + 4]) - ) <==> ret.Success? && SerializeOneKVPair(ret.value.0, ret.value.1) == data[..ret.value.2] - { - :- Need(2 <= |data|, E("Unexpected end of header data.")); - var keyLen := SeqToUInt16(data[0..2]) as nat; - :- Need(keyLen + 4 <= |data|, E("Unexpected end of header data.")); - var key := data[2..keyLen+2]; + && 2 + pos <= |data| + && var keyLen := SeqPosToUInt16(data, pos) as nat; + && keyLen + 4 + pos <= |data| + && UTF8.ValidUTF8Seq(data[2+pos..keyLen+2+pos]) + && var valueLen := SeqPosToUInt16(data, keyLen+2+pos) as nat; + && keyLen + valueLen + 4 + pos <= |data| + && UTF8.ValidUTF8Seq(data[keyLen+4+pos..keyLen + valueLen + 4 + pos]) + ) <==> ret.Success? && SerializeOneKVPair(ret.value.0, ret.value.1) == data[pos..pos+ret.value.2] + { + :- Need(2 + pos <= |data|, E("Unexpected end of header data.")); + var keyLen := SeqPosToUInt16(data, pos) as nat; + :- Need(keyLen + 4 +pos <= |data|, E("Unexpected end of header data.")); + var key := data[2+pos..keyLen+2+pos]; :- Need(UTF8.ValidUTF8Seq(key), E("Invalid UTF8 found in header.")); - var valueLen := SeqToUInt16(data[keyLen+2..keyLen+4]) as nat; + var valueLen := SeqPosToUInt16(data, keyLen+2+pos) as nat; var kvLen := 2 + keyLen + 2 + valueLen; - :- Need(kvLen <= |data|, E("Unexpected end of header data.")); - var value := data[keyLen+4..kvLen]; + :- Need(kvLen + pos <= |data|, E("Unexpected end of header data.")); + var value := data[keyLen+4+pos..kvLen+pos]; :- Need(UTF8.ValidUTF8Seq(value), E("Invalid UTF8 found in header.")); Success((key, value, kvLen)) } - predicate method {:tailrecursion} BytesLess(a: Bytes, b : Bytes) { + predicate method {:tailrecursion} BytesLess(a: Bytes, b : Bytes, pos : nat := 0) + requires 0 <= pos <= |a| + requires 0 <= pos <= |b| + decreases |a| - pos + { if a == b then false - else if |a| == 0 then + else if |a| == pos then true - else if |b| == 0 then + else if |b| == pos then false - else if a[0] != b[0] then - a[0] < b[0] + else if a[pos] != b[pos] then + a[pos] < b[pos] else - BytesLess(a[1..], b[1..]) + BytesLess(a, b, pos+1) } // For "count" items, Deserialize key value pairs into an Encryption Context function method {:tailrecursion} GetContext2( count : nat, - origData : Bytes, data : Bytes, deserialized : (CMPEncryptionContext, nat), prevKey : CMPUtf8Bytes := []) : (ret : Result<(CMPEncryptionContext, nat), Error>) - requires deserialized.1 <= |origData| - requires deserialized.1 + |data| == |origData| - requires data == origData[deserialized.1..] + requires deserialized.1 <= |data| ensures ret.Success? 
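The header readers in this hunk now carry an explicit byte offset (`pos`, and the running `deserialized.1` cursor in `GetContext2`) instead of re-slicing `data` before every read, and `SeqPosToUInt16(data, pos)` takes the place of `SeqToUInt16(data[0..2])` on a slice. The helper's definition is not part of this diff; presumably it is the big-endian read-at-offset one would expect, roughly like the following (treat the exact contract as an assumption):

  function method SeqPosToUInt16(s : seq<uint8>, pos : nat) : uint16
    requires pos + 2 <= |s|
  {
    // intended to agree with SeqToUInt16(s[pos..pos+2]), without materializing the slice
    var x0 := s[pos] as uint16 * 0x100;
    x0 + s[pos+1] as uint16
  }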
==> - && ret.value.1 <= |origData| - && (count > 0 ==> GetOneKVPair(data).Success?) + && ret.value.1 <= |data| + && (count > 0 ==> GetOneKVPair(data, deserialized.1).Success?) { if count == 0 then Success(deserialized) else :- Need(|deserialized.0| + 1 < UINT16_LIMIT, E("Too much context")); - var kv :- GetOneKVPair(data); + var kv :- GetOneKVPair(data, deserialized.1); //= specification/structured-encryption/header.md#key-value-pair-entries //# This sequence MUST NOT contain duplicate entries. // if the previous key is always less than the current key, there can be no duplicates @@ -563,7 +569,7 @@ module StructuredEncryptionHeader { //# These entries MUST have entries sorted, by key, //# in ascending order according to the UTF-8 encoded binary value. :- Need(BytesLess(prevKey, kv.0), E("Context keys out of order.")); - GetContext2(count-1, origData, data[2+|kv.0|+2+|kv.1|..], (deserialized.0[kv.0 := kv.1], deserialized.1 + kv.2), kv.0) + GetContext2(count-1, data, (deserialized.0[kv.0 := kv.1], deserialized.1 + kv.2), kv.0) } // Encryption Context to Bytes @@ -641,30 +647,30 @@ module StructuredEncryptionHeader { } // Bytes to Data Key - function method {:vcs_split_on_every_assert} GetOneDataKey(data : Bytes) + function method {:vcs_split_on_every_assert} GetOneDataKey(data : Bytes, pos : nat) : (ret : Result<(CMPEncryptedDataKey, nat), Error>) ensures ret.Success? ==> - && ret.value.1 <= |data| + && ret.value.1 + pos <= |data| && |SerializeOneDataKey(ret.value.0)| == ret.value.1 - && SerializeOneDataKey(ret.value.0) == data[0..ret.value.1] + && SerializeOneDataKey(ret.value.0) == data[pos..pos+ret.value.1] { - :- Need(2 < |data|, E("Unexpected end of header data.")); - var provIdSize := SeqToUInt16(data[0..2]) as nat; - :- Need(provIdSize + 2 < |data|, E("Unexpected end of header data.")); - var provId := data[2..2+provIdSize]; + :- Need(2 + pos < |data|, E("Unexpected end of header data.")); + var provIdSize := SeqPosToUInt16(data, pos) as nat; + :- Need(provIdSize + 2 + pos < |data|, E("Unexpected end of header data.")); + var provId := data[pos+2..pos+2+provIdSize]; :- Need(UTF8.ValidUTF8Seq(provId), E("Invalid UTF8 found in header.")); var part1Size := 2 + provIdSize; - :- Need(part1Size+2 <= |data|, E("Unexpected end of header data.")); - var provInfoSize := SeqToUInt16(data[part1Size..part1Size+2]) as nat; - :- Need(part1Size + provInfoSize + 2 < |data|, E("Unexpected end of header data.")); - var provInfo := data[part1Size+2..part1Size+2+provInfoSize]; + :- Need(part1Size+2 + pos <= |data|, E("Unexpected end of header data.")); + var provInfoSize := SeqPosToUInt16(data, pos+part1Size) as nat; + :- Need(part1Size + provInfoSize + 2 + pos < |data|, E("Unexpected end of header data.")); + var provInfo := data[pos+part1Size+2..pos+part1Size+2+provInfoSize]; var part2Size := part1Size + 2 + provInfoSize; - :- Need(part2Size+2 <= |data|, E("Unexpected end of header data.")); - var cipherSize := SeqToUInt16(data[part2Size..part2Size+2]) as nat; - :- Need(part2Size + cipherSize + 2 <= |data|, E("Unexpected end of header data.")); - var cipher := data[part2Size+2..part2Size+2+cipherSize]; + :- Need(part2Size+2+pos <= |data|, E("Unexpected end of header data.")); + var cipherSize := SeqPosToUInt16(data, pos+part2Size) as nat; + :- Need(part2Size + cipherSize + 2 + pos <= |data|, E("Unexpected end of header data.")); + var cipher := data[pos+part2Size+2..pos+part2Size+2+cipherSize]; var part3Size := part2Size + 2 + cipherSize; var edk := CMP.EncryptedDataKey(keyProviderId := provId, 
keyProviderInfo := provInfo, ciphertext := cipher); @@ -672,14 +678,21 @@ module StructuredEncryptionHeader { } // for items in "keys", turn Key Value Pairs into Bytes - function method {:tailrecursion} SerializeContext2(keys : seq, x : CMPEncryptionContext) + function method {:tailrecursion} SerializeContext2( + keys : seq, + x : CMPEncryptionContext, + pos : nat := 0, + acc : Bytes := [] + ) : (ret : Bytes) requires forall k <- keys :: k in x + requires 0 <= pos <= |keys| + decreases |keys| - pos { - if |keys| == 0 then - [] + if |keys| == pos then + acc else - SerializeOneKVPair(keys[0], x[keys[0]]) + SerializeContext2(keys[1..], x) + SerializeContext2(keys, x, pos+1, acc + SerializeOneKVPair(keys[pos], x[keys[pos]])) } // Data Key List to Bytes @@ -702,13 +715,19 @@ module StructuredEncryptionHeader { } // Data Keys to Bytes - function method {:tailrecursion} SerializeDataKeys2(x : CMPEncryptedDataKeyListEmptyOK) + function method {:tailrecursion} SerializeDataKeys2( + x : CMPEncryptedDataKeyListEmptyOK, + pos : nat := 0, + acc : Bytes := [] + ) : (ret : Bytes) + requires 0 <= pos <= |x| + decreases |x| - pos { - if |x| == 0 then - [] + if |x| == pos then + acc else - SerializeOneDataKey(x[0]) + SerializeDataKeys2(x[1..]) + SerializeDataKeys2(x, pos+1, acc + SerializeOneDataKey(x[pos])) } // Bytes to Data Key List @@ -719,11 +738,11 @@ module StructuredEncryptionHeader { && 1 <= |data| && 1 <= ret.value.1 && |ret.value.0| == data[0] as nat - && GetDataKeys2(|ret.value.0|, |ret.value.0|, data, data[1..], ([], 1)).Success? + && GetDataKeys2(|ret.value.0|, |ret.value.0|, data, ([], 1)).Success? { :- Need(1 <= |data|, E("Unexpected end of header data.")); var count := data[0] as nat; - var keys :- GetDataKeys2(count, count, data, data[1..], ([], 1)); + var keys :- GetDataKeys2(count, count, data, ([], 1)); if |keys.0| == 0 then Failure(E("At least one Data Key required")) else @@ -734,18 +753,15 @@ module StructuredEncryptionHeader { function method {:tailrecursion} GetDataKeys2( count : nat, origCount : nat, - origData : Bytes, data : Bytes, - deserialized - : (CMPEncryptedDataKeyListEmptyOK, nat)) + deserialized : (CMPEncryptedDataKeyListEmptyOK, nat)) : (ret : Result<(CMPEncryptedDataKeyListEmptyOK, nat), Error>) - requires deserialized.1 <= |origData| - requires deserialized.1 + |data| == |origData| + requires deserialized.1 <= |data| requires origCount == count + |deserialized.0| ensures ret.Success? ==> - && ret.value.1 <= |origData| + && ret.value.1 <= |data| && ret.value.1 >= deserialized.1 - && (count > 0 ==> GetOneDataKey(data).Success?) + && (count > 0 ==> GetOneDataKey(data, deserialized.1).Success?) && |ret.value.0| == origCount { if count == 0 then @@ -754,9 +770,9 @@ module StructuredEncryptionHeader { if |deserialized.0| >= 255 then Failure(E("Too Many Data Keys")) else - var edk :- GetOneDataKey(data); - assert SerializeOneDataKey(edk.0) == data[..edk.1]; - GetDataKeys2(count-1, origCount, origData, data[edk.1..], (deserialized.0 + [edk.0], deserialized.1+edk.1)) + var edk :- GetOneDataKey(data, deserialized.1); + assert SerializeOneDataKey(edk.0) == data[deserialized.1..deserialized.1+edk.1]; + GetDataKeys2(count-1, origCount, data, (deserialized.0 + [edk.0], deserialized.1+edk.1)) } // End code, begin proofs @@ -790,7 +806,7 @@ module StructuredEncryptionHeader { // SerializeOneKVPair ==> GetOneKVPair lemma SerializeOneKVPairRoundTrip(key : CMPUtf8Bytes, value : CMPUtf8Bytes) - ensures GetOneKVPair(SerializeOneKVPair(key, value)).Success? 
+ ensures GetOneKVPair(SerializeOneKVPair(key, value), 0).Success? { var data := SerializeOneKVPair(key, value); assert 2 <= |data|; @@ -805,20 +821,20 @@ module StructuredEncryptionHeader { } // GetOneKVPair ==> SerializeOneKVPair - lemma GetOneKVPairRoundTrip(data : Bytes) - requires GetOneKVPair(data).Success? + lemma GetOneKVPairRoundTrip(data : Bytes, pos : nat) + requires GetOneKVPair(data, pos).Success? ensures - && var cont := GetOneKVPair(data).value; - && SerializeOneKVPair(cont.0, cont.1) == data[..cont.2] + && var cont := GetOneKVPair(data, pos).value; + && SerializeOneKVPair(cont.0, cont.1) == data[pos..cont.2+pos] {} // SerializeOneDataKey ==> GetOneDataKey lemma SerializeOneDataKeyRoundTrip(k : CMPEncryptedDataKey) ensures && var data := SerializeOneDataKey(k); - && GetOneDataKey(data).Success? - && GetOneDataKey(data).value.0 == k - && GetOneDataKey(data).value.1 == |data| + && GetOneDataKey(data, 0).Success? + && GetOneDataKey(data, 0).value.0 == k + && GetOneDataKey(data, 0).value.1 == |data| { var data := SerializeOneDataKey(k); assert 2 <= |data|; @@ -837,9 +853,9 @@ module StructuredEncryptionHeader { // GetOneDataKey ==> SerializeOneDataKey lemma GetOneDataKeyRoundTrip(data : Bytes) - requires GetOneDataKey(data).Success? + requires GetOneDataKey(data, 0).Success? ensures - && var cont := GetOneDataKey(data).value; + && var cont := GetOneDataKey(data, 0).value; && SerializeOneDataKey(cont.0) == data[..cont.1] {} @@ -847,10 +863,10 @@ module StructuredEncryptionHeader { // the unexamined bytes do not change the result lemma GetOneKVPairExt(x : Bytes, y : Bytes) requires x <= y - requires GetOneKVPair(x).Success? + requires GetOneKVPair(x, 0).Success? ensures - && GetOneKVPair(y).Success? - && GetOneKVPair(x).value == GetOneKVPair(y).value + && GetOneKVPair(y, 0).Success? + && GetOneKVPair(x, 0).value == GetOneKVPair(y, 0).value { assert 2 <= |y|; var keyLen := SeqToUInt16(y[0..2]) as nat; diff --git a/DynamoDbEncryption/dafny/StructuredEncryption/src/OptimizedMergeSort.dfy b/DynamoDbEncryption/dafny/StructuredEncryption/src/OptimizedMergeSort.dfy new file mode 100644 index 000000000..2ed337996 --- /dev/null +++ b/DynamoDbEncryption/dafny/StructuredEncryption/src/OptimizedMergeSort.dfy @@ -0,0 +1,727 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +include "../Model/AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes.dfy" + +module {:options "-functionSyntax:4"} OptimizedMergeSort { + + import Relations + import BoundedInts + import InternalModule = Seq.MergeSort + + predicate HasUint64Len(s: seq) { + |s| < BoundedInts.TWO_TO_THE_64 + } + + // The MergeSortBy function as implemented + // does not compile to an optimal implementation + // in any of the Dafny target languages. + // This implementation aims to be significantly more efficient. + // First, it minimizes copies. + // It does this by making 2 arrays of the original sequence + // and then using these 2 as left and right alternately. + // This can be audited by verifying + // that the arrays are only sliced into a seq in `MergeSortNat`. + // All other slicing is done in ghost state. + // Second, it has a bounded number implementation + // that avoids using `nat`.
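Editor's note: the new module leans on Dafny's `function ... by method` construct, in which the function body is the specification, the `by method` body is the optimized imperative implementation, and Dafny requires a proof that the two always agree. A minimal, hypothetical sketch of the pattern follows (the `Sum` function and its loop are illustrative only and are not part of this change):

// Illustrative only: functional specification on top, optimized loop as its by-method body.
function Sum(s: seq<int>): int {
  if |s| == 0 then 0 else s[0] + Sum(s[1..])
}
by method {
  // Single pass, no recursion; the invariant ties the accumulator back to the functional spec.
  var acc := 0;
  for i := |s| downto 0
    invariant acc == Sum(s[i..])
  {
    acc := s[i] + acc;
  }
  return acc;
}

Callers and proofs see only the function; compiled targets run the loop. The same shape appears below for `MergeSort`, `MergeSortNat`, `Below`, `AuthSort`, `CryptoSort`, and `MakeCanonicalPath`.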
+ + function {:isolate_assertions} MergeSort(s: seq, lessThanOrEq: (T, T) -> bool): (result :seq) + requires Relations.TotalOrdering(lessThanOrEq) + requires HasUint64Len(s) + { + InternalModule.MergeSortBy(s, lessThanOrEq) + } + by method { + if |s| <= 1 { + return s; + } else { + + // The slices x[1..], y[1..] are un-optimized operations in Dafny. + // This means that their usage will result in a lot of data copying. + // Additionally, it is very likely that the sizes of these sequences + // will fit in a uint64. + // So writing an optimized version that only works on bounded types + // should further optimize this hot code. + + var left := new T[|s|](i requires 0 <= i < |s| => s[i]); + var right := new T[|s|](i requires 0 <= i < |s| => s[i]); + var lo, hi := 0, right.Length; + + label BEFORE_WORK: + + var boundedLo: BoundedInts.uint64, boundedHi: BoundedInts.uint64 := 0, right.Length as BoundedInts.uint64; + ghost var _ := MergeSortMethod(left, right, lessThanOrEq, boundedLo, boundedHi, Right); + + result := right[..]; + + ghost var other := InternalModule.MergeSortBy(s, lessThanOrEq); + + assert Relations.SortedBy(right[..], lessThanOrEq) by { + assert right[..] == right[lo..hi]; + } + assert multiset(right[..]) == multiset(other) by { + calc { + multiset(right[..]); + == {assert right[..] == right[lo..hi];} + multiset(right[lo..hi]); + == + multiset(old@BEFORE_WORK(left[lo..hi])); + == {assert old@BEFORE_WORK(left[lo..hi]) == s;} + multiset(s); + == + multiset(other); + } + } + + // Implementing a by method can be complicated. + // Because methods can have non-determinism, + // whereas functions cannot. + // This means that Dafny normally wants to know + // that the method and function maintain equality at every step. + // But this is hard for this kind of optimized sorting. + // Because what is the functional state at every step, + // and how does it correspond to the state of the 2 optimized arrays? + // This lemma works around this + // by proving that the outcomes are always deterministic and the same. + // It does this by proving that given a total ordering, + // there is one and only one way to sort a given sequence. + TotalOrderingImpliesSortingIsUnique(right[..], other, lessThanOrEq); + } + } + + // This is included as sugar in case you don't want to ensure your seq HasUint64Len. + function {:isolate_assertions} MergeSortNat(s: seq, lessThanOrEq: (T, T) -> bool): (result :seq) + requires Relations.TotalOrdering(lessThanOrEq) + { + InternalModule.MergeSortBy(s, lessThanOrEq) + } + by method { + if |s| <= 1 { + return s; + } else { + + // The slices x[1..], y[1..] are un-optimized operations in Dafny. + // This means that their usage will result in a lot of data copying. + // Additionally, it is very likely that the sizes of these sequences + // will fit in a uint64. + // So writing an optimized version that only works on bounded types + // should further optimize this hot code. + + var left := new T[|s|](i requires 0 <= i < |s| => s[i]); + var right := new T[|s|](i requires 0 <= i < |s| => s[i]); + var lo, hi := 0, right.Length; + + label BEFORE_WORK: + + if HasUint64Len(s) { + var boundedLo: BoundedInts.uint64, boundedHi: BoundedInts.uint64 := 0, right.Length as BoundedInts.uint64; + ghost var _ := MergeSortMethod(left, right, lessThanOrEq, boundedLo, boundedHi, Right); + + result := right[..]; + } else { + // Fallback to `nat` or BigInt. + // This is a little silly, but this ensures + // that the behavior for very large seq will be the same.
+ // Though it is likely that, if any such seq existed in the real world, + // the performance improvement here would still not be enough to complete the sort... + ghost var _ := NatMergeSortMethod(left, right, lessThanOrEq, lo, hi, Right); + + result := right[..]; + } + + ghost var other := InternalModule.MergeSortBy(s, lessThanOrEq); + + assert Relations.SortedBy(right[..], lessThanOrEq) by { + assert right[..] == right[lo..hi]; + } + assert multiset(right[..]) == multiset(other) by { + calc { + multiset(right[..]); + == {assert right[..] == right[lo..hi];} + multiset(right[lo..hi]); + == + multiset(old@BEFORE_WORK(left[lo..hi])); + == {assert old@BEFORE_WORK(left[lo..hi]) == s;} + multiset(s); + == + multiset(other); + } + } + + // Implementing a by method can be complicated. + // Because methods can have non-determinism, + // whereas functions cannot. + // This means that Dafny normally wants to know + // that the method and function maintain equality at every step. + // But this is hard for this kind of optimized sorting. + // Because what is the functional state at every step, + // and how does it correspond to the state of the 2 optimized arrays? + // This lemma works around this + // by proving that the outcomes are always deterministic and the same. + // It does this by proving that given a total ordering, + // there is one and only one way to sort a given sequence. + TotalOrderingImpliesSortingIsUnique(right[..], other, lessThanOrEq); + } + } + + datatype PlaceResults = Left | Right | Either + type ResultPlacement = r: PlaceResults | !r.Either? witness * + + // These are bounded implementations of merge sort. + // This further speeds things up + // because math with bounded variables + // is significantly faster than math with big numbers. + + method {:isolate_assertions} MergeSortMethod( + left: array, + right: array, + lessThanOrEq: (T, T) -> bool, + lo: BoundedInts.uint64, + hi: BoundedInts.uint64, + where: PlaceResults + ) + returns (resultPlacement: ResultPlacement) + requires Relations.TotalOrdering(lessThanOrEq) + requires left.Length < BoundedInts.TWO_TO_THE_64 + requires right.Length < BoundedInts.TWO_TO_THE_64 + requires lo < hi <= left.Length as BoundedInts.uint64 + requires hi <= right.Length as BoundedInts.uint64 + requires left != right + // reads left, right + modifies left, right + ensures !where.Either? ==> where == resultPlacement + + // We do not modify anything before lo + ensures left[..lo] == old(left[..lo]) && right[..lo] == old(right[..lo]) + // We do not modify anything above hi + ensures left[hi..] == old(left[hi..]) && right[hi..] == old(right[hi..]) + + ensures multiset(left[lo..hi]) == multiset(old(left[lo..hi])) + ensures resultPlacement.Left? ==> Relations.SortedBy(left[lo..hi], lessThanOrEq) + ensures resultPlacement.Right? ==> Relations.SortedBy(right[lo..hi], lessThanOrEq) + ensures resultPlacement.Right? ==> multiset(right[lo..hi]) == multiset(old(left[lo..hi])) + + decreases hi - lo + { + if hi - lo == 1 { + if where == Right { + right[lo] := left[lo]; + return Right; + } else { + return Left; + } + } + + ghost var beforeWork := multiset(left[lo..hi]); + var mid := ((hi - lo)/2) + lo; + var placement? := MergeSortMethod(left, right, lessThanOrEq, lo, mid, Either); + assert left[mid..hi] == old(left[mid..hi]); + ghost var placement2? := MergeSortMethod(left, right, lessThanOrEq, mid, hi, placement?); + assert placement2? == placement?; + + ghost var preMergeResult := if placement?.Left?
then left else right; + calc { + multiset(preMergeResult[lo..hi]); + == { LemmaSplitAt(preMergeResult[..], lo as nat, mid as nat, hi as nat); } + multiset(preMergeResult[lo..mid] + preMergeResult[mid..hi]); + == + multiset(old(left[lo..mid]) + old(left[mid..hi])); + == { LemmaSplitAt(old(left[..]), lo as nat, mid as nat, hi as nat); } + beforeWork; + } + + ghost var mergedResult; + if placement?.Left? { + MergeIntoRight(left := left, right := right, lessThanOrEq := lessThanOrEq, lo := lo, mid := mid, hi := hi); + resultPlacement, mergedResult := Right, right[lo..hi]; + + assert left[hi..] == old(left[hi..]); + assert right[hi..] == old(right[hi..]); + assert left[..lo] == old(left[..lo]); + assert right[..lo] == old(right[..lo]); + } else { + assert placement?.Right?; + MergeIntoRight(left := right, right := left, lessThanOrEq := lessThanOrEq, lo := lo, mid := mid, hi := hi); + resultPlacement, mergedResult := Left, left[lo..hi]; + + assert left[hi..] == old(left[hi..]); + assert right[hi..] == old(right[hi..]); + assert left[..lo] == old(left[..lo]); + assert right[..lo] == old(right[..lo]); + } + + label BEFORE_RETURN: + assert left[hi..] == old(left[hi..]); + assert right[hi..] == old(right[hi..]); + assert left[..lo] == old(left[..lo]); + assert right[..lo] == old(right[..lo]); + if resultPlacement.Left? && where == Right { + // A forall comprehension might seem like a nice fit here, + // however it is not a good fit for two reasons. + // First, Dafny currently creates a range for the full bounds of the bounded number + // see: https://github.com/dafny-lang/dafny/issues/5897 + // Second, this would create two loops. + // The first loop would create the `lo to hi` range of numbers. + // The second loop would then loop over these elements. + for i := lo to hi + modifies right + invariant left[..lo] == old(left[..lo]) && right[..lo] == old(right[..lo]) + invariant left[hi..] == old(left[hi..]) && right[hi..] == old(right[hi..]) + invariant right[lo..i] == left[lo..i] + { + right[i] := left[i]; + assert right[lo..i] == left[lo..i]; + } + + assert right[lo..hi] == mergedResult by { + assert mergedResult == left[lo..hi]; + } + assert left[..] == old@BEFORE_RETURN(left[..]); + assert right[..lo] == old(right[..lo]); + + resultPlacement := Right; + } + if resultPlacement.Right? && where == Left { + for i := lo to hi + modifies left + invariant left[..lo] == old(left[..lo]) && right[..lo] == old(right[..lo]) + invariant left[hi..] == old(left[hi..]) && right[hi..] == old(right[hi..]) + invariant left[lo..i] == right[lo..i] + { + left[i] := right[i]; + assert right[lo..i] == left[lo..i]; + } + + assert left[lo..hi] == mergedResult by { + assert mergedResult == right[lo..hi]; + } + assert right[..]
== old@BEFORE_RETURN(right[..]); + assert left[..lo] == old(left[..lo]); + + resultPlacement := Left; + } + } + + method {:isolate_assertions} MergeIntoRight( + nameonly left: array, + nameonly right: array, + nameonly lessThanOrEq: (T, T) -> bool, + nameonly lo: BoundedInts.uint64, + nameonly mid: BoundedInts.uint64, + nameonly hi: BoundedInts.uint64 + ) + requires Relations.TotalOrdering(lessThanOrEq) + requires + && left.Length < BoundedInts.TWO_TO_THE_64 + && right.Length < BoundedInts.TWO_TO_THE_64 + requires lo <= mid <= hi <= left.Length as BoundedInts.uint64 + requires hi <= right.Length as BoundedInts.uint64 && left != right + // We store "left" in [lo..mid] + requires Relations.SortedBy(left[lo..mid], lessThanOrEq) + // We store "right" in [mid..hi] + requires Relations.SortedBy(left[mid..hi], lessThanOrEq) + // reads left, right + modifies right + // We do not modify anything before lo + ensures right[..lo] == old(right[..lo]) && left[..lo] == old(left[..lo]) + // We do not modify anything above hi + ensures right[hi..] == old(right[hi..]) && left[..lo] == old(left[..lo]) + ensures Relations.SortedBy(right[lo..hi], lessThanOrEq) + ensures multiset(right[lo..hi]) == multiset(old(left[lo..hi])) + ensures multiset(left[lo..hi]) == multiset(old(left[lo..hi])) + { + var leftPosition, rightPosition, iter := lo, mid, lo; + while iter < hi + modifies right + + invariant lo <= leftPosition <= mid <= rightPosition <= hi + invariant leftPosition as nat - lo as nat + rightPosition as nat - mid as nat == iter as nat - lo as nat + invariant right[..lo] == old(right[..lo]) + invariant right[hi..] == old(right[hi..]) + + invariant Relations.SortedBy(left[leftPosition..mid], lessThanOrEq) + invariant Relations.SortedBy(left[rightPosition..hi], lessThanOrEq) + invariant Below(right[lo..iter], left[leftPosition..mid], lessThanOrEq) + invariant Below(right[lo..iter], left[rightPosition..hi], lessThanOrEq) + invariant Relations.SortedBy(right[lo..iter], lessThanOrEq) + invariant multiset(right[lo..iter]) == multiset(left[lo..leftPosition] + left[mid..rightPosition]) + { + + ghost var oldRightPosition, oldIter, oldLeftPosition := rightPosition, iter, leftPosition; + if leftPosition == mid || (rightPosition < hi && lessThanOrEq(left[rightPosition], left[leftPosition])) { + right[iter] := left[rightPosition]; + + PushStillSortedBy(right, lo as nat, iter as nat, lessThanOrEq); + rightPosition, iter := rightPosition + 1, iter + 1; + + BelowIsTransitive(right[lo..iter], left[leftPosition..mid], lessThanOrEq); + + assert 0 < |right[lo..iter]| && 0 < |left[rightPosition..hi]| ==> lessThanOrEq(right[lo..iter][|right[lo..iter]| - 1], left[rightPosition..hi][0]) by { + if 0 == |right[lo..iter]| || 0 == |left[rightPosition..hi]| { + } else { + assert Relations.SortedBy(left[oldRightPosition..hi], lessThanOrEq); + assert lessThanOrEq(left[oldRightPosition..hi][0], left[oldRightPosition..hi][1]); + } + } + BelowIsTransitive(right[lo..iter], left[rightPosition..hi], lessThanOrEq); + + assert multiset(right[lo..iter]) == multiset(left[lo..leftPosition] + left[mid..rightPosition]) by { + // Dafny just wants to be reminded + } + } else { + right[iter] := left[leftPosition]; + + PushStillSortedBy(right, lo as nat, iter as nat, lessThanOrEq); + leftPosition, iter := leftPosition + 1, iter + 1; + + assert 0 < |right[lo..iter]| && 0 < |left[leftPosition..mid]| ==> lessThanOrEq(right[lo..iter][|right[lo..iter]| - 1], left[leftPosition..mid][0]) by { + if 0 == |right[lo..iter]| || 0 == |left[leftPosition..mid]| { + } else 
{ + assert Relations.SortedBy(left[oldLeftPosition..mid], lessThanOrEq); + assert lessThanOrEq(left[oldLeftPosition..mid][0], left[oldLeftPosition..mid][1]); + } + } + BelowIsTransitive(right[lo..iter], left[leftPosition..mid], lessThanOrEq); + assert 0 < |right[lo..iter]| && 0 < |left[rightPosition..hi]| ==> lessThanOrEq(right[lo..iter][|right[lo..iter]| - 1], left[rightPosition..hi][0]) by { + if 0 == |right[lo..iter]| || 0 == |left[rightPosition..hi]| { + } else { + assert right[lo..iter][|right[lo..iter]| - 1] == right[iter - 1]; + assert left[rightPosition..hi][0] == left[rightPosition]; + } + } + BelowIsTransitive(right[lo..iter], left[rightPosition..hi], lessThanOrEq); + + assert multiset(right[lo..iter]) == multiset(left[lo..leftPosition] + left[mid..rightPosition]) by { + // Dafny just wants to be reminded + } + } + } + assert multiset(right[lo..hi]) == multiset(old(left[lo..hi])) by { + assert leftPosition == mid && rightPosition == hi; + LemmaSplitAt(left[..], lo as nat, mid as nat, hi as nat); + assert old(left[lo..hi]) == left[lo..hi] == left[lo..mid] + left[mid..hi]; + } + } + + // Helpers to prove MergeSort + + ghost predicate Below(a: seq, b: seq, lessThanOrEq: (T, T) -> bool) + requires Relations.TotalOrdering(lessThanOrEq) + { + forall i, j :: 0 <= i < |a| && 0 <= j < |b| ==> lessThanOrEq(a[i], b[j]) + } + + lemma BelowIsTransitive(a: seq, b: seq, lessThanOrEq: (T, T) -> bool) + requires Relations.TotalOrdering(lessThanOrEq) + requires Relations.SortedBy(a, lessThanOrEq) + requires Relations.SortedBy(b, lessThanOrEq) + requires 0 < |a| && 0 < |b| ==> lessThanOrEq(a[|a| - 1], b[0]) + ensures Below(a, b, lessThanOrEq) + {} + + lemma PushStillSortedBy(a: array, lo:nat, i: nat, lessThanOrEq: (T, T) -> bool) + requires 0 <= lo <= i < a.Length + requires Relations.SortedBy(a[lo..i], lessThanOrEq) + requires |a[lo..i]| == 0 || lessThanOrEq(a[lo..i][|a[lo..i]| - 1], a[i]) + requires Relations.TotalOrdering(lessThanOrEq) + ensures Relations.SortedBy(a[lo..i + 1], lessThanOrEq) + ensures lo < i ==> lessThanOrEq(a[i - 1], a[i]) + {} + + lemma {:isolate_assertions} TotalOrderingImpliesSortingIsUnique(s1: seq, s2: seq, lessThanOrEq: (T, T) -> bool) + requires Relations.TotalOrdering(lessThanOrEq) + requires Relations.SortedBy(s1, lessThanOrEq) && Relations.SortedBy(s2, lessThanOrEq) + requires multiset(s1) == multiset(s2) + ensures s1 == s2 + { + if |s1| == 0 { + } else { + assert s1[0] in s2 by { + assert s1[0] in multiset(s2); + } + + var i :| 0 <= i < |s2| && s2[i] == s1[0]; + assert multiset{s1[0]} == multiset{s2[i]}; + assert multiset{s1[0]} + multiset(s1[1..]) == multiset(s1) by { + assert s1 == [s1[0]] + s1[1..]; + } + assert multiset{s2[i]} + multiset(s2[0..i] + s2[i+1..]) == multiset(s2) by { + assert s2 == s2[0..i] + [s2[i]] + s2[i+1..]; + } + + assert Relations.SortedBy(s1[1..], lessThanOrEq); + assert Relations.SortedBy(s2[0..i] + s2[i+1..], lessThanOrEq) by { + if i == 0 || i == |s2| - 1 { + } else { + assert lessThanOrEq(s2[i-1], s2[i]); + assert lessThanOrEq(s2[i], s2[i+1]); + } + } + MultisetProperty(multiset{s1[0]}, multiset(s1[1..]), multiset(s2[0..i] + s2[i+1..])); + TotalOrderingImpliesSortingIsUnique(s1[1..], s2[0..i] + s2[i+1..], lessThanOrEq); + + if i == 0 { + } else { + assert s1 == [s2[i]] + s1[1..]; + assert lessThanOrEq(s2[0], s2[i]); + assert s2[0] in s1; + } + } + } + + lemma MultisetProperty(m: multiset, a: multiset, b: multiset) + requires m + a == m + b + ensures a == b + { + var a' := (m + a) - m; + var b' := (m + b) - m; + assert a == a' == b' == b; 
+ } + + lemma LemmaNewFirstElementStillSortedBy(x: T, s: seq, lessThan: (T, T) -> bool) + requires Relations.SortedBy(s, lessThan) + requires |s| == 0 || lessThan(x, s[0]) + requires Relations.TotalOrdering(lessThan) + ensures Relations.SortedBy([x] + s, lessThan) + {} + + lemma LemmaSplitAt(s: seq, lo: nat, split: nat, hi: nat) + requires 0 <= lo + requires lo <= split + requires split <= hi + requires hi <= |s| + ensures s[lo..hi] == s[lo..split] + s[split..hi] + {} + + // This is the nat version of merge sort. + // This is an exact copy of the bounded integer implementation above + // but with `nat` instead of BoundedInts.uint64. + + method {:isolate_assertions} NatMergeSortMethod( + left: array, + right: array, + lessThanOrEq: (T, T) -> bool, + lo: nat, + hi: nat, + where: PlaceResults + ) + returns (resultPlacement: ResultPlacement) + requires Relations.TotalOrdering(lessThanOrEq) + requires lo < hi <= left.Length + requires hi <= right.Length && left != right + // reads left, right + modifies left, right + ensures !where.Either? ==> where == resultPlacement + + // We do not modify anything before lo + ensures left[..lo] == old(left[..lo]) && right[..lo] == old(right[..lo]) + // We do not modify anything above hi + ensures left[hi..] == old(left[hi..]) && right[hi..] == old(right[hi..]) + + ensures multiset(left[lo..hi]) == multiset(old(left[lo..hi])) + ensures resultPlacement.Left? ==> Relations.SortedBy(left[lo..hi], lessThanOrEq) + ensures resultPlacement.Right? ==> Relations.SortedBy(right[lo..hi], lessThanOrEq) + ensures resultPlacement.Right? ==> multiset(right[lo..hi]) == multiset(old(left[lo..hi])) + + decreases hi - lo + { + if hi - lo == 1 { + if where == Right { + right[lo] := left[lo]; + return Right; + } else { + return Left; + } + } + + ghost var beforeWork := multiset(left[lo..hi]); + var mid := ((hi - lo)/2) + lo; + var placement? := NatMergeSortMethod(left, right, lessThanOrEq, lo, mid, Either); + assert left[mid..hi] == old(left[mid..hi]); + ghost var placement2? := NatMergeSortMethod(left, right, lessThanOrEq, mid, hi, placement?); + assert placement2? == placement?; + + ghost var preMergeResult := if placement?.Left? then left else right; + calc { + multiset(preMergeResult[lo..hi]); + == { LemmaSplitAt(preMergeResult[..], lo as nat, mid as nat, hi as nat); } + multiset(preMergeResult[lo..mid] + preMergeResult[mid..hi]); + == + multiset(old(left[lo..mid]) + old(left[mid..hi])); + == { LemmaSplitAt(old(left[..]), lo as nat, mid as nat, hi as nat); } + beforeWork; + } + + ghost var mergedResult; + if placement?.Left? { + NatMergeIntoRight(left := left, right := right, lessThanOrEq := lessThanOrEq, lo := lo, mid := mid, hi := hi); + resultPlacement, mergedResult := Right, right[lo..hi]; + + assert left[hi..] == old(left[hi..]); + assert right[hi..] == old(right[hi..]); + assert left[..lo] == old(left[..lo]); + assert right[..lo] == old(right[..lo]); + } else { + assert placement?.Right?; + NatMergeIntoRight(left := right, right := left, lessThanOrEq := lessThanOrEq, lo := lo, mid := mid, hi := hi); + resultPlacement, mergedResult := Left, left[lo..hi]; + + assert left[hi..] == old(left[hi..]); + assert right[hi..] == old(right[hi..]); + assert left[..lo] == old(left[..lo]); + assert right[..lo] == old(right[..lo]); + } + + label BEFORE_RETURN: + assert left[hi..] == old(left[hi..]); + assert right[hi..] == old(right[hi..]); + assert left[..lo] == old(left[..lo]); + assert right[..lo] == old(right[..lo]); + if resultPlacement.Left? 
&& where == Right { + // A forall comprehension might seem like a nice fit here, + // however it is not a good fit for two reasons. + // First, Dafny currently creates a range for the full bounds of the bounded number + // see: https://github.com/dafny-lang/dafny/issues/5897 + // Second, this would create two loops. + // The first loop would create the `lo to hi` range of numbers. + // The second loop would then loop over these elements. + // A single loop that copies the elements directly avoids both problems. + for i := lo to hi + modifies right + invariant left[..lo] == old(left[..lo]) && right[..lo] == old(right[..lo]) + invariant left[hi..] == old(left[hi..]) && right[hi..] == old(right[hi..]) + invariant right[lo..i] == left[lo..i] + { + right[i] := left[i]; + assert right[lo..i] == left[lo..i]; + } + + assert right[lo..hi] == mergedResult by { + assert mergedResult == left[lo..hi]; + } + assert left[..] == old@BEFORE_RETURN(left[..]); + assert right[..lo] == old(right[..lo]); + + resultPlacement := Right; + } + if resultPlacement.Right? && where == Left { + for i := lo to hi + modifies left + invariant left[..lo] == old(left[..lo]) && right[..lo] == old(right[..lo]) + invariant left[hi..] == old(left[hi..]) && right[hi..] == old(right[hi..]) + invariant left[lo..i] == right[lo..i] + { + left[i] := right[i]; + assert right[lo..i] == left[lo..i]; + } + + assert left[lo..hi] == mergedResult by { + assert mergedResult == right[lo..hi]; + } + assert right[..] == old@BEFORE_RETURN(right[..]); + assert left[..lo] == old(left[..lo]); + + resultPlacement := Left; + } + } + + method {:isolate_assertions} NatMergeIntoRight( + nameonly left: array, + nameonly right: array, + nameonly lessThanOrEq: (T, T) -> bool, + nameonly lo: nat, + nameonly mid: nat, + nameonly hi: nat + ) + requires Relations.TotalOrdering(lessThanOrEq) + requires lo <= mid <= hi <= left.Length + requires hi <= right.Length && left != right + // We store "left" in [lo..mid] + requires Relations.SortedBy(left[lo..mid], lessThanOrEq) + // We store "right" in [mid..hi] + requires Relations.SortedBy(left[mid..hi], lessThanOrEq) + modifies right + // reads left, right + // We do not modify anything before lo + ensures right[..lo] == old(right[..lo]) && left[..lo] == old(left[..lo]) + // We do not modify anything above hi + ensures right[hi..] == old(right[hi..]) && left[..lo] == old(left[..lo]) + ensures Relations.SortedBy(right[lo..hi], lessThanOrEq) + ensures multiset(right[lo..hi]) == multiset(old(left[lo..hi])) + ensures multiset(left[lo..hi]) == multiset(old(left[lo..hi])) + { + var leftPosition, rightPosition, iter := lo, mid, lo; + while iter < hi + modifies right + + invariant lo <= leftPosition <= mid <= rightPosition <= hi + invariant leftPosition as nat - lo as nat + rightPosition as nat - mid as nat == iter as nat - lo as nat + invariant right[..lo] == old(right[..lo]) + invariant right[hi..]
== old(right[hi..]) + + invariant Relations.SortedBy(left[leftPosition..mid], lessThanOrEq) + invariant Relations.SortedBy(left[rightPosition..hi], lessThanOrEq) + invariant Below(right[lo..iter], left[leftPosition..mid], lessThanOrEq) + invariant Below(right[lo..iter], left[rightPosition..hi], lessThanOrEq) + invariant Relations.SortedBy(right[lo..iter], lessThanOrEq) + invariant multiset(right[lo..iter]) == multiset(left[lo..leftPosition] + left[mid..rightPosition]) + { + + ghost var oldRightPosition, oldIter, oldLeftPosition := rightPosition, iter, leftPosition; + if leftPosition == mid || (rightPosition < hi && lessThanOrEq(left[rightPosition], left[leftPosition])) { + right[iter] := left[rightPosition]; + + PushStillSortedBy(right, lo as nat, iter as nat, lessThanOrEq); + rightPosition, iter := rightPosition + 1, iter + 1; + + BelowIsTransitive(right[lo..iter], left[leftPosition..mid], lessThanOrEq); + + assert 0 < |right[lo..iter]| && 0 < |left[rightPosition..hi]| ==> lessThanOrEq(right[lo..iter][|right[lo..iter]| - 1], left[rightPosition..hi][0]) by { + if 0 == |right[lo..iter]| || 0 == |left[rightPosition..hi]| { + } else { + assert Relations.SortedBy(left[oldRightPosition..hi], lessThanOrEq); + assert lessThanOrEq(left[oldRightPosition..hi][0], left[oldRightPosition..hi][1]); + } + } + BelowIsTransitive(right[lo..iter], left[rightPosition..hi], lessThanOrEq); + + assert multiset(right[lo..iter]) == multiset(left[lo..leftPosition] + left[mid..rightPosition]) by { + // Dafny just wants to be reminded + } + } else { + right[iter] := left[leftPosition]; + + PushStillSortedBy(right, lo as nat, iter as nat, lessThanOrEq); + leftPosition, iter := leftPosition + 1, iter + 1; + + assert 0 < |right[lo..iter]| && 0 < |left[leftPosition..mid]| ==> lessThanOrEq(right[lo..iter][|right[lo..iter]| - 1], left[leftPosition..mid][0]) by { + if 0 == |right[lo..iter]| || 0 == |left[leftPosition..mid]| { + } else { + assert Relations.SortedBy(left[oldLeftPosition..mid], lessThanOrEq); + assert lessThanOrEq(left[oldLeftPosition..mid][0], left[oldLeftPosition..mid][1]); + } + } + BelowIsTransitive(right[lo..iter], left[leftPosition..mid], lessThanOrEq); + assert 0 < |right[lo..iter]| && 0 < |left[rightPosition..hi]| ==> lessThanOrEq(right[lo..iter][|right[lo..iter]| - 1], left[rightPosition..hi][0]) by { + if 0 == |right[lo..iter]| || 0 == |left[rightPosition..hi]| { + } else { + assert right[lo..iter][|right[lo..iter]| - 1] == right[iter - 1]; + assert left[rightPosition..hi][0] == left[rightPosition]; + } + } + BelowIsTransitive(right[lo..iter], left[rightPosition..hi], lessThanOrEq); + + assert multiset(right[lo..iter]) == multiset(left[lo..leftPosition] + left[mid..rightPosition]) by { + // Dafny just wants to be reminded + } + } + } + assert multiset(right[lo..hi]) == multiset(old(left[lo..hi])) by { + assert leftPosition == mid && rightPosition == hi; + LemmaSplitAt(left[..], lo as nat, mid as nat, hi as nat); + assert old(left[lo..hi]) == left[lo..hi] == left[lo..mid] + left[mid..hi]; + } + } + +} diff --git a/DynamoDbEncryption/dafny/StructuredEncryption/src/Paths.dfy b/DynamoDbEncryption/dafny/StructuredEncryption/src/Paths.dfy index f5b8bba4e..5b64497bb 100644 --- a/DynamoDbEncryption/dafny/StructuredEncryption/src/Paths.dfy +++ b/DynamoDbEncryption/dafny/StructuredEncryption/src/Paths.dfy @@ -110,7 +110,7 @@ module StructuredEncryptionPaths { } // get the Canonical Path for these Selectors - function method {:tailrecursion} MakeCanonicalPath(path : Path) + function MakeCanonicalPath(path : 
Path) + : (ret : CanonicalPath) requires ValidPath(path) ensures |path| == 0 ==> ret == [] @@ -120,8 +120,35 @@ module StructuredEncryptionPaths { [] else CanonicalPart(path[0]) + MakeCanonicalPath(path[1..]) + } by method { + var result : CanonicalPath := []; + for i := |path| downto 0 + invariant result == MakeCanonicalPath(path[i..]) + { + result := CanonicalPart(path[i]) + result; + } + return result; } + // get the Canonical Path for these Selectors + // function method {:tailrecursion} MakeCanonicalPath(path : Path, pos : nat := 0, acc : CanonicalPath := []) + // : (ret : CanonicalPath) + // requires ValidPath(path) + // requires pos <= |path| + // requires pos == 0 ==> |acc| == 0 + // requires pos == 1 ==> acc == CanonicalPart(path[0]) + // requires acc == MakeCanonicalPathGhost(path[..pos]) + // ensures |path| == 0 && pos == 0 ==> ret == [] + // ensures |path| == 1 ==> ret == CanonicalPart(path[0]) + // ensures ret == MakeCanonicalPathGhost(path) + // decreases |path| - pos + // { + // if |path| == pos then + // acc + // else + // MakeCanonicalPath(path, pos+1, acc + CanonicalPart(path[pos])) + // } + // Does NOT guarantee a unique output for every unique input // e.g. ['a.b'] and ['a','b'] both return 'a.b'. function method PathToString(path : Path) : string diff --git a/DynamoDbEncryption/dafny/StructuredEncryption/src/SortCanon.dfy b/DynamoDbEncryption/dafny/StructuredEncryption/src/SortCanon.dfy index 1033d899e..f4a5fbde2 100644 --- a/DynamoDbEncryption/dafny/StructuredEncryption/src/SortCanon.dfy +++ b/DynamoDbEncryption/dafny/StructuredEncryption/src/SortCanon.dfy @@ -3,6 +3,7 @@ include "../Model/AwsCryptographyDbEncryptionSdkStructuredEncryptionTypes.dfy" include "Util.dfy" +include "OptimizedMergeSort.dfy" module SortCanon { export @@ -22,6 +23,7 @@ module SortCanon { import opened Relations import opened Seq.MergeSort import opened StructuredEncryptionUtil + import OptimizedMergeSort predicate method AuthBelow(x: CanonAuthItem, y: CanonAuthItem) { Below(x.key, y.key) @@ -148,11 +150,89 @@ module SortCanon { } } - predicate method Below(x: seq, y: seq) { + predicate Below(x: seq, y: seq) { |x| != 0 ==> && |y| != 0 && x[0] <= y[0] && (x[0] == y[0] ==> Below(x[1..], y[1..])) + } by method { + + // The slices x[1..], y[1..] are un-optimized operations in Dafny. + // This means that their usage will result in a lot of data copying. + // Additionally, it is very likely that the sizes of these sequences + // will fit in a uint64. + // So writing an optimized version that only works on bounded types + // should further optimize this hot code. + + if HasUint64Len(x) && HasUint64Len(y) { + return BoundedBelow(x,y); + } + + if |x| == 0 { + assert Below(x, y); + return true; + } + + if |y| == 0 { + assert !Below(x, y); + return false; + } + + for i := 0 to |x| + invariant i <= |y| + // The function on the initial arguments + // is equal to the function applied to the intermediate arguments.
+ invariant Below(x, y) == Below(x[i..], y[i..]) + { + if |y| <= i { + return false; + } else if y[i] < x[i] { + return false; + } else if x[i] < y[i] { + return true; + } else { + assert x[i] == y[i]; + } + } + + return true; + } + + predicate BoundedBelow(x: seq64, y: seq64) + { + Below(x,y) + } by method { + var xLength := |x| as uint64; + var yLength := |y| as uint64; + + if xLength == 0 { + assert BoundedBelow(x, y); + return true; + } + + if yLength == 0 { + assert !BoundedBelow(x, y); + return false; + } + + for i := 0 to xLength + invariant i <= yLength + // The function on the initial arguments + // is equal to function applied to the intermediate arguments. + invariant BoundedBelow(x, y) == BoundedBelow(x[i..], y[i..]) + { + if yLength <= i { + return false; + } else if y[i] < x[i] { + return false; + } else if x[i] < y[i] { + return true; + } else { + assert x[i] == y[i]; + } + } + + return true; } lemma BelowIsTotal() @@ -217,7 +297,7 @@ module SortCanon { {} - function method AuthSort(x : CanonAuthList) : (result : CanonAuthList) + function AuthSort(x : CanonAuthList) : (result : CanonAuthList) requires CanonAuthListHasNoDuplicates(x) ensures multiset(x) == multiset(result) ensures SortedBy(result, AuthBelow) @@ -229,9 +309,14 @@ module SortCanon { CanonAuthListMultiNoDup(x, ret); assert CanonAuthListHasNoDuplicates(ret); ret + } by method { + AuthBelowIsTotal(); + result := OptimizedMergeSort.MergeSortNat(x, AuthBelow); + CanonAuthListMultiNoDup(x, result); + assert CanonAuthListHasNoDuplicates(result); } - function method CryptoSort(x : CanonCryptoList) : (result : CanonCryptoList) + function CryptoSort(x : CanonCryptoList) : (result : CanonCryptoList) requires CanonCryptoListHasNoDuplicates(x) ensures multiset(x) == multiset(result) ensures multiset(result) == multiset(x) @@ -244,6 +329,11 @@ module SortCanon { CanonCryptoListMultiNoDup(x, ret); assert CanonCryptoListHasNoDuplicates(ret); ret + } by method { + CryptoBelowIsTotal(); + result := OptimizedMergeSort.MergeSortNat(x, CryptoBelow); + CanonCryptoListMultiNoDup(x, result); + assert CanonCryptoListHasNoDuplicates(result); } lemma MultisetHasNoDuplicates(xs: CanonCryptoList) diff --git a/DynamoDbEncryption/dafny/StructuredEncryption/src/Util.dfy b/DynamoDbEncryption/dafny/StructuredEncryption/src/Util.dfy index fecdcad0a..20cb74b34 100644 --- a/DynamoDbEncryption/dafny/StructuredEncryption/src/Util.dfy +++ b/DynamoDbEncryption/dafny/StructuredEncryption/src/Util.dfy @@ -26,23 +26,48 @@ module StructuredEncryptionUtil { const FooterPath : Path := [member(StructureSegment(key := FooterField))] const HeaderPaths : seq := [HeaderPath, FooterPath] const ReservedCryptoContextPrefixString := "aws-crypto-" - const ReservedCryptoContextPrefixUTF8 := UTF8.EncodeAscii(ReservedCryptoContextPrefixString) + + const ReservedCryptoContextPrefixUTF8 : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x77, 0x73, 0x2d, 0x63, 0x72, 0x79, 0x70, 0x74, 0x6f, 0x2d]; + assert s == UTF8.EncodeAscii(ReservedCryptoContextPrefixString); + s const ATTR_PREFIX := ReservedCryptoContextPrefixString + "attr." 
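Editor's note: the constants above follow a pattern worth calling out. Instead of calling `UTF8.EncodeAscii` at run time, the byte values are written out literally and a one-time `assert` obliges the verifier to check that they equal the encoded string. A minimal sketch of the same pattern with a hypothetical constant (not one the SDK defines):

const HELLO_UTF8 : UTF8.ValidUTF8Bytes :=
  var s := [0x68, 0x65, 0x6c, 0x6c, 0x6f]; // the ASCII bytes of "hello" (illustrative only)
  assert s == UTF8.EncodeAscii("hello");   // checked at verification time, free at run time
  s

The assert keeps the byte literal honest against its string form while compiled code ships the bytes directly.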
- const EC_ATTR_PREFIX : UTF8.ValidUTF8Bytes := UTF8.EncodeAscii(ATTR_PREFIX) + + const EC_ATTR_PREFIX : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x77, 0x73, 0x2d, 0x63, 0x72, 0x79, 0x70, 0x74, 0x6f, 0x2d, 0x61, 0x74, 0x74, 0x72, 0x2e]; + assert s == UTF8.EncodeAscii(ATTR_PREFIX); + s + const LEGEND := ReservedCryptoContextPrefixString + "legend" - const LEGEND_UTF8 : UTF8.ValidUTF8Bytes := UTF8.EncodeAscii(LEGEND) + + const LEGEND_UTF8 : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x77, 0x73, 0x2d, 0x63, 0x72, 0x79, 0x70, 0x74, 0x6f, 0x2d, 0x6c, 0x65, 0x67, 0x65, 0x6e, 0x64]; + assert s == UTF8.EncodeAscii(LEGEND); + s + const LEGEND_STRING : char := 'S' const LEGEND_NUMBER : char := 'N' const LEGEND_LITERAL : char := 'L' const LEGEND_BINARY : char := 'B' const NULL_STR : string := "null" - const NULL_UTF8 : UTF8.ValidUTF8Bytes := UTF8.EncodeAscii(NULL_STR) + const NULL_UTF8 : UTF8.ValidUTF8Bytes := + var s := [0x6e, 0x75, 0x6c, 0x6c]; + assert s == UTF8.EncodeAscii(NULL_STR); + s + const TRUE_STR : string := "true" - const TRUE_UTF8 : UTF8.ValidUTF8Bytes := UTF8.EncodeAscii(TRUE_STR) + const TRUE_UTF8 : UTF8.ValidUTF8Bytes := + var s := [0x74, 0x72, 0x75, 0x65]; + assert s == UTF8.EncodeAscii(TRUE_STR); + s + const FALSE_STR : string := "false" - const FALSE_UTF8 : UTF8.ValidUTF8Bytes := UTF8.EncodeAscii(FALSE_STR) + const FALSE_UTF8 : UTF8.ValidUTF8Bytes := + var s := [0x66, 0x61, 0x6c, 0x73, 0x65]; + assert s == UTF8.EncodeAscii(FALSE_STR); + s datatype CanonCryptoItem = CanonCryptoItem ( key : CanonicalPath, @@ -174,14 +199,16 @@ module StructuredEncryptionUtil { // sequences are equal if zero is returned // Some care should be taken to ensure that target languages don't over optimize this. - function method {:tailrecursion} ConstantTimeCompare(a : Bytes, b : Bytes, acc : bv8 := 0) : bv8 + function method {:tailrecursion} ConstantTimeCompare(a : Bytes, b : Bytes, pos : nat := 0, acc : bv8 := 0) : bv8 requires |a| == |b| + requires 0 <= pos <= |a| + decreases |a| - pos { - if |a| == 0 then + if |a| == pos then acc else - var x := ((a[0] as bv8) ^ (b[0] as bv8)); - ConstantTimeCompare(a[1..], b[1..], x | acc) + var x := ((a[pos] as bv8) ^ (b[pos] as bv8)); + ConstantTimeCompare(a, b, pos+1, x | acc) } predicate method ConstantTimeEquals(a : Bytes, b : Bytes) @@ -290,9 +317,10 @@ module StructuredEncryptionUtil { //# where `typeId` is the attribute's [type ID](./ddb-attribute-serialization.md#type-id) //# and `serializedValue` is the attribute's value serialized according to //# [Attribute Value Serialization](./ddb-attribute-serialization.md#attribute-value-serialization). 
- ensures ret == UTF8.EncodeAscii(Base64.Encode(t.typeId + t.value)) + ensures ret == UTF8.Encode(Base64.Encode(t.typeId + t.value)).value { - UTF8.EncodeAscii(Base64.Encode(t.typeId + t.value)) + var base := Base64.Encode(t.typeId + t.value); + UTF8.Encode(base).value } function method DecodeTerminal(t : UTF8.ValidUTF8Bytes) : (ret : Result) diff --git a/DynamoDbEncryption/dafny/StructuredEncryption/test/HappyCaseTests.dfy b/DynamoDbEncryption/dafny/StructuredEncryption/test/HappyCaseTests.dfy index 7c41f025c..b498fa212 100644 --- a/DynamoDbEncryption/dafny/StructuredEncryption/test/HappyCaseTests.dfy +++ b/DynamoDbEncryption/dafny/StructuredEncryption/test/HappyCaseTests.dfy @@ -13,12 +13,22 @@ module HappyCaseTests { import MaterialProviders import StructuredDataTestFixtures + const some : UTF8.ValidUTF8Bytes := + var s := [0x73, 0x6f, 0x6d, 0x65]; + assert s == UTF8.EncodeAscii("some"); + s + + const value : UTF8.ValidUTF8Bytes := + var s := [0x76, 0x61, 0x6c, 0x75, 0x65]; + assert s == UTF8.EncodeAscii("value"); + s + method {:test} TestRoundTrip() { var structuredEncryption :- expect StructuredEncryption.StructuredEncryption(StructuredEncryption.DefaultStructuredEncryptionConfig()); var cmm := StructuredDataTestFixtures.GetDefaultCMMWithKMSKeyring(); - var encContext := map[UTF8.EncodeAscii("some") := UTF8.EncodeAscii("value")]; + var encContext := map[some := value]; var algSuiteId := AlgorithmSuites.DBE_ALG_AES_256_GCM_HKDF_SHA512_COMMIT_KEY_ECDSA_P384_SYMSIG_HMAC_SHA384.id.DBE; assert && structuredEncryption.ValidState(); diff --git a/DynamoDbEncryption/dafny/StructuredEncryption/test/Header.dfy b/DynamoDbEncryption/dafny/StructuredEncryption/test/Header.dfy index be8905c86..fbd200499 100644 --- a/DynamoDbEncryption/dafny/StructuredEncryption/test/Header.dfy +++ b/DynamoDbEncryption/dafny/StructuredEncryption/test/Header.dfy @@ -21,15 +21,41 @@ module TestHeader { import AlgorithmSuites import Canonize + const abc : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x62, 0x63]; + assert s == UTF8.EncodeAscii("abc"); + s + + const def : UTF8.ValidUTF8Bytes := + var s := [0x64, 0x65, 0x66]; + assert s == UTF8.EncodeAscii("def"); + s + + const cba : UTF8.ValidUTF8Bytes := + var s := [0x63, 0x62, 0x61]; + assert s == UTF8.EncodeAscii("cba"); + s + + const fed : UTF8.ValidUTF8Bytes := + var s := [0x66, 0x65, 0x64]; + assert s == UTF8.EncodeAscii("fed"); + s + + const provID : UTF8.ValidUTF8Bytes := + var s := [0x70, 0x72, 0x6f, 0x76, 0x49, 0x44]; + assert s == UTF8.EncodeAscii("provID"); + s + + method {:test} TestRoundTrip() { var head := PartialHeader ( version := 1, flavor := 1, msgID := [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32], legend := [0x65, 0x73], - encContext := map[EncodeAscii("abc") := EncodeAscii("def")], + encContext := map[abc := def], dataKeys := [CMP.EncryptedDataKey( - keyProviderId := EncodeAscii("provID") , + keyProviderId := provID, keyProviderInfo := [1,2,3,4,5], ciphertext := [6,7,8,9])] ); @@ -45,9 +71,9 @@ module TestHeader { flavor := 1, msgID := [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32], legend := [0x65, 0x73], - encContext := map[EncodeAscii("abc") := EncodeAscii("def")], + encContext := map[abc := def], dataKeys := [CMP.EncryptedDataKey( - keyProviderId := EncodeAscii("provID") , + keyProviderId := provID, keyProviderInfo := [1,2,3,4,5], ciphertext := [6,7,8,9])] ); @@ -76,7 +102,7 @@ module TestHeader { const e : uint8 := 'e' as uint8 const f : uint8 := 'f' as uint8 method 
{:test} TestDuplicateContext() { - var cont : CMPEncryptionContext := map[EncodeAscii("abc") := EncodeAscii("def"), EncodeAscii("cba") := EncodeAscii("fed")]; + var cont : CMPEncryptionContext := map[abc := def, cba := fed]; var serCont := SerializeContext(cont); expect serCont == [ 0,2, // two items diff --git a/DynamoDbEncryption/dafny/StructuredEncryption/test/Paths.dfy b/DynamoDbEncryption/dafny/StructuredEncryption/test/Paths.dfy index 63726a18d..ce3c1704b 100644 --- a/DynamoDbEncryption/dafny/StructuredEncryption/test/Paths.dfy +++ b/DynamoDbEncryption/dafny/StructuredEncryption/test/Paths.dfy @@ -9,31 +9,25 @@ module PathsTests { import opened StructuredEncryptionUtil import opened StructuredEncryptionPaths + const example_table : UTF8.ValidUTF8Bytes := + var s := [0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65]; + assert s == UTF8.EncodeAscii("example_table"); + s + + const name : UTF8.ValidUTF8Bytes := + var s := [0x6e, 0x61, 0x6d, 0x65]; + assert s == UTF8.EncodeAscii("name"); + s + method {:test} TestSpecExamples() { var tableName : GoodString := "example_table"; assert(ValidString("example_table")); var pathToTest := StringToUniPath("name"); expect CanonPath(tableName, pathToTest) == - UTF8.EncodeAscii("example_table") + example_table + [0,0,0,0,0,0,0,1] // depth + ['$' as uint8] // map + [0,0,0,0,0,0,0,4] // length - + UTF8.EncodeAscii("name"); - - // var history := Selector.Map("status-history"); - // var index := Selector.List(0); - // var timestamp := Selector.Map("timestamp"); - // var pathToTest2 := TerminalLocation([history, index, timestamp]); - // expect CanonPath(tableName, pathToTest2) == - // UTF8.EncodeAscii("example_table") - // + [0,0,0,0,0,0,0,3] // depth - // + ['$' as uint8] // map - // + [0,0,0,0,0,0,0,14] // length of "status-history" - // + UTF8.EncodeAscii("status-history") - // + ['#' as uint8] // array - // + [0,0,0,0,0,0,0,0] // index - // + ['$' as uint8] // map - // + [0,0,0,0,0,0,0,9] // length of "timestamp" - // + UTF8.EncodeAscii("timestamp"); + + name; } } diff --git a/TestVectors/dafny/DDBEncryption/src/DecryptManifest.dfy b/TestVectors/dafny/DDBEncryption/src/DecryptManifest.dfy index 5ee680b4e..61a325668 100644 --- a/TestVectors/dafny/DDBEncryption/src/DecryptManifest.dfy +++ b/TestVectors/dafny/DDBEncryption/src/DecryptManifest.dfy @@ -21,6 +21,7 @@ module {:options "-functionSyntax:4"} DecryptManifest { import JsonConfig import ENC = AwsCryptographyDbEncryptionSdkDynamoDbItemEncryptorTypes import KeyVectors + import OsLang method OnePositiveTest(name : string, config : JSON, encrypted : JSON, plaintext : JSON, keys : KeyVectors.KeyVectorsClient) returns (output : Result) @@ -110,6 +111,14 @@ module {:options "-functionSyntax:4"} DecryptManifest { } } + function LogFileName() : string + { + if OsLang.GetOsShort() == "Windows" && OsLang.GetLanguageShort() == "Dotnet" then + "..\\..\\PerfLog.txt" + else + "../../PerfLog.txt" + } + method Decrypt(inFile : string, keyVectors: KeyVectors.KeyVectorsClient) returns (output : Result) requires keyVectors.ValidState() @@ -168,11 +177,13 @@ module {:options "-functionSyntax:4"} DecryptManifest { } } + var time := Time.GetAbsoluteTime(); for i := 0 to |tests.value| { var obj := tests.value[i]; :- Need(obj.1.Object?, "Value of test '" + obj.0 + "' must be an Object."); var _ :- OneTest(obj.0, obj.1, keyVectors); } + Time.PrintTimeSinceLong(time, "DB-ESDK-TV-Decrypt-" + inFile, Some(LogFileName())); timeStamp :- expect Time.GetCurrentTimeStamp(); print timeStamp + " Tests 
Complete.\n"; diff --git a/TestVectors/dafny/DDBEncryption/src/JsonConfig.dfy b/TestVectors/dafny/DDBEncryption/src/JsonConfig.dfy index d07e572c9..3ae66e1f4 100644 --- a/TestVectors/dafny/DDBEncryption/src/JsonConfig.dfy +++ b/TestVectors/dafny/DDBEncryption/src/JsonConfig.dfy @@ -33,6 +33,11 @@ module {:options "-functionSyntax:4"} JsonConfig { import DynamoDbItemEncryptor + const abc : UTF8.ValidUTF8Bytes := + var s := [0x61, 0x62, 0x63]; + assert s == UTF8.EncodeAscii("abc"); + s + predicate IsValidInt32(x: int) { -0x8000_0000 <= x < 0x8000_0000} type ConfigName = string @@ -502,7 +507,7 @@ module {:options "-functionSyntax:4"} JsonConfig { :- Need(|standardBeacons| > 0, "A Search Config needs at least one standard beacon."); var keyMaterial : KeyMaterial.KeyMaterial := - KeyMaterial.StaticKeyStoreInformation("abc", UTF8.EncodeAscii("abc"), [1,2,3,4,5], [1,2,3,4,5]); + KeyMaterial.StaticKeyStoreInformation("abc", abc, [1,2,3,4,5], [1,2,3,4,5]); var store := SKS.CreateStaticKeyStore(keyMaterial); var source : Types.BeaconKeySource := if keySource.Some? then @@ -527,7 +532,7 @@ module {:options "-functionSyntax:4"} JsonConfig { ensures output.Success? ==> output.value.ValidState() && fresh(output.value.Modifies()) { var keyMaterial : KeyMaterial.KeyMaterial := - KeyMaterial.StaticKeyStoreInformation("abc", UTF8.EncodeAscii("abc"), [1,2,3,4,5], [1,2,3,4,5]); + KeyMaterial.StaticKeyStoreInformation("abc", abc, [1,2,3,4,5], [1,2,3,4,5]); var store := SKS.CreateStaticKeyStore(keyMaterial); var source : Types.BeaconKeySource := Types.single(Types.SingleKeyStore(keyId := "foo", cacheTTL := 42)); diff --git a/project.properties b/project.properties index 28a3b90f1..1bba22bde 100644 --- a/project.properties +++ b/project.properties @@ -1,6 +1,6 @@ projectJavaVersion=3.8.0-SNAPSHOT mplDependencyJavaVersion=1.9.0-SNAPSHOT dafnyVersion=4.9.0 -dafnyVerifyVersion=4.9.0 +dafnyVerifyVersion=4.9.1 dafnyRuntimeJavaVersion=4.9.0 smithyDafnyJavaConversionVersion=0.1.1 diff --git a/submodules/MaterialProviders b/submodules/MaterialProviders index 70e580991..d5c4a20d9 160000 --- a/submodules/MaterialProviders +++ b/submodules/MaterialProviders @@ -1 +1 @@ -Subproject commit 70e580991678387ce897a286c4f7f449aa616785 +Subproject commit d5c4a20d97d5e074f874d672f22cef996b2c5079 diff --git a/submodules/smithy-dafny b/submodules/smithy-dafny index 186339f25..5fb3f25ea 160000 --- a/submodules/smithy-dafny +++ b/submodules/smithy-dafny @@ -1 +1 @@ -Subproject commit 186339f258f9116a3c25cc781c747ab0e94e9dc6 +Subproject commit 5fb3f25ea3444c51b2ad30b25ab03964cf866cd1
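Editor's note: a recurring refactor throughout this diff (ConstantTimeCompare, SerializeContext2, SerializeDataKeys2, GetOneKVPair, GetOneDataKey, GetDataKeys2, and the Collect* helpers) replaces recursion on `s[1..]`, or repeated re-slicing of the input bytes, with an index parameter, so compiled code never copies the tail of a sequence. A minimal, hypothetical sketch of that transformation (the `SumSliced`/`SumFrom` names are illustrative only, not part of this change):

// Before: recursing on s[1..] copies the remaining elements at every step in compiled code.
function SumSliced(s: seq<int>): int {
  if |s| == 0 then 0 else s[0] + SumSliced(s[1..])
}

// After: carry a position (and an accumulator) and only index into s; no slicing, still tail recursive.
function {:tailrecursion} SumFrom(s: seq<int>, pos: nat := 0, acc: int := 0): int
  requires pos <= |s|
  decreases |s| - pos
{
  if pos == |s| then acc else SumFrom(s, pos + 1, acc + s[pos])
}

The added `requires pos <= |s|` and `decreases |s| - pos` clauses mirror the ones introduced on the refactored functions earlier in this diff.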