From de15a6d2d0aa0fdcf53c0766f671912329f4cffc Mon Sep 17 00:00:00 2001 From: Henry Tsai <17891086+thehenrytsai@users.noreply.github.com> Date: Thu, 12 Nov 2020 16:21:24 -0800 Subject: [PATCH] feat(ref-imp): #766 - SIP 1 - Moved provisionalProofFileUri into map file --- lib/core/versions/latest/AnchorFile.ts | 35 +--- lib/core/versions/latest/BatchWriter.ts | 3 +- lib/core/versions/latest/ErrorCode.ts | 7 +- lib/core/versions/latest/InputValidator.ts | 26 +++ lib/core/versions/latest/MapFile.ts | 23 ++- .../versions/latest/TransactionProcessor.ts | 51 ++++- .../versions/latest/models/AnchorFileModel.ts | 1 - .../versions/latest/models/MapFileModel.ts | 8 +- tests/core/AnchorFile.spec.ts | 32 ++-- tests/core/CoreProofFile.spec.ts | 5 +- tests/core/MapFile.spec.ts | 18 +- tests/core/Observer.spec.ts | 6 +- tests/core/TransactionProcessor.spec.ts | 177 ++++++++++++------ tests/generators/FileGenerator.ts | 43 +++++ tests/generators/OperationGenerator.ts | 3 +- 15 files changed, 308 insertions(+), 130 deletions(-) create mode 100644 tests/generators/FileGenerator.ts diff --git a/lib/core/versions/latest/AnchorFile.ts b/lib/core/versions/latest/AnchorFile.ts index a3308be4e..ebf7289de 100644 --- a/lib/core/versions/latest/AnchorFile.ts +++ b/lib/core/versions/latest/AnchorFile.ts @@ -3,10 +3,9 @@ import ArrayMethods from './util/ArrayMethods'; import Compressor from './util/Compressor'; import CreateOperation from './CreateOperation'; import DeactivateOperation from './DeactivateOperation'; -import Encoder from './Encoder'; import ErrorCode from './ErrorCode'; +import InputValidator from './InputValidator'; import JsonAsync from './util/JsonAsync'; -import Multihash from './Multihash'; import ProtocolParameters from './ProtocolParameters'; import RecoverOperation from './RecoverOperation'; import SidetreeError from '../../../common/SidetreeError'; @@ -49,7 +48,7 @@ export default class AnchorFile { throw SidetreeError.createFromError(ErrorCode.AnchorFileNotJson, e); } - const allowedProperties = new Set(['mapFileUri', 'coreProofFileUri', 'provisionalProofFileUri', 'operations', 'writerLockId']); + const allowedProperties = new Set(['mapFileUri', 'coreProofFileUri', 'operations', 'writerLockId']); for (const property in anchorFileModel) { if (!allowedProperties.has(property)) { throw new SidetreeError(ErrorCode.AnchorFileHasUnknownProperty); @@ -75,7 +74,7 @@ export default class AnchorFile { // Map file URI validations. const mapFileUri = anchorFileModel.mapFileUri; - AnchorFile.validateCasFileUri(mapFileUri); + InputValidator.validateCasFileUri(mapFileUri, 'map file URI'); // `operations` validations. @@ -141,7 +140,7 @@ export default class AnchorFile { // Validate core proof file URI. if (recoverOperations.length > 0 || deactivateOperations.length > 0) { - AnchorFile.validateCasFileUri(anchorFileModel.coreProofFileUri); + InputValidator.validateCasFileUri(anchorFileModel.coreProofFileUri, 'core proof file URI'); } else { if (anchorFileModel.coreProofFileUri !== undefined) { throw new SidetreeError( @@ -151,11 +150,6 @@ export default class AnchorFile { } } - // Validate provisional proof file URI. 
- if (anchorFileModel.provisionalProofFileUri !== undefined) { - AnchorFile.validateCasFileUri(anchorFileModel.provisionalProofFileUri); - } - const anchorFile = new AnchorFile(anchorFileModel, didUniqueSuffixes, createOperations, recoverOperations, deactivateOperations); return anchorFile; } @@ -167,7 +161,6 @@ export default class AnchorFile { writerLockId: string | undefined, mapFileHash: string, coreProofFileHash: string | undefined, - provisionalProofFileHash: string | undefined, createOperationArray: CreateOperation[], recoverOperationArray: RecoverOperation[], deactivateOperationArray: DeactivateOperation[] @@ -205,7 +198,6 @@ export default class AnchorFile { writerLockId, mapFileUri: mapFileHash, coreProofFileUri: coreProofFileHash, - provisionalProofFileUri: provisionalProofFileHash, operations: { create: createOperations, recover: recoverOperations, @@ -213,6 +205,11 @@ export default class AnchorFile { } }; + // Only insert `coreProofFileUri` property if a value is given. + if (coreProofFileHash !== undefined) { + anchorFileModel.coreProofFileUri = coreProofFileHash; + } + return anchorFileModel; } @@ -223,13 +220,12 @@ export default class AnchorFile { writerLockId: string | undefined, mapFileHash: string, coreProofFileHash: string | undefined, - provisionalProofFileHash: string | undefined, createOperations: CreateOperation[], recoverOperations: RecoverOperation[], deactivateOperations: DeactivateOperation[] ): Promise { const anchorFileModel = await AnchorFile.createModel( - writerLockId, mapFileHash, coreProofFileHash, provisionalProofFileHash, createOperations, recoverOperations, deactivateOperations + writerLockId, mapFileHash, coreProofFileHash, createOperations, recoverOperations, deactivateOperations ); const anchorFileJson = JSON.stringify(anchorFileModel); const anchorFileBuffer = Buffer.from(anchorFileJson); @@ -237,17 +233,6 @@ export default class AnchorFile { return Compressor.compress(anchorFileBuffer); } - private static validateCasFileUri (casFileUri: any) { - if (typeof casFileUri !== 'string') { - throw new SidetreeError(ErrorCode.AnchorFileCasFileUriNotString); - } - - const casFileUriAsHashBuffer = Encoder.decodeAsBuffer(casFileUri); - if (!Multihash.isComputedUsingHashAlgorithm(casFileUriAsHashBuffer, ProtocolParameters.hashAlgorithmInMultihashCode)) { - throw new SidetreeError(ErrorCode.AnchorFileCasFileUriUnsupported, `CAS file URI '${casFileUri}' is computed using an unsupported hash algorithm.`); - } - } - private static validateWriterLockId (writerLockId: string) { // Max size check. const writerLockIdSizeInBytes = Buffer.from(writerLockId).length; diff --git a/lib/core/versions/latest/BatchWriter.ts b/lib/core/versions/latest/BatchWriter.ts index 642e3a99b..9d9808f5a 100644 --- a/lib/core/versions/latest/BatchWriter.ts +++ b/lib/core/versions/latest/BatchWriter.ts @@ -76,7 +76,7 @@ export default class BatchWriter implements IBatchWriter { console.info(LogColor.lightBlue(`Wrote chunk file ${LogColor.green(chunkFileHash)} to content addressable store.`)); // Write the map file to content addressable store. 
- const mapFileBuffer = await MapFile.createBuffer(chunkFileHash, updateOperations); + const mapFileBuffer = await MapFile.createBuffer(chunkFileHash, provisionalProofFileHash, updateOperations); const mapFileHash = await this.cas.write(mapFileBuffer); console.info(LogColor.lightBlue(`Wrote map file ${LogColor.green(mapFileHash)} to content addressable store.`)); @@ -86,7 +86,6 @@ export default class BatchWriter implements IBatchWriter { writerLockId, mapFileHash, coreProofFileHash, - provisionalProofFileHash, createOperations, recoverOperations, deactivateOperations diff --git a/lib/core/versions/latest/ErrorCode.ts b/lib/core/versions/latest/ErrorCode.ts index a5c3779ef..20907e8da 100644 --- a/lib/core/versions/latest/ErrorCode.ts +++ b/lib/core/versions/latest/ErrorCode.ts @@ -2,8 +2,6 @@ * Error codes used ONLY by this version of the protocol. */ export default { - AnchorFileCasFileUriNotString: 'anchor_file_cas_file_uri_not_string', - AnchorFileCasFileUriUnsupported: 'anchor_file_cas_file_uri_unsupported', AnchorFileCoreProofFileUriNotAllowed: 'anchor_file_core_proof_file_uri_not_allowed', AnchorFileCreatePropertyNotArray: 'anchor_file_create_property_not_array', AnchorFileDeactivatePropertyNotArray: 'anchor_file_deactivate_property_not_array', @@ -36,6 +34,7 @@ export default { CoreProofFileHasNoProofs: 'core_proof_file_has_no_proofs', CoreProofFileNotJson: 'core_proof_file_not_json', CoreProofFileOperationsNotFound: 'core_proof_file_operations_not_found', + CoreProofFileProofCountNotTheSameAsOperationCountInAnchorFile: 'core_proof_file_proof_count_not_the_same_as_operation_count_in_anchor_file', CoreProofFileRecoverPropertyNotAnArray: 'core_proof_file_recover_property_not_an_array', CreateOperationMissingOrUnknownProperty: 'create_operation_missing_or_unknown_property', CreateOperationSuffixDataIsNotObject: 'create_operation_suffix_data_is_not_object', @@ -100,6 +99,8 @@ export default { DocumentNotValidOriginalDocument: 'document_not_valid_original_document', EncoderValidateBase64UrlStringInputNotBase64UrlString: 'encoder_validate_base64_url_string_input_not_base64_url_string', EncoderValidateBase64UrlStringInputNotString: 'encoder_validate_base64_url_string_input_not_string', + InputValidatorCasFileUriNotString: 'input_validator_cas_file_uri_not_string', + InputValidatorCasFileUriUnsupported: 'input_validator_cas_file_uri_unsupported', InputValidatorInputCannotBeAnArray: 'input_validator_input_cannot_be_an_array', InputValidatorInputContainsNowAllowedProperty: 'input_validator_input_contains_now_allowed_property', InputValidatorInputIsNotAnObject: 'input_validator_input_is_not_an_object', @@ -126,6 +127,7 @@ export default { MapFileMultipleOperationsForTheSameDid: 'map_file_multiple_operations_for_the_same_did', MapFileNotJson: 'map_file_not_json', MapFileOperationsPropertyHasMissingOrUnknownProperty: 'map_file_operations_property_has_missing_or_unknown_property', + MapFileProvisionalProofFileUriNotAllowed: 'map_file_provisional_proof_file_uri_not_allowed', MapFileUpdateOperationsNotArray: 'map_file_update_operations_not_array', MultihashNotLatestSupportedHashAlgorithm: 'multihash_not_latest_supported_hash_algorithm', MultihashUnsupportedHashAlgorithm: 'multihash_unsupported_hash_algorithm', @@ -143,6 +145,7 @@ export default { ProvisionalProofFileHasNoProofs: 'provisional_proof_file_has_no_proofs', ProvisionalProofFileNotJson: 'provisional_proof_file_not_json', ProvisionalProofFileOperationsNotFound: 'provisional_proof_file_operations_not_found', + 
ProvisionalProofFileProofCountNotTheSameAsOperationCountInMapFile: 'provisional_proof_file_proof_count_not_the_same_as_operation_count_in_map_file', ProvisionalProofFileUpdatePropertyNotAnArray: 'provisional_proof_file_update_property_not_an_array', QueueingMultipleOperationsPerDidNotAllowed: 'queueing_multiple_operations_per_did_not_allowed', RecoverOperationMissingOrInvalidDidUniqueSuffix: 'recover_operation_missing_or_invalid_did_unique_suffix', diff --git a/lib/core/versions/latest/InputValidator.ts b/lib/core/versions/latest/InputValidator.ts index 80266061b..4a107bd5f 100644 --- a/lib/core/versions/latest/InputValidator.ts +++ b/lib/core/versions/latest/InputValidator.ts @@ -1,4 +1,7 @@ +import Encoder from './Encoder'; import ErrorCode from './ErrorCode'; +import Multihash from './Multihash'; +import ProtocolParameters from './ProtocolParameters'; import SidetreeError from '../../../common/SidetreeError'; /** @@ -34,4 +37,27 @@ export default class InputValidator { } } } + + /** + * Validates that the given input is a valid CAS File URI. + * @param inputContextForErrorLogging This string is used for error logging purposes only. e.g. 'document', or 'suffix data'. + */ + public static validateCasFileUri (casFileUri: any, inputContextForErrorLogging: string) { + const casFileUriType = typeof casFileUri; + if (casFileUriType !== 'string') { + throw new SidetreeError( + ErrorCode.InputValidatorCasFileUriNotString, + `Input ${inputContextForErrorLogging} CAS file URI '${casFileUri}' needs to be of string type, but is of ${casFileUriType} type instead.` + ); + } + + const casFileUriAsHashBuffer = Encoder.decodeAsBuffer(casFileUri); + const hashAlgorithmInMultihashCode = ProtocolParameters.hashAlgorithmInMultihashCode; + if (!Multihash.isComputedUsingHashAlgorithm(casFileUriAsHashBuffer, hashAlgorithmInMultihashCode)) { + throw new SidetreeError( + ErrorCode.InputValidatorCasFileUriUnsupported, + `Input ${inputContextForErrorLogging} CAS file URI '${casFileUri}' is not computed using hash algorithm of code ${hashAlgorithmInMultihashCode}.` + ); + } + } } diff --git a/lib/core/versions/latest/MapFile.ts b/lib/core/versions/latest/MapFile.ts index 0dcc6640d..07c024b2c 100644 --- a/lib/core/versions/latest/MapFile.ts +++ b/lib/core/versions/latest/MapFile.ts @@ -1,6 +1,7 @@ import ArrayMethods from './util/ArrayMethods'; import Compressor from './util/Compressor'; import ErrorCode from './ErrorCode'; +import InputValidator from './InputValidator'; import JsonAsync from './util/JsonAsync'; import MapFileModel from './models/MapFileModel'; import Multihash from './Multihash'; @@ -43,7 +44,7 @@ export default class MapFile { throw SidetreeError.createFromError(ErrorCode.MapFileNotJson, error); } - const allowedProperties = new Set(['chunks', 'operations']); + const allowedProperties = new Set(['chunks', 'operations', 'provisionalProofFileUri']); for (const property in mapFileModel) { if (!allowedProperties.has(property)) { throw new SidetreeError(ErrorCode.MapFileHasUnknownProperty); @@ -55,6 +56,18 @@ export default class MapFile { const updateOperations = await MapFile.parseOperationsProperty(mapFileModel.operations); const didUniqueSuffixes = updateOperations.map(operation => operation.didUniqueSuffix); + // Validate provisional proof file URI. 
+    if (updateOperations.length > 0) {
+      InputValidator.validateCasFileUri(mapFileModel.provisionalProofFileUri, 'provisional proof file URI');
+    } else {
+      if (mapFileModel.provisionalProofFileUri !== undefined) {
+        throw new SidetreeError(
+          ErrorCode.MapFileProvisionalProofFileUriNotAllowed,
+          `Provisional proof file '${mapFileModel.provisionalProofFileUri}' not allowed in a map file with no updates.`
+        );
+      }
+    }
+
     const mapFile = new MapFile(mapFileModel, didUniqueSuffixes, updateOperations);
     return mapFile;
   }
@@ -117,7 +130,9 @@ export default class MapFile {
   /**
    * Creates the Map File buffer.
    */
-  public static async createBuffer (chunkFileHash: string, updateOperationArray: UpdateOperation[]): Promise<Buffer> {
+  public static async createBuffer (
+    chunkFileHash: string, provisionalProofFileHash: string | undefined, updateOperationArray: UpdateOperation[]
+  ): Promise<Buffer> {
     const updateOperations = updateOperationArray.map(operation => {
       return {
         didSuffix: operation.didUniqueSuffix,
@@ -129,11 +144,13 @@ export default class MapFile {
       chunks: [{ chunkFileUri: chunkFileHash }]
     };
 
-    // Only insert an `operations` property if there are update operations.
+    // Only insert `operations` and `provisionalProofFileUri` properties if there are update operations.
     if (updateOperations.length > 0) {
       mapFileModel.operations = {
         update: updateOperations
       };
+
+      mapFileModel.provisionalProofFileUri = provisionalProofFileHash;
     }
 
     const rawData = JSON.stringify(mapFileModel);
diff --git a/lib/core/versions/latest/TransactionProcessor.ts b/lib/core/versions/latest/TransactionProcessor.ts
index 62d26a086..2fcacb620 100644
--- a/lib/core/versions/latest/TransactionProcessor.ts
+++ b/lib/core/versions/latest/TransactionProcessor.ts
@@ -49,16 +49,16 @@ export default class TransactionProcessor implements ITransactionProcessor {
     const mapFile = await this.downloadAndVerifyMapFile(anchorFile, anchoredData.numberOfOperations);
 
     // Download and verify core proof file.
-    await this.downloadAndVerifyCoreProofFile(anchorFile);
+    const coreProofFile = await this.downloadAndVerifyCoreProofFile(anchorFile);
 
     // Download and verify provisional proof file.
-    await this.downloadAndVerifyProvisionalProofFile(anchorFile);
+    const provisionalProofFile = await this.downloadAndVerifyProvisionalProofFile(mapFile);
 
     // Download and verify chunk file.
     const chunkFileModel = await this.downloadAndVerifyChunkFile(mapFile);
 
     // Compose into operations from all the files downloaded.
-    const operations = await this.composeAnchoredOperationModels(transaction, anchorFile, mapFile, chunkFileModel);
+    const operations = await this.composeAnchoredOperationModels(transaction, anchorFile, mapFile, coreProofFile, provisionalProofFile, chunkFileModel);
 
     // If the code reaches here, it means that the batch of operations is valid, store the operations.
     await this.operationStore.put(operations);
@@ -132,20 +132,39 @@
     const fileBuffer = await this.downloadFileFromCas(coreProofFileUri, ProtocolParameters.maxProofFileSizeInBytes);
     const coreProofFile = await CoreProofFile.parse(fileBuffer, anchorFile.deactivateOperations.map(operation => operation.didUniqueSuffix));
 
+    const recoverAndDeactivateCount = anchorFile.deactivateOperations.length + anchorFile.recoverOperations.length;
+    const proofCountInCoreProofFile = coreProofFile.deactivateProofs.length + coreProofFile.recoverProofs.length;
+    if (recoverAndDeactivateCount !== proofCountInCoreProofFile) {
+      throw new SidetreeError(
+        ErrorCode.CoreProofFileProofCountNotTheSameAsOperationCountInAnchorFile,
+        `Proof count of ${proofCountInCoreProofFile} in core proof file different to recover + deactivate count of ${recoverAndDeactivateCount} in anchor file.`
+      );
+    }
+
     return coreProofFile;
   }
 
-  private async downloadAndVerifyProvisionalProofFile (anchorFile: AnchorFile): Promise<ProvisionalProofFile | undefined> {
-    const provisionalProofFileUri = anchorFile.model.provisionalProofFileUri;
-    if (provisionalProofFileUri === undefined) {
+  private async downloadAndVerifyProvisionalProofFile (mapFile: MapFile | undefined): Promise<ProvisionalProofFile | undefined> {
+    // If there is no provisional proof file to download, just return.
+    if (mapFile === undefined || mapFile.model.provisionalProofFileUri === undefined) {
       return;
     }
 
+    const provisionalProofFileUri = mapFile.model.provisionalProofFileUri;
     console.info(`Downloading provisional proof file '${provisionalProofFileUri}', max file size limit ${ProtocolParameters.maxProofFileSizeInBytes}...`);
     const fileBuffer = await this.downloadFileFromCas(provisionalProofFileUri, ProtocolParameters.maxProofFileSizeInBytes);
     const provisionalProofFile = await ProvisionalProofFile.parse(fileBuffer);
 
+    const operationCountInMapFile = mapFile.didUniqueSuffixes.length;
+    const proofCountInProvisionalProofFile = provisionalProofFile.updateProofs.length;
+    if (operationCountInMapFile !== proofCountInProvisionalProofFile) {
+      throw new SidetreeError(
+        ErrorCode.ProvisionalProofFileProofCountNotTheSameAsOperationCountInMapFile,
+        `Proof count ${proofCountInProvisionalProofFile} in provisional proof file is different from operation count ${operationCountInMapFile} in map file.`
+      );
+    }
+
     return provisionalProofFile;
   }
 
@@ -244,6 +263,8 @@ export default class TransactionProcessor implements ITransactionProcessor {
     transaction: TransactionModel,
     anchorFile: AnchorFile,
     mapFile: MapFile | undefined,
+    coreProofFile: CoreProofFile | undefined,
+    provisionalProofFile: ProvisionalProofFile | undefined,
     chunkFile: ChunkFileModel | undefined
   ): Promise<AnchoredOperationModel[]> {
 
     // TODO: Issue 442 - https://github.com/decentralized-identity/sidetree/issues/442
     // Use actual operation request object instead of buffer.
 
+    // Prepare proofs to compose the original operation requests.
+    const proofs: (string | undefined)[] = createOperations.map(() => undefined); // Creates do not have proofs.
+ if (coreProofFile !== undefined) { + const recoverProofs = coreProofFile.recoverProofs.map((proof) => proof.signedDataJws.toCompactJws()); + const deactivateProofs = coreProofFile.deactivateProofs.map((proof) => proof.signedDataJws.toCompactJws()); + proofs.push(...recoverProofs); + proofs.push(...deactivateProofs); + } + if (provisionalProofFile !== undefined) { + const updateProofs = provisionalProofFile.updateProofs.map((proof) => proof.signedDataJws.toCompactJws()); + proofs.push(...updateProofs); + } + // NOTE: The last set of `operations` are deactivates, they don't have `delta` property. const anchoredOperationModels = []; for (let i = 0; i < operations.length; i++) { @@ -277,6 +311,11 @@ export default class TransactionProcessor implements ITransactionProcessor { operationObject.delta = chunkFile.deltas[i]; } + // Add the `signedData` property unless it is a create operation. + if (operation.type !== OperationType.Create) { + operationObject.signedData = proofs[i]; + } + const patchedOperationBuffer = Buffer.from(JSON.stringify(operationObject)); const anchoredOperationModel: AnchoredOperationModel = { didUniqueSuffix: operation.didUniqueSuffix, diff --git a/lib/core/versions/latest/models/AnchorFileModel.ts b/lib/core/versions/latest/models/AnchorFileModel.ts index 784777e0d..e028c342b 100644 --- a/lib/core/versions/latest/models/AnchorFileModel.ts +++ b/lib/core/versions/latest/models/AnchorFileModel.ts @@ -5,7 +5,6 @@ export default interface AnchorFileModel { writerLockId: string | undefined; mapFileUri: string; coreProofFileUri?: string; - provisionalProofFileUri?: string; operations: { create?: { suffixData: { diff --git a/lib/core/versions/latest/models/MapFileModel.ts b/lib/core/versions/latest/models/MapFileModel.ts index 668f7810f..2476ca18c 100644 --- a/lib/core/versions/latest/models/MapFileModel.ts +++ b/lib/core/versions/latest/models/MapFileModel.ts @@ -2,14 +2,14 @@ * Defines the external Map File structure. */ export default interface MapFileModel { - chunks: { - chunkFileUri: string - }[]; - + provisionalProofFileUri?: string; operations?: { update: { didSuffix: string, signedData: string }[] }; + chunks: { + chunkFileUri: string + }[]; } diff --git a/tests/core/AnchorFile.spec.ts b/tests/core/AnchorFile.spec.ts index e24717ec4..bbcdff7ad 100644 --- a/tests/core/AnchorFile.spec.ts +++ b/tests/core/AnchorFile.spec.ts @@ -12,7 +12,6 @@ describe('AnchorFile', async () => { it('should parse an anchor file model correctly.', async () => { const mapFileUri = 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA'; const coreProofFileUri = 'EiBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB'; - const provisionalProofFileUri = undefined; // Create operation. 
const createOperationData = await OperationGenerator.generateCreateOperation(); @@ -29,7 +28,7 @@ describe('AnchorFile', async () => { const deactivateOperation = deactivateOperationData.deactivateOperation; const anchorFileBuffer = await AnchorFile.createBuffer( - undefined, mapFileUri, coreProofFileUri, provisionalProofFileUri, [createOperation], [recoverOperation], [deactivateOperation] + undefined, mapFileUri, coreProofFileUri, [createOperation], [recoverOperation], [deactivateOperation] ); const parsedAnchorFile = await AnchorFile.parse(anchorFileBuffer); @@ -46,14 +45,13 @@ describe('AnchorFile', async () => { it('should throw error if core proof file is specified but there is no recover and no deactivate operation.', async () => { const mapFileUri = 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA'; const coreProofFileUri = 'EiBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB'; // Should not be allowed with no recovers and deactivates. - const provisionalProofFileUri = undefined; // Create operation. const createOperationData = await OperationGenerator.generateCreateOperation(); const createOperation = createOperationData.createOperation; const anchorFileBuffer = - await AnchorFile.createBuffer(undefined, mapFileUri, coreProofFileUri, provisionalProofFileUri, [createOperation], [], []); + await AnchorFile.createBuffer(undefined, mapFileUri, coreProofFileUri, [createOperation], [], []); JasmineSidetreeErrorValidator.expectSidetreeErrorToBeThrownAsync( () => AnchorFile.parse(anchorFileBuffer), @@ -150,23 +148,25 @@ describe('AnchorFile', async () => { const createOperationData = await OperationGenerator.generateCreateOperation(); const createOperation = createOperationData.createOperation; const coreProofFileHash = undefined; - const provisionalProofFileHash = undefined; - const anchorFileModel = await AnchorFile.createModel('writerLock', 'unusedMockFileHash', coreProofFileHash, provisionalProofFileHash, [createOperation], [], []); + const anchorFileModel = await AnchorFile.createModel('writerLock', 'unusedMockFileHash', coreProofFileHash, [createOperation], [], []); (anchorFileModel as any).mapFileUri = 1234; // Intentionally setting the mapFileUri as an incorrect type. 
const anchorFileBuffer = Buffer.from(JSON.stringify(anchorFileModel)); const anchorFileCompressed = await Compressor.compress(anchorFileBuffer); - await expectAsync(AnchorFile.parse(anchorFileCompressed)).toBeRejectedWith(new SidetreeError(ErrorCode.AnchorFileCasFileUriNotString)); + await JasmineSidetreeErrorValidator.expectSidetreeErrorToBeThrownAsync( + () => AnchorFile.parse(anchorFileCompressed), + ErrorCode.InputValidatorCasFileUriNotString, + 'map file URI' + ); }); it('should throw if map file hash is invalid.', async () => { const createOperationData = await OperationGenerator.generateCreateOperation(); const createOperation = createOperationData.createOperation; const coreProofFileHash = undefined; - const provisionalProofFileHash = undefined; - const anchorFileModel = await AnchorFile.createModel('writerLock', 'invalidMapFileHash', coreProofFileHash, provisionalProofFileHash, [createOperation], [], []); + const anchorFileModel = await AnchorFile.createModel('writerLock', 'invalidMapFileHash', coreProofFileHash, [createOperation], [], []); try { const anchorFileBuffer = Buffer.from(JSON.stringify(anchorFileModel)); @@ -174,7 +174,7 @@ describe('AnchorFile', async () => { await AnchorFile.parse(anchorFileCompressed); } catch (error) { - expect(error.code).toEqual(ErrorCode.AnchorFileCasFileUriUnsupported); + expect(error.code).toEqual(ErrorCode.InputValidatorCasFileUriUnsupported); } }); @@ -182,8 +182,7 @@ describe('AnchorFile', async () => { const createOperationData = await OperationGenerator.generateCreateOperation(); const createOperation = createOperationData.createOperation; const coreProofFileHash = undefined; - const provisionalProofFileHash = undefined; - const anchorFileModel = await AnchorFile.createModel('unusedWriterLockId', 'unusedMockFileHash', coreProofFileHash, provisionalProofFileHash, [createOperation], [], []); + const anchorFileModel = await AnchorFile.createModel('unusedWriterLockId', 'unusedMockFileHash', coreProofFileHash, [createOperation], [], []); (anchorFileModel as any).writerLockId = {}; // intentionally set to invalid value @@ -197,9 +196,8 @@ describe('AnchorFile', async () => { const createOperationData = await OperationGenerator.generateCreateOperation(); const createOperation = createOperationData.createOperation; const coreProofFileHash = undefined; - const provisionalProofFileHash = undefined; const anchorFileModel = - await AnchorFile.createModel('unusedWriterLockId', 'unusedMockFileHash', coreProofFileHash, provisionalProofFileHash, [createOperation], [], []); + await AnchorFile.createModel('unusedWriterLockId', 'unusedMockFileHash', coreProofFileHash, [createOperation], [], []); (anchorFileModel as any).writerLockId = crypto.randomBytes(2000).toString('hex'); // Intentionally larger than maximum. @@ -287,7 +285,6 @@ describe('AnchorFile', async () => { it('should created an anchor file model correctly.', async () => { const mapFileHash = 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA'; const coreProofFileHash = 'EiBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB'; - const provisionalProofFileHash = undefined; // Create operation. 
const createOperationData = await OperationGenerator.generateCreateOperation(); @@ -304,7 +301,7 @@ describe('AnchorFile', async () => { const deactivateOperation = deactivateOperationData.deactivateOperation; const anchorFileModel = await AnchorFile.createModel( - undefined, mapFileHash, coreProofFileHash, provisionalProofFileHash, [createOperation], [recoverOperation], [deactivateOperation] + undefined, mapFileHash, coreProofFileHash, [createOperation], [recoverOperation], [deactivateOperation] ); expect(anchorFileModel.mapFileUri).toEqual(mapFileHash); @@ -328,11 +325,10 @@ describe('AnchorFile', async () => { it('should created a compressed buffer correctly.', async () => { const mapFileHash = 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA'; const coreProofFileHash = undefined; - const provisionalProofFileHash = undefined; const createOperationData = await OperationGenerator.generateCreateOperation(); const createOperation = createOperationData.createOperation; - const anchorFileBuffer = await AnchorFile.createBuffer(undefined, mapFileHash, coreProofFileHash, provisionalProofFileHash, [createOperation], [], []); + const anchorFileBuffer = await AnchorFile.createBuffer(undefined, mapFileHash, coreProofFileHash, [createOperation], [], []); const anchorFile = await AnchorFile.parse(anchorFileBuffer); diff --git a/tests/core/CoreProofFile.spec.ts b/tests/core/CoreProofFile.spec.ts index cfdddcf23..11b3c08e7 100644 --- a/tests/core/CoreProofFile.spec.ts +++ b/tests/core/CoreProofFile.spec.ts @@ -6,11 +6,10 @@ import Jwk from '../../lib/core/versions/latest/util/Jwk'; import OperationGenerator from '../generators/OperationGenerator'; describe('CoreProofFile', async () => { - - xit('Anchor file and core proof file must have matching content.', async () => { + xit('Relax logic when failed downloading of core proof files.', async () => { }); - xit('Map file and provisional file must have matching content.', async () => { + xit('(Same as above) Should allow operations in Anchor file to be constructed even if (map, provisional proof files are not downloadable).', async () => { }); xit('Batch writer should not write a proof file that is over the size limit.', async () => { diff --git a/tests/core/MapFile.spec.ts b/tests/core/MapFile.spec.ts index afee26cf0..315dc69fd 100644 --- a/tests/core/MapFile.spec.ts +++ b/tests/core/MapFile.spec.ts @@ -33,7 +33,7 @@ describe('MapFile', async () => { it('should throw if has an unknown property.', async () => { const mapFile = { unknownProperty: 'Unknown property', - ChunkFileHash: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' + chunks: [{ chunkFileUri: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' }] }; const fileBuffer = Buffer.from(JSON.stringify(mapFile)); const fileCompressed = await Compressor.compress(fileBuffer); @@ -43,13 +43,27 @@ describe('MapFile', async () => { it('should throw if missing chunk file hash.', async () => { const mapFile = { - // ChunkFileHash: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA', // Intentionally kept to show what the expected property should be. + // chunks: [{ chunkFileUri: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' }], // Intentionally kept to show what the expected property should be. 
}; const fileBuffer = Buffer.from(JSON.stringify(mapFile)); const fileCompressed = await Compressor.compress(fileBuffer); await expectAsync(MapFile.parse(fileCompressed)).toBeRejectedWith(new SidetreeError(ErrorCode.MapFileChunksPropertyMissingOrIncorrectType)); }); + + it('should throw if there is no updates but a provisional proof file URI is given.', async () => { + const mapFile: MapFileModel = { + provisionalProofFileUri: 'EiBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB', + chunks: [{ chunkFileUri: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA' }] + }; + const fileBuffer = Buffer.from(JSON.stringify(mapFile)); + const fileCompressed = await Compressor.compress(fileBuffer); + + await JasmineSidetreeErrorValidator.expectSidetreeErrorToBeThrownAsync( + () => MapFile.parse(fileCompressed), + ErrorCode.MapFileProvisionalProofFileUriNotAllowed + ); + }); }); describe('parseOperationsProperty()', async () => { diff --git a/tests/core/Observer.spec.ts b/tests/core/Observer.spec.ts index 5b9501772..ee936ff87 100644 --- a/tests/core/Observer.spec.ts +++ b/tests/core/Observer.spec.ts @@ -164,7 +164,6 @@ describe('Observer', async () => { const createOperations = [operation1Data.createOperation, operation2Data.createOperation]; const coreProofFileHash = undefined; - const provisionalProofFileHash = undefined; // Generating chunk file data. const mockChunkFileBuffer = await ChunkFile.createBuffer(createOperations, [], []); @@ -175,7 +174,8 @@ describe('Observer', async () => { const mockChunkFileHash = Encoder.encode(Multihash.hash(Buffer.from('MockChunkFileHash'))); // Generating map file data. - const mockMapFileBuffer = await MapFile.createBuffer(mockChunkFileHash, []); + const mockProvisionalProofFileUri = undefined; + const mockMapFileBuffer = await MapFile.createBuffer(mockChunkFileHash, mockProvisionalProofFileUri, []); const mockMapFileHash = Encoder.encode(Multihash.hash(Buffer.from('MockMapFileHash'))); const mockMapFileFetchResult: FetchResult = { code: FetchResultCode.Success, @@ -184,7 +184,7 @@ describe('Observer', async () => { // Generating anchor file data. 
const mockAnchorFileBuffer = - await AnchorFile.createBuffer('writerlock', mockMapFileHash, coreProofFileHash, provisionalProofFileHash, createOperations, [], []); + await AnchorFile.createBuffer('writerlock', mockMapFileHash, coreProofFileHash, createOperations, [], []); const mockAnchoredFileFetchResult: FetchResult = { code: FetchResultCode.Success, content: mockAnchorFileBuffer diff --git a/tests/core/TransactionProcessor.spec.ts b/tests/core/TransactionProcessor.spec.ts index 26f8ae3a9..3d09be7dc 100644 --- a/tests/core/TransactionProcessor.spec.ts +++ b/tests/core/TransactionProcessor.spec.ts @@ -7,6 +7,7 @@ import DownloadManager from '../../lib/core/DownloadManager'; import ErrorCode from '../../lib/core/versions/latest/ErrorCode'; import FetchResult from '../../lib/common/models/FetchResult'; import FetchResultCode from '../../lib/common/enums/FetchResultCode'; +import FileGenerator from '../generators/FileGenerator'; import IBlockchain from '../../lib/core/interfaces/IBlockchain'; import Ipfs from '../../lib/ipfs/Ipfs'; import JasmineSidetreeErrorValidator from '../JasmineSidetreeErrorValidator'; @@ -231,7 +232,7 @@ describe('TransactionProcessor', () => { const createOperation1 = (await OperationGenerator.generateCreateOperation()).createOperation; const createOperation2 = (await OperationGenerator.generateCreateOperation()).createOperation; const anyHash = OperationGenerator.generateRandomHash(); - const mockAnchorFileModel = await AnchorFile.createModel('writerLockId', anyHash, undefined, undefined, [createOperation1, createOperation2], [], []); + const mockAnchorFileModel = await AnchorFile.createModel('writerLockId', anyHash, undefined, [createOperation1, createOperation2], [], []); const mockAnchorFileBuffer = await Compressor.compress(Buffer.from(JSON.stringify(mockAnchorFileModel))); spyOn(transactionProcessor as any, 'downloadFileFromCas').and.returnValue(Promise.resolve(mockAnchorFileBuffer)); @@ -308,7 +309,7 @@ describe('TransactionProcessor', () => { it('should return the parsed file.', async (done) => { const createOperationData = await OperationGenerator.generateCreateOperation(); const anyHash = OperationGenerator.generateRandomHash(); - const mockAnchorFileModel = await AnchorFile.createModel('wrierLockId', anyHash, undefined, undefined, [createOperationData.createOperation], [], []); + const mockAnchorFileModel = await AnchorFile.createModel('wrierLockId', anyHash, undefined, [createOperationData.createOperation], [], []); const mockAnchorFileBuffer = await Compressor.compress(Buffer.from(JSON.stringify(mockAnchorFileModel))); spyOn(transactionProcessor as any, 'downloadFileFromCas').and.returnValue(Promise.resolve(mockAnchorFileBuffer)); @@ -333,18 +334,18 @@ describe('TransactionProcessor', () => { }); describe('downloadAndVerifyMapFile', () => { - it('should validate the map file when the map file does not declare the `operations` property.', async (done) => { + it('should validate a valid map file for the case that it does not have the `operations` property.', async (done) => { const createOperationData = await OperationGenerator.generateCreateOperation(); const mapFileHash = OperationGenerator.generateRandomHash(); const coreProofFileHash = undefined; - const provisionalProofFileHash = undefined; const anchorFileBuffer = - await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, provisionalProofFileHash, [createOperationData.createOperation], [], []); + await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, 
[createOperationData.createOperation], [], []); const anchorFile = await AnchorFile.parse(anchorFileBuffer); // Setting up a mock map file that has 1 update in it to be downloaded. + const provisionalProofFileHash = undefined; const chunkFileHash = OperationGenerator.generateRandomHash(); - const mockMapFileBuffer = await MapFile.createBuffer(chunkFileHash, []); + const mockMapFileBuffer = await MapFile.createBuffer(chunkFileHash, provisionalProofFileHash, []); spyOn(transactionProcessor as any, 'downloadFileFromCas').and.returnValue(Promise.resolve(mockMapFileBuffer)); // Setting the total paid operation count to be 1 (needs to be at least 2 in success case). @@ -361,15 +362,15 @@ describe('TransactionProcessor', () => { const createOperationData = await OperationGenerator.generateCreateOperation(); const mapFileHash = OperationGenerator.generateRandomHash(); const coreProofFileHash = undefined; - const provisionalProofFileHash = undefined; const anchorFileBuffer = - await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, provisionalProofFileHash, [createOperationData.createOperation], [], []); + await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, [createOperationData.createOperation], [], []); const anchorFile = await AnchorFile.parse(anchorFileBuffer); // Setting up a mock map file that has 1 update in it to be downloaded. + const provisionalProofFileHash = undefined; const updateOperationRequestData = await OperationGenerator.generateUpdateOperationRequest(); const chunkFileHash = OperationGenerator.generateRandomHash(); - const mockMapFileBuffer = await MapFile.createBuffer(chunkFileHash, [updateOperationRequestData.updateOperation]); + const mockMapFileBuffer = await MapFile.createBuffer(chunkFileHash, provisionalProofFileHash, [updateOperationRequestData.updateOperation]); spyOn(transactionProcessor as any, 'downloadFileFromCas').and.returnValue(Promise.resolve(mockMapFileBuffer)); // Setting the total paid operation count to be 1 (needs to be at least 2 in success case). @@ -384,15 +385,15 @@ describe('TransactionProcessor', () => { const createOperationData = await OperationGenerator.generateCreateOperation(); const mapFileHash = OperationGenerator.generateRandomHash(); const coreProofFileHash = undefined; - const provisionalProofFileHash = undefined; const anchorFileBuffer = - await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, provisionalProofFileHash, [createOperationData.createOperation], [], []); + await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, [createOperationData.createOperation], [], []); const anchorFile = await AnchorFile.parse(anchorFileBuffer); // Setting up a mock map file that has 1 update in it to be downloaded. 
+ const provisionalProofFileHash = OperationGenerator.generateRandomHash(); const updateOperationRequestData = await OperationGenerator.generateUpdateOperationRequest(createOperationData.createOperation.didUniqueSuffix); const chunkFileHash = OperationGenerator.generateRandomHash(); - const mockMapFileBuffer = await MapFile.createBuffer(chunkFileHash, [updateOperationRequestData.updateOperation]); + const mockMapFileBuffer = await MapFile.createBuffer(chunkFileHash, provisionalProofFileHash, [updateOperationRequestData.updateOperation]); spyOn(transactionProcessor as any, 'downloadFileFromCas').and.returnValue(Promise.resolve(mockMapFileBuffer)); const totalPaidOperationCount = 10; @@ -406,9 +407,8 @@ describe('TransactionProcessor', () => { const createOperationData = await OperationGenerator.generateCreateOperation(); const mapFileHash = OperationGenerator.generateRandomHash(); const coreProofFileHash = undefined; - const provisionalProofFileHash = undefined; const anchorFileBuffer = - await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, provisionalProofFileHash, [createOperationData.createOperation], [], []); + await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, [createOperationData.createOperation], [], []); const anchorFile = await AnchorFile.parse(anchorFileBuffer); // Mocking an unexpected error thrown. @@ -425,9 +425,8 @@ describe('TransactionProcessor', () => { const createOperationData = await OperationGenerator.generateCreateOperation(); const mapFileHash = OperationGenerator.generateRandomHash(); const coreProofFileHash = undefined; - const provisionalProofFileHash = undefined; const anchorFileBuffer = - await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, provisionalProofFileHash, [createOperationData.createOperation], [], []); + await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, [createOperationData.createOperation], [], []); const anchorFile = await AnchorFile.parse(anchorFileBuffer); // Mocking a non-network related known error thrown. @@ -446,9 +445,8 @@ describe('TransactionProcessor', () => { const createOperationData = await OperationGenerator.generateCreateOperation(); const mapFileHash = OperationGenerator.generateRandomHash(); const coreProofFileHash = undefined; - const provisionalProofFileHash = undefined; const anchorFileBuffer = - await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, provisionalProofFileHash, [createOperationData.createOperation], [], []); + await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, [createOperationData.createOperation], [], []); const anchorFile = await AnchorFile.parse(anchorFileBuffer); // Mocking a non-network related known error thrown. 
@@ -480,45 +478,92 @@ describe('TransactionProcessor', () => { const mapFileHash = OperationGenerator.generateRandomHash(); const coreProofFileHash = OperationGenerator.generateRandomHash(); - const provisionalProofFileHash = undefined; const anchorFileBuffer = await AnchorFile.createBuffer( - 'writerLockId', mapFileHash, coreProofFileHash, provisionalProofFileHash, [createOperation], [recoverOperation], [deactivateOperation] + 'writerLockId', mapFileHash, coreProofFileHash, [createOperation], [recoverOperation], [deactivateOperation] ); const anchorFile = await AnchorFile.parse(anchorFileBuffer); - const downloadFileFromCasSpy = spyOn(transactionProcessor as any, 'downloadFileFromCas'); - const coreProofFileParseSpy = spyOn(CoreProofFile, 'parse'); - await transactionProcessor['downloadAndVerifyCoreProofFile'](anchorFile); + const mockCoreProofFileBuffer = await CoreProofFile.createBuffer([recoverOperation], [deactivateOperation]); + spyOn(transactionProcessor as any, 'downloadFileFromCas').and.returnValue(Promise.resolve(mockCoreProofFileBuffer)); - expect(downloadFileFromCasSpy).toHaveBeenCalled(); - expect(coreProofFileParseSpy).toHaveBeenCalled(); + const actualProcessedCoreProofFile = await transactionProcessor['downloadAndVerifyCoreProofFile'](anchorFile); + expect(actualProcessedCoreProofFile).toBeDefined(); + expect(actualProcessedCoreProofFile!.recoverProofs.length).toEqual(1); + expect(actualProcessedCoreProofFile!.recoverProofs[0].signedDataJws).toEqual(recoverOperationData.recoverOperation.signedDataJws); + expect(actualProcessedCoreProofFile!.deactivateProofs.length).toEqual(1); + expect(actualProcessedCoreProofFile!.deactivateProofs[0].signedDataJws).toEqual(deactivateOperationData.deactivateOperation.signedDataJws); }); - }); - - describe('downloadAndVerifyProvisionalProofFile()', () => { - it('should download and parse the provisional proof file.', async () => { - const createOperationData = await OperationGenerator.generateCreateOperation(); - const createOperation = createOperationData.createOperation; - const [, recoveryPrivateKey] = await Jwk.generateEs256kKeyPair(); + it('should throw if core proof count is not the same as the recover and deactivate combined count.', async () => { + const [, anyPrivateKey] = await Jwk.generateEs256kKeyPair(); const recoverOperationData = await OperationGenerator.generateRecoverOperation( - { didUniqueSuffix: 'EiBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB', recoveryPrivateKey } + { didUniqueSuffix: 'EiBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB', recoveryPrivateKey: anyPrivateKey } ); const recoverOperation = recoverOperationData.recoverOperation; + const deactivateOperationData = await OperationGenerator.createDeactivateOperation('EiAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA', anyPrivateKey); + const deactivateOperation = deactivateOperationData.deactivateOperation; + const mapFileHash = OperationGenerator.generateRandomHash(); const coreProofFileHash = OperationGenerator.generateRandomHash(); - const provisionalProofFileHash = OperationGenerator.generateRandomHash(); - const anchorFileBuffer = - await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, provisionalProofFileHash, [createOperation], [recoverOperation], []); + const anchorFileBuffer = await AnchorFile.createBuffer( + 'writerLockId', mapFileHash, coreProofFileHash, [], [recoverOperation], [deactivateOperation] + ); const anchorFile = await AnchorFile.parse(anchorFileBuffer); - const downloadFileFromCasSpy = spyOn(transactionProcessor as any, 
'downloadFileFromCas'); - const provisionalProofFileParseSpy = spyOn(ProvisionalProofFile, 'parse'); - await transactionProcessor['downloadAndVerifyProvisionalProofFile'](anchorFile); + const mockCoreProofFileBuffer = await CoreProofFile.createBuffer([recoverOperation], []); // Intentionally missing proofs for deactivate. + spyOn(transactionProcessor as any, 'downloadFileFromCas').and.returnValue(Promise.resolve(mockCoreProofFileBuffer)); - expect(downloadFileFromCasSpy).toHaveBeenCalled(); - expect(provisionalProofFileParseSpy).toHaveBeenCalled(); + await JasmineSidetreeErrorValidator.expectSidetreeErrorToBeThrownAsync( + () => transactionProcessor['downloadAndVerifyCoreProofFile'](anchorFile), + ErrorCode.CoreProofFileProofCountNotTheSameAsOperationCountInAnchorFile + ); + }); + }); + + describe('downloadAndVerifyProvisionalProofFile()', () => { + it('should download and parse the provisional proof file.', async () => { + const [updatePublicKey, updatePrivateKey] = await Jwk.generateEs256kKeyPair(); + const updateOperationData = await OperationGenerator.generateUpdateOperation( + 'EiBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB', updatePublicKey, updatePrivateKey + ); + const updateOperation = updateOperationData.updateOperation; + + const chunkFileHash = OperationGenerator.generateRandomHash(); + const provisionalProofFileHash = OperationGenerator.generateRandomHash(); + + const mapFileBuffer = await MapFile.createBuffer(chunkFileHash, provisionalProofFileHash, [updateOperation]); + const mapFile = await MapFile.parse(mapFileBuffer); + + const mockProvisionalProofFileBuffer = await ProvisionalProofFile.createBuffer([updateOperation]); + spyOn(transactionProcessor as any, 'downloadFileFromCas').and.returnValue(Promise.resolve(mockProvisionalProofFileBuffer)); + + const actualProcessedProvisionalProofFile = await transactionProcessor['downloadAndVerifyProvisionalProofFile'](mapFile); + expect(actualProcessedProvisionalProofFile).toBeDefined(); + expect(actualProcessedProvisionalProofFile!.updateProofs.length).toEqual(1); + expect(actualProcessedProvisionalProofFile!.updateProofs[0].signedDataJws).toEqual(updateOperationData.updateOperation.signedDataJws); + }); + + it('should throw if provisional proof count is not the same as update operation count.', async () => { + const [updatePublicKey, updatePrivateKey] = await Jwk.generateEs256kKeyPair(); + const updateOperationData = await OperationGenerator.generateUpdateOperation( + 'EiBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB', updatePublicKey, updatePrivateKey + ); + const updateOperation = updateOperationData.updateOperation; + + const chunkFileHash = OperationGenerator.generateRandomHash(); + const provisionalProofFileHash = OperationGenerator.generateRandomHash(); + + const mapFileBuffer = await MapFile.createBuffer(chunkFileHash, provisionalProofFileHash, [updateOperation]); + const mapFile = await MapFile.parse(mapFileBuffer); + + const mockProvisionalProofFileBuffer = await ProvisionalProofFile.createBuffer([updateOperation, updateOperation]); // Intentionally having 2 proofs. 
+ spyOn(transactionProcessor as any, 'downloadFileFromCas').and.returnValue(Promise.resolve(mockProvisionalProofFileBuffer)); + + await JasmineSidetreeErrorValidator.expectSidetreeErrorToBeThrownAsync( + () => transactionProcessor['downloadAndVerifyProvisionalProofFile'](mapFile), + ErrorCode.ProvisionalProofFileProofCountNotTheSameAsOperationCountInMapFile + ); }); }); @@ -533,7 +578,7 @@ describe('TransactionProcessor', () => { it('should return undefined if unexpected error caught.', async (done) => { const anyHash = OperationGenerator.generateRandomHash(); - const mapFileBuffer = await MapFile.createBuffer(anyHash, []); + const mapFileBuffer = await MapFile.createBuffer(anyHash, anyHash, []); const mapFileModel = await MapFile.parse(mapFileBuffer); // Mocking an unexpected error thrown. @@ -547,7 +592,7 @@ describe('TransactionProcessor', () => { it('should throw if a network related error is caught.', async (done) => { const anyHash = OperationGenerator.generateRandomHash(); - const mapFileBuffer = await MapFile.createBuffer(anyHash, []); + const mapFileBuffer = await MapFile.createBuffer(anyHash, anyHash, []); const mapFileModel = await MapFile.parse(mapFileBuffer); // Mocking a non-network related known error thrown. @@ -563,7 +608,7 @@ describe('TransactionProcessor', () => { it('should return undefined if non-network related known error is caught.', async (done) => { const anyHash = OperationGenerator.generateRandomHash(); - const mapFileBuffer = await MapFile.createBuffer(anyHash, []); + const mapFileBuffer = await MapFile.createBuffer(anyHash, anyHash, []); const mapFileModel = await MapFile.parse(mapFileBuffer); // Mocking a non-network related known error thrown. @@ -591,34 +636,47 @@ describe('TransactionProcessor', () => { writer: 'anyWriter' }; - // Create anchor file with 1 create operation. + // Create anchor file with 1 create and 1 recover operation. const createOperationData = await OperationGenerator.generateCreateOperation(); const createOperation = createOperationData.createOperation; + const [, recoveryPrivateKey] = await Jwk.generateEs256kKeyPair(); + const recoverOperationData = await OperationGenerator.generateRecoverOperation({ + didUniqueSuffix: OperationGenerator.generateRandomHash(), + recoveryPrivateKey + }); + const recoverOperation = recoverOperationData.recoverOperation; const mapFileHash = OperationGenerator.generateRandomHash(); - const coreProofFileHash = undefined; - const provisionalProofFileHash = undefined; + const coreProofFileHash = OperationGenerator.generateRandomHash(); const anchorFileBuffer = - await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, provisionalProofFileHash, [createOperation], [], []); + await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, [createOperation], [recoverOperation], []); const anchorFile = await AnchorFile.parse(anchorFileBuffer); // Create map file model with 1 update operation. 
+ const provisionalProofFileHash = OperationGenerator.generateRandomHash(); const updateOperationRequestData = await OperationGenerator.generateUpdateOperationRequest(); const updateOperation = updateOperationRequestData.updateOperation; const chunkFileHash = OperationGenerator.generateRandomHash(); - const mapFileBuffer = await MapFile.createBuffer(chunkFileHash, [updateOperation]); + const mapFileBuffer = await MapFile.createBuffer(chunkFileHash, provisionalProofFileHash, [updateOperation]); const mapFileModel = await MapFile.parse(mapFileBuffer); + // Create core and provisional proof file. + const coreProofFile = await FileGenerator.createCoreProofFile([recoverOperation], []); + const provisionalProofFile = await FileGenerator.createProvisionalProofFile([updateOperation]); + // Create chunk file model with delta for the 2 operations created above. const chunkFileBuffer = await ChunkFile.createBuffer([createOperation], [], [updateOperation]); const chunkFileModel = await ChunkFile.parse(chunkFileBuffer); - const anchoredOperationModels = await transactionProcessor['composeAnchoredOperationModels'](transactionModel, anchorFile, mapFileModel, chunkFileModel); + const anchoredOperationModels = await transactionProcessor['composeAnchoredOperationModels']( + transactionModel, anchorFile, mapFileModel, coreProofFile, provisionalProofFile, chunkFileModel + ); - expect(anchoredOperationModels.length).toEqual(2); + expect(anchoredOperationModels.length).toEqual(3); expect(anchoredOperationModels[0].didUniqueSuffix).toEqual(createOperation.didUniqueSuffix); expect(anchoredOperationModels[0].operationIndex).toEqual(0); expect(anchoredOperationModels[0].transactionTime).toEqual(1); - expect(anchoredOperationModels[1].didUniqueSuffix).toEqual(updateOperation.didUniqueSuffix); + expect(anchoredOperationModels[1].didUniqueSuffix).toEqual(recoverOperation.didUniqueSuffix); + expect(anchoredOperationModels[2].didUniqueSuffix).toEqual(updateOperation.didUniqueSuffix); done(); }); @@ -639,12 +697,10 @@ describe('TransactionProcessor', () => { const createOperation = createOperationData.createOperation; const mapFileHash = OperationGenerator.generateRandomHash(); const coreProofFileHash = undefined; - const provisionalProofFileHash = undefined; - const anchorFileBuffer = - await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, provisionalProofFileHash, [createOperation], [], []); + const anchorFileBuffer = await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, [createOperation], [], []); const anchorFile = await AnchorFile.parse(anchorFileBuffer); - const anchoredOperationModels = await transactionProcessor['composeAnchoredOperationModels'](transactionModel, anchorFile, undefined, undefined); + const anchoredOperationModels = await transactionProcessor['composeAnchoredOperationModels'](transactionModel, anchorFile, undefined, undefined, undefined, undefined); expect(anchoredOperationModels.length).toEqual(1); expect(anchoredOperationModels[0].didUniqueSuffix).toEqual(createOperation.didUniqueSuffix); @@ -672,11 +728,14 @@ describe('TransactionProcessor', () => { const deactivateOperation = deactivateOperationData.deactivateOperation; const mapFileHash = OperationGenerator.generateRandomHash(); const coreProofFileHash = OperationGenerator.generateRandomHash(); - const provisionalProofFileHash = undefined; - const anchorFileBuffer = await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, provisionalProofFileHash, [], [], 
[deactivateOperation]); + const anchorFileBuffer = await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, [], [], [deactivateOperation]); const anchorFile = await AnchorFile.parse(anchorFileBuffer); - const anchoredOperationModels = await transactionProcessor['composeAnchoredOperationModels'](transactionModel, anchorFile, undefined, undefined); + // Construct the core proof file to go with the deactivate operation. + const coreProofFile = await FileGenerator.createCoreProofFile([], [deactivateOperation]); + const anchoredOperationModels = await transactionProcessor['composeAnchoredOperationModels']( + transactionModel, anchorFile, undefined, coreProofFile, undefined, undefined + ); const returnedOperation = await Operation.parse(anchoredOperationModels[0].operationBuffer); expect(returnedOperation.didUniqueSuffix).toEqual(deactivateOperation.didUniqueSuffix); diff --git a/tests/generators/FileGenerator.ts b/tests/generators/FileGenerator.ts new file mode 100644 index 000000000..b1f7da436 --- /dev/null +++ b/tests/generators/FileGenerator.ts @@ -0,0 +1,43 @@ +import CoreProofFile from '../../lib/core/versions/latest/CoreProofFile'; +import DeactivateOperation from '../../lib/core/versions/latest/DeactivateOperation'; +import ProvisionalProofFile from '../../lib/core/versions/latest/ProvisionalProofFile'; +import RecoverOperation from '../../lib/core/versions/latest/RecoverOperation'; +import UpdateOperation from '../../lib/core/versions/latest/UpdateOperation'; + +/** + * A class containing methods for generating various Sidetree files. + * Mainly useful for testing purposes. + */ +export default class FileGenerator { + + /** + * Creates a `CoreProofFile`, mainly used for testing purposes. + */ + public static async createCoreProofFile ( + recoverOperations: RecoverOperation[], deactivateOperations: DeactivateOperation[] + ): Promise { + const deactivatedDidUniqueSuffixes = deactivateOperations.map(operation => operation.didUniqueSuffix); + const coreProofFileBuffer = await CoreProofFile.createBuffer(recoverOperations, deactivateOperations); + + if (coreProofFileBuffer === undefined) { + return undefined; + } + + const coreProofFile = await CoreProofFile.parse(coreProofFileBuffer, deactivatedDidUniqueSuffixes); + return coreProofFile; + } + + /** + * Creates a `ProvisionalProofFile`, mainly used for testing purposes. 
+   */
+  public static async createProvisionalProofFile (updateOperations: UpdateOperation[]): Promise<ProvisionalProofFile | undefined> {
+    const provisionalProofFileBuffer = await ProvisionalProofFile.createBuffer(updateOperations);
+
+    if (provisionalProofFileBuffer === undefined) {
+      return undefined;
+    }
+
+    const provisionalProofFile = await ProvisionalProofFile.parse(provisionalProofFileBuffer);
+    return provisionalProofFile;
+  }
+}
diff --git a/tests/generators/OperationGenerator.ts b/tests/generators/OperationGenerator.ts
index d2a033e1c..98368fd94 100644
--- a/tests/generators/OperationGenerator.ts
+++ b/tests/generators/OperationGenerator.ts
@@ -672,7 +672,6 @@ export default class OperationGenerator {
   public static async generateAnchorFile (recoveryOperationCount: number): Promise<Buffer> {
     const mapFileUri = 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA';
     const coreProofFileUri = 'EiBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB';
-    const provisionalProofFileHash = undefined;
 
     const recoverOperations = [];
 
@@ -685,7 +684,7 @@ export default class OperationGenerator {
       recoverOperations.push(recoverOperation);
     }
 
-    const anchorFileBuffer = await AnchorFile.createBuffer(undefined, mapFileUri, coreProofFileUri, provisionalProofFileHash, [], recoverOperations, []);
+    const anchorFileBuffer = await AnchorFile.createBuffer(undefined, mapFileUri, coreProofFileUri, [], recoverOperations, []);
 
    return anchorFileBuffer;
  }