From 8a62900422f62f9a9b5bed654f3aa91c0f67e1e0 Mon Sep 17 00:00:00 2001 From: Henry Tsai <17891086+thehenrytsai@users.noreply.github.com> Date: Tue, 24 Nov 2020 10:30:30 -0800 Subject: [PATCH] feat(ref-imp): #766 - Updated anchor (core index) file schema --- lib/core/versions/latest/AnchorFile.ts | 99 +++++++----- .../versions/latest/DeactivateOperation.ts | 26 +--- lib/core/versions/latest/ErrorCode.ts | 4 +- lib/core/versions/latest/InputValidator.ts | 25 +++ lib/core/versions/latest/MapFile.ts | 35 +---- lib/core/versions/latest/Operation.ts | 4 +- lib/core/versions/latest/RecoverOperation.ts | 49 ++---- .../versions/latest/TransactionProcessor.ts | 144 ++++++++++++++---- lib/core/versions/latest/UpdateOperation.ts | 2 +- .../versions/latest/models/AnchorFileModel.ts | 18 +-- tests/core/AnchorFile.spec.ts | 47 +++--- tests/core/CoreProofFile.spec.ts | 9 ++ tests/core/DeactivateOperation.spec.ts | 4 +- tests/core/MapFile.spec.ts | 6 +- tests/core/RecoverOperation.spec.ts | 30 +--- tests/core/TransactionProcessor.spec.ts | 83 +++++++++- 16 files changed, 362 insertions(+), 223 deletions(-) diff --git a/lib/core/versions/latest/AnchorFile.ts b/lib/core/versions/latest/AnchorFile.ts index ebf7289de..b8acaaf3f 100644 --- a/lib/core/versions/latest/AnchorFile.ts +++ b/lib/core/versions/latest/AnchorFile.ts @@ -6,6 +6,8 @@ import DeactivateOperation from './DeactivateOperation'; import ErrorCode from './ErrorCode'; import InputValidator from './InputValidator'; import JsonAsync from './util/JsonAsync'; +import Multihash from './Multihash'; +import OperationReferenceModel from './models/OperationReferenceModel'; import ProtocolParameters from './ProtocolParameters'; import RecoverOperation from './RecoverOperation'; import SidetreeError from '../../../common/SidetreeError'; @@ -24,8 +26,8 @@ export default class AnchorFile { public readonly model: AnchorFileModel, public readonly didUniqueSuffixes: string[], public readonly createOperations: CreateOperation[], - public 
readonly recoverOperations: RecoverOperation[], - public readonly deactivateOperations: DeactivateOperation[]) { } + public readonly recoverDidSuffixes: string[], + public readonly deactivateDidSuffixes: string[]) { } /** * Parses and validates the given anchor file buffer. @@ -55,6 +57,9 @@ export default class AnchorFile { } } + // TODO: #631 - If `operations` does not exist, then `mapFileUri` MUST exist. ie. There must be at least one operation in a batch. + // TODO: #631 - If `mapFileUri` does not exist, then `operations` MUST have just deactivates. ie. non-deactivates have delta in chunk file. + if (!('mapFileUri' in anchorFileModel)) { throw new SidetreeError(ErrorCode.AnchorFileMapFileUriMissing); } @@ -105,33 +110,29 @@ export default class AnchorFile { } // Validate `recover` if exists. - const recoverOperations: RecoverOperation[] = []; + let recoverDidSuffixes: string[] = []; if (operations.recover !== undefined) { if (!Array.isArray(operations.recover)) { throw new SidetreeError(ErrorCode.AnchorFileRecoverPropertyNotArray); } - // Validate every recover operation. - for (const operation of operations.recover) { - const recoverOperation = await RecoverOperation.parseOperationFromAnchorFile(operation); - recoverOperations.push(recoverOperation); - didUniqueSuffixes.push(recoverOperation.didUniqueSuffix); - } + // Validate every recover reference. + InputValidator.validateOperationReferences(operations.recover, 'recover'); + recoverDidSuffixes = (operations.recover as OperationReferenceModel[]).map(operation => operation.didSuffix); + didUniqueSuffixes.push(...recoverDidSuffixes); } // Validate `deactivate` if exists. - const deactivateOperations: DeactivateOperation[] = []; + let deactivateDidSuffixes: string[] = []; if (operations.deactivate !== undefined) { if (!Array.isArray(operations.deactivate)) { throw new SidetreeError(ErrorCode.AnchorFileDeactivatePropertyNotArray); } - // Validate every operation. 
- for (const operation of operations.deactivate) { - const deactivateOperation = await DeactivateOperation.parseOperationFromAnchorFile(operation); - deactivateOperations.push(deactivateOperation); - didUniqueSuffixes.push(deactivateOperation.didUniqueSuffix); - } + // Validate every deactivate reference. + InputValidator.validateOperationReferences(operations.deactivate, 'deactivate'); + deactivateDidSuffixes = (operations.deactivate as OperationReferenceModel[]).map(operation => operation.didSuffix); + didUniqueSuffixes.push(...deactivateDidSuffixes); } if (ArrayMethods.hasDuplicates(didUniqueSuffixes)) { @@ -139,7 +140,7 @@ export default class AnchorFile { } // Validate core proof file URI. - if (recoverOperations.length > 0 || deactivateOperations.length > 0) { + if (recoverDidSuffixes.length > 0 || deactivateDidSuffixes.length > 0) { InputValidator.validateCasFileUri(anchorFileModel.coreProofFileUri, 'core proof file URI'); } else { if (anchorFileModel.coreProofFileUri !== undefined) { @@ -150,7 +151,7 @@ export default class AnchorFile { } } - const anchorFile = new AnchorFile(anchorFileModel, didUniqueSuffixes, createOperations, recoverOperations, deactivateOperations); + const anchorFile = new AnchorFile(anchorFileModel, didUniqueSuffixes, createOperations, recoverDidSuffixes, deactivateDidSuffixes); return anchorFile; } @@ -159,7 +160,7 @@ export default class AnchorFile { */ public static async createModel ( writerLockId: string | undefined, - mapFileHash: string, + mapFileUri: string | undefined, coreProofFileHash: string | undefined, createOperationArray: CreateOperation[], recoverOperationArray: RecoverOperation[], @@ -170,7 +171,19 @@ export default class AnchorFile { AnchorFile.validateWriterLockId(writerLockId); } - const createOperations = createOperationArray.map(operation => { + const anchorFileModel: AnchorFileModel = { + writerLockId, + mapFileUri + }; + + // Only insert `operations` property if there is at least one operation reference. 
+ if (createOperationArray.length > 0 || + recoverOperationArray.length > 0 || + deactivateOperationArray.length > 0) { + anchorFileModel.operations = { }; + } + + const createReferences = createOperationArray.map(operation => { return { suffixData: { deltaHash: operation.suffixData.deltaHash, @@ -180,30 +193,32 @@ export default class AnchorFile { }; }); - const recoverOperations = recoverOperationArray.map(operation => { - return { - didSuffix: operation.didUniqueSuffix, - signedData: operation.signedDataJws.toCompactJws() - }; + // Only insert `create` property if there are create operation references. + if (createReferences.length > 0) { + anchorFileModel.operations!.create = createReferences; + } + + const recoverReferences = recoverOperationArray.map(operation => { + const revealValue = Multihash.canonicalizeThenHashThenEncode(operation.signedData.recoveryKey); + + return { didSuffix: operation.didUniqueSuffix, revealValue }; }); - const deactivateOperations = deactivateOperationArray.map(operation => { - return { - didSuffix: operation.didUniqueSuffix, - signedData: operation.signedDataJws.toCompactJws() - }; + // Only insert `recover` property if there are recover operation references. + if (recoverReferences.length > 0) { + anchorFileModel.operations!.recover = recoverReferences; + } + + const deactivateReferences = deactivateOperationArray.map(operation => { + const revealValue = Multihash.canonicalizeThenHashThenEncode(operation.signedData.recoveryKey); + + return { didSuffix: operation.didUniqueSuffix, revealValue }; }); - const anchorFileModel = { - writerLockId, - mapFileUri: mapFileHash, - coreProofFileUri: coreProofFileHash, - operations: { - create: createOperations, - recover: recoverOperations, - deactivate: deactivateOperations - } - }; + // Only insert `deactivate` property if there are deactivate operation references. 
+ if (deactivateReferences.length > 0) { + anchorFileModel.operations!.deactivate = deactivateReferences; + } // Only insert `coreProofFileUri` property if a value is given. if (coreProofFileHash !== undefined) { @@ -218,14 +233,14 @@ export default class AnchorFile { */ public static async createBuffer ( writerLockId: string | undefined, - mapFileHash: string, + mapFileUri: string | undefined, coreProofFileHash: string | undefined, createOperations: CreateOperation[], recoverOperations: RecoverOperation[], deactivateOperations: DeactivateOperation[] ): Promise { const anchorFileModel = await AnchorFile.createModel( - writerLockId, mapFileHash, coreProofFileHash, createOperations, recoverOperations, deactivateOperations + writerLockId, mapFileUri, coreProofFileHash, createOperations, recoverOperations, deactivateOperations ); const anchorFileJson = JSON.stringify(anchorFileModel); const anchorFileBuffer = Buffer.from(anchorFileJson); diff --git a/lib/core/versions/latest/DeactivateOperation.ts b/lib/core/versions/latest/DeactivateOperation.ts index bdc586e2e..486b8e24c 100644 --- a/lib/core/versions/latest/DeactivateOperation.ts +++ b/lib/core/versions/latest/DeactivateOperation.ts @@ -44,22 +44,13 @@ export default class DeactivateOperation implements OperationModel { this.signedData = signedData; } - /** - * Parses the given input as a deactivate operation entry in the anchor file. - */ - public static async parseOperationFromAnchorFile (input: any): Promise { - const operationBuffer = Buffer.from(JSON.stringify(input)); - const operation = await DeactivateOperation.parseObject(input, operationBuffer, true); - return operation; - } - /** * Parses the given buffer as a `UpdateOperation`. 
*/ public static async parse (operationBuffer: Buffer): Promise { const operationJsonString = operationBuffer.toString(); const operationObject = await JsonAsync.parse(operationJsonString); - const deactivateOperation = await DeactivateOperation.parseObject(operationObject, operationBuffer, false); + const deactivateOperation = await DeactivateOperation.parseObject(operationObject, operationBuffer); return deactivateOperation; } @@ -68,13 +59,9 @@ export default class DeactivateOperation implements OperationModel { * The `operationBuffer` given is assumed to be valid and is assigned to the `operationBuffer` directly. * NOTE: This method is purely intended to be used as an optimization method over the `parse` method in that * JSON parsing is not required to be performed more than once when an operation buffer of an unknown operation type is given. - * @param anchorFileMode If set to true, then `type` is expected to be absent. */ - public static async parseObject (operationObject: any, operationBuffer: Buffer, anchorFileMode: boolean): Promise { - let expectedPropertyCount = 3; - if (anchorFileMode) { - expectedPropertyCount = 2; - } + public static async parseObject (operationObject: any, operationBuffer: Buffer): Promise { + const expectedPropertyCount = 3; const properties = Object.keys(operationObject); if (properties.length !== expectedPropertyCount) { @@ -89,11 +76,8 @@ export default class DeactivateOperation implements OperationModel { const signedData = await DeactivateOperation.parseSignedDataPayload( signedDataJws.payload, operationObject.didSuffix); - // If not in anchor file mode, we need to validate `type` property. 
- if (!anchorFileMode) { - if (operationObject.type !== OperationType.Deactivate) { - throw new SidetreeError(ErrorCode.DeactivateOperationTypeIncorrect); - } + if (operationObject.type !== OperationType.Deactivate) { + throw new SidetreeError(ErrorCode.DeactivateOperationTypeIncorrect); } return new DeactivateOperation( diff --git a/lib/core/versions/latest/ErrorCode.ts b/lib/core/versions/latest/ErrorCode.ts index c3d0c6ec5..69c1c3749 100644 --- a/lib/core/versions/latest/ErrorCode.ts +++ b/lib/core/versions/latest/ErrorCode.ts @@ -139,6 +139,8 @@ export default { OperationPayloadMissingOrIncorrectType: 'operation_payload_missing_or_incorrect_type', OperationProcessorCreateOperationDoesNotHaveRevealValue: 'operation_processor_create_operation_does_not_have_reveal_value', OperationProcessorUnknownOperationType: 'operation_processor_unknown_operation_type', + OperationReferenceDidSuffixIsNotAString: 'operation_reference_did_suffix_is_not_a_string', + OperationReferenceRevealValueIsNotAString: 'operation_reference_reveal_value_is_not_a_string', OperationTypeUnknownOrMissing: 'operation_type_unknown_or_missing', ProvisionalProofFileDecompressionFailure: 'provisional_proof_file_decompression_failure', ProvisionalProofFileHasNoProofs: 'provisional_proof_file_has_no_proofs', @@ -160,8 +162,6 @@ export default { UpdateOperationMissingOrUnknownProperty: 'update_operation_missing_or_unknown_property', UpdateOperationSignedDataHasMissingOrUnknownProperty: 'update_operation_signed_data_has_missing_or_unknown_property', UpdateOperationTypeIncorrect: 'update_operation_type_incorrect', - UpdateReferenceDidSuffixIsNotAString: 'update_reference_did_suffix_is_not_a_string', - UpdateReferenceRevealValueIsNotAString: 'update_reference_reveal_value_is_not_a_string', ValueTimeLockVerifierInvalidNumberOfOperations: 'value_time_lock_verifier_invalid_number_of_operations', ValueTimeLockVerifierTransactionTimeOutsideLockRange: 
'value_time_lock_verifier_transaction_time_outside_lock_range', ValueTimeLockVerifierTransactionWriterLockOwnerMismatch: 'value_time_lock_verifier_transaction_writer_lock_owner_mismatch' diff --git a/lib/core/versions/latest/InputValidator.ts b/lib/core/versions/latest/InputValidator.ts index 4a107bd5f..d6a9dcf2d 100644 --- a/lib/core/versions/latest/InputValidator.ts +++ b/lib/core/versions/latest/InputValidator.ts @@ -60,4 +60,29 @@ export default class InputValidator { ); } } + + /** + * Validates the given recover/deactivate/update operation reference. + */ + public static validateOperationReferences (operationReferences: any, inputContextForErrorLogging: string) { + for (const operationReference of operationReferences) { + InputValidator.validateObjectContainsOnlyAllowedProperties(operationReference, ['didSuffix', 'revealValue'], `${inputContextForErrorLogging} operation reference`); + + const didSuffixType = typeof operationReference.didSuffix; + if (didSuffixType !== 'string') { + throw new SidetreeError( + ErrorCode.OperationReferenceDidSuffixIsNotAString, + `Property 'didSuffix' in ${inputContextForErrorLogging} operation reference is of type ${didSuffixType}, but needs to be a string.` + ); + } + + const revealValueType = typeof operationReference.revealValue; + if (revealValueType !== 'string') { + throw new SidetreeError( + ErrorCode.OperationReferenceRevealValueIsNotAString, + `Property 'revealValue' in ${inputContextForErrorLogging} operation reference is of type ${revealValueType}, but needs to be a string.` + ); + } + } + } } diff --git a/lib/core/versions/latest/MapFile.ts b/lib/core/versions/latest/MapFile.ts index ca6fea493..343b5c238 100644 --- a/lib/core/versions/latest/MapFile.ts +++ b/lib/core/versions/latest/MapFile.ts @@ -97,7 +97,7 @@ export default class MapFile { } // Validate all update operation references. 
- MapFile.validateUpdateOperationReferences(operations.update); + InputValidator.validateOperationReferences(operations.update, 'update'); // Make sure no operation with same DID. const didSuffixes = (operations.update as OperationReferenceModel[]).map(operation => operation.didSuffix); @@ -108,28 +108,6 @@ export default class MapFile { return didSuffixes; } - private static validateUpdateOperationReferences (updateReferences: any) { - for (const updateReference of updateReferences) { - InputValidator.validateObjectContainsOnlyAllowedProperties(updateReference, ['didSuffix', 'revealValue'], 'update operation reference'); - - const didSuffixType = typeof updateReference.didSuffix; - if (didSuffixType !== 'string') { - throw new SidetreeError( - ErrorCode.UpdateReferenceDidSuffixIsNotAString, - `Update reference property 'didSuffix' is of type ${didSuffixType}, but needs to be a string.` - ); - } - - const revealValueType = typeof updateReference.revealValue; - if (revealValueType !== 'string') { - throw new SidetreeError( - ErrorCode.UpdateReferenceRevealValueIsNotAString, - `Update reference property 'revealValue' is of type ${revealValueType}, but needs to be a string.` - ); - } - } - } - /** * Validates the given `chunks` property, throws error if the property fails validation. 
*/ @@ -158,13 +136,10 @@ export default class MapFile { public static async createBuffer ( chunkFileHash: string, provisionalProofFileHash: string | undefined, updateOperationArray: UpdateOperation[] ): Promise { - const updateOperations = updateOperationArray.map(operation => { + const updateReferences = updateOperationArray.map(operation => { const revealValue = Multihash.canonicalizeThenHashThenEncode(operation.signedData.updateKey); - return { - didSuffix: operation.didUniqueSuffix, - revealValue - }; + return { didSuffix: operation.didUniqueSuffix, revealValue }; }); const mapFileModel: MapFileModel = { @@ -172,9 +147,9 @@ export default class MapFile { }; // Only insert `operations` and `provisionalProofFileHash` properties if there are update operations. - if (updateOperations.length > 0) { + if (updateReferences.length > 0) { mapFileModel.operations = { - update: updateOperations + update: updateReferences }; mapFileModel.provisionalProofFileUri = provisionalProofFileHash; diff --git a/lib/core/versions/latest/Operation.ts b/lib/core/versions/latest/Operation.ts index f29f3e753..30b4b91f4 100644 --- a/lib/core/versions/latest/Operation.ts +++ b/lib/core/versions/latest/Operation.ts @@ -34,9 +34,9 @@ export default class Operation { } else if (operationType === OperationType.Update) { return UpdateOperation.parseObject(operationObject, operationBuffer); } else if (operationType === OperationType.Recover) { - return RecoverOperation.parseObject(operationObject, operationBuffer, isAnchorFileMode); + return RecoverOperation.parseObject(operationObject, operationBuffer); } else if (operationType === OperationType.Deactivate) { - return DeactivateOperation.parseObject(operationObject, operationBuffer, isAnchorFileMode); + return DeactivateOperation.parseObject(operationObject, operationBuffer); } else { throw new SidetreeError(ErrorCode.OperationTypeUnknownOrMissing); } diff --git a/lib/core/versions/latest/RecoverOperation.ts 
b/lib/core/versions/latest/RecoverOperation.ts index 6b86414c4..5c4479b7b 100644 --- a/lib/core/versions/latest/RecoverOperation.ts +++ b/lib/core/versions/latest/RecoverOperation.ts @@ -53,21 +53,12 @@ export default class RecoverOperation implements OperationModel { } /** - * Parses the given input as a recover operation entry in the anchor file. - */ - public static async parseOperationFromAnchorFile (input: any): Promise { - const operationBuffer = Buffer.from(JSON.stringify(input)); - const operation = await RecoverOperation.parseObject(input, operationBuffer, true); - return operation; - } - - /** - * Parses the given buffer as a `UpdateOperation`. + * Parses the given buffer as a `RecoverOperation`. */ public static async parse (operationBuffer: Buffer): Promise { const operationJsonString = operationBuffer.toString(); const operationObject = await JsonAsync.parse(operationJsonString); - const recoverOperation = await RecoverOperation.parseObject(operationObject, operationBuffer, false); + const recoverOperation = await RecoverOperation.parseObject(operationObject, operationBuffer); return recoverOperation; } @@ -76,13 +67,9 @@ export default class RecoverOperation implements OperationModel { * The `operationBuffer` given is assumed to be valid and is assigned to the `operationBuffer` directly. * NOTE: This method is purely intended to be used as an optimization method over the `parse` method in that * JSON parsing is not required to be performed more than once when an operation buffer of an unknown operation type is given. - * @param anchorFileMode If set to true, then `delta` and `type` properties are expected to be absent. 
*/ - public static async parseObject (operationObject: any, operationBuffer: Buffer, anchorFileMode: boolean): Promise { - let expectedPropertyCount = 4; - if (anchorFileMode) { - expectedPropertyCount = 2; - } + public static async parseObject (operationObject: any, operationBuffer: Buffer): Promise { + const expectedPropertyCount = 4; const properties = Object.keys(operationObject); if (properties.length !== expectedPropertyCount) { @@ -96,24 +83,18 @@ export default class RecoverOperation implements OperationModel { const signedDataJws = Jws.parseCompactJws(operationObject.signedData); const signedData = await RecoverOperation.parseSignedDataPayload(signedDataJws.payload); - // If not in anchor file mode, we need to validate `type` and `delta` properties. + if (operationObject.type !== OperationType.Recover) { + throw new SidetreeError(ErrorCode.RecoverOperationTypeIncorrect); + } + let delta; - if (!anchorFileMode) { - if (operationObject.type !== OperationType.Recover) { - throw new SidetreeError(ErrorCode.RecoverOperationTypeIncorrect); - } - - try { - Operation.validateDelta(operationObject.delta); - delta = { - patches: operationObject.delta.patches, - updateCommitment: operationObject.delta.updateCommitment - }; - } catch { - // For compatibility with data pruning, we have to assume that `delta` may be unavailable, - // thus an operation with invalid `delta` needs to be processed as an operation with unavailable `delta`, - // so here we let `delta` be `undefined`. - } + try { + Operation.validateDelta(operationObject.delta); + delta = operationObject.delta; + } catch { + // For compatibility with data pruning, we have to assume that `delta` may be unavailable, + // thus an operation with invalid `delta` needs to be processed as an operation with unavailable `delta`, + // so here we let `delta` be `undefined`. 
} return new RecoverOperation( diff --git a/lib/core/versions/latest/TransactionProcessor.ts b/lib/core/versions/latest/TransactionProcessor.ts index 60c10ab95..776b7cee6 100644 --- a/lib/core/versions/latest/TransactionProcessor.ts +++ b/lib/core/versions/latest/TransactionProcessor.ts @@ -130,9 +130,9 @@ export default class TransactionProcessor implements ITransactionProcessor { console.info(`Downloading core proof file '${coreProofFileUri}', max file size limit ${ProtocolParameters.maxProofFileSizeInBytes}...`); const fileBuffer = await this.downloadFileFromCas(coreProofFileUri, ProtocolParameters.maxProofFileSizeInBytes); - const coreProofFile = await CoreProofFile.parse(fileBuffer, anchorFile.deactivateOperations.map(operation => operation.didUniqueSuffix)); + const coreProofFile = await CoreProofFile.parse(fileBuffer, anchorFile.deactivateDidSuffixes); - const recoverAndDeactivateCount = anchorFile.deactivateOperations.length + anchorFile.recoverOperations.length; + const recoverAndDeactivateCount = anchorFile.deactivateDidSuffixes.length + anchorFile.recoverDidSuffixes.length; const proofCountInCoreProofFile = coreProofFile.deactivateProofs.length + coreProofFile.recoverProofs.length; if (recoverAndDeactivateCount !== proofCountInCoreProofFile) { throw new SidetreeError( @@ -180,6 +180,12 @@ export default class TransactionProcessor implements ITransactionProcessor { private async downloadAndVerifyMapFile (anchorFile: AnchorFile, paidOperationCount: number): Promise { try { const anchorFileModel = anchorFile.model; + + // If no map file URI is defined (legitimate case when there is only deactivates in the operation batch), then no map file to download. 
+ if (anchorFileModel.mapFileUri === undefined) { + return undefined; + } + console.info(`Downloading map file '${anchorFileModel.mapFileUri}', max file size limit ${ProtocolParameters.maxMapFileSizeInBytes}...`); const fileBuffer = await this.downloadFileFromCas(anchorFileModel.mapFileUri, ProtocolParameters.maxMapFileSizeInBytes); @@ -270,26 +276,15 @@ export default class TransactionProcessor implements ITransactionProcessor { chunkFile: ChunkFileModel | undefined ): Promise { + // TODO: #766 - Handle combinations of different availability of files here. + // TODO: #766 - Pending more PR for of remainder of the operation types. const createOperations = anchorFile.createOperations; - const recoverOperations = anchorFile.recoverOperations; - const deactivateOperations = anchorFile.deactivateOperations; // NOTE: this version of the protocol uses only ONE chunk file, - // and operations must be ordered by types with the following order: create, recover, update, deactivate. + // and operations must be ordered by types with the following order: create, recover, deactivate, update. const operations = []; operations.push(...createOperations); - operations.push(...recoverOperations); - operations.push(...deactivateOperations); - - // Prepare proofs to compose the original operation requests. - const proofs: (string | undefined)[] = createOperations.map(() => undefined); // Creates do not have proofs. - if (coreProofFile !== undefined) { - const recoverProofs = coreProofFile.recoverProofs.map((proof) => proof.signedDataJws.toCompactJws()); - const deactivateProofs = coreProofFile.deactivateProofs.map((proof) => proof.signedDataJws.toCompactJws()); - proofs.push(...recoverProofs); - proofs.push(...deactivateProofs); - } // NOTE: The last set of `operations` are deactivates, they don't have `delta` property. 
const anchoredOperationModels = []; @@ -306,11 +301,6 @@ export default class TransactionProcessor implements ITransactionProcessor { operationObject.delta = chunkFile.deltas[i]; } - // Add the `signedData` property unless it is a create operation. - if (operation.type !== OperationType.Create) { - operationObject.signedData = proofs[i]; - } - const patchedOperationBuffer = Buffer.from(JSON.stringify(operationObject)); const anchoredOperationModel: AnchoredOperationModel = { didUniqueSuffix: operation.didUniqueSuffix, @@ -324,14 +314,112 @@ export default class TransactionProcessor implements ITransactionProcessor { anchoredOperationModels.push(anchoredOperationModel); } + const anchoredRecoverOperationModels = TransactionProcessor.composeAnchoredRecoverOperationModels( + transaction, anchorFile, coreProofFile!, chunkFile + ); + + const anchoredDeactivateOperationModels = TransactionProcessor.composeAnchoredDeactivateOperationModels( + transaction, anchorFile, coreProofFile! + ); + const anchoredUpdateOperationModels = TransactionProcessor.composeAnchoredUpdateOperationModels( transaction, anchorFile, mapFile, provisionalProofFile, chunkFile ); + anchoredOperationModels.push(...anchoredRecoverOperationModels); + anchoredOperationModels.push(...anchoredDeactivateOperationModels); anchoredOperationModels.push(...anchoredUpdateOperationModels); return anchoredOperationModels; } + private static composeAnchoredRecoverOperationModels ( + transaction: TransactionModel, + anchorFile: AnchorFile, + coreProofFile: CoreProofFile, + chunkFile: ChunkFileModel | undefined + ): AnchoredOperationModel[] { + if (anchorFile.recoverDidSuffixes.length === 0) { + return []; + } + + let recoverDeltas; + if (chunkFile !== undefined) { + const recoverDeltaStartIndex = anchorFile.createOperations.length; + recoverDeltas = chunkFile.deltas.slice(recoverDeltaStartIndex); + } + + const recoverDidSuffixes = anchorFile.recoverDidSuffixes; + const recoverProofs = 
coreProofFile.recoverProofs.map((proof) => proof.signedDataJws.toCompactJws()); + + const anchoredOperationModels = []; + for (let i = 0; i < recoverDidSuffixes.length; i++) { + // Compose the original operation request from the files. + const composedRequest = { + type: OperationType.Recover, + didSuffix: recoverDidSuffixes[i], + signedData: recoverProofs[i], + delta: recoverDeltas?.[i] // Add `delta` property if chunk file found. + }; + + // TODO: Issue 442 - https://github.com/decentralized-identity/sidetree/issues/442 + // Use actual operation request object instead of buffer. + const operationBuffer = Buffer.from(JSON.stringify(composedRequest)); + + const anchoredOperationModel: AnchoredOperationModel = { + didUniqueSuffix: recoverDidSuffixes[i], + type: OperationType.Recover, + operationBuffer, + operationIndex: anchorFile.createOperations.length + i, + transactionNumber: transaction.transactionNumber, + transactionTime: transaction.transactionTime + }; + + anchoredOperationModels.push(anchoredOperationModel); + } + + return anchoredOperationModels; + } + + private static composeAnchoredDeactivateOperationModels ( + transaction: TransactionModel, + anchorFile: AnchorFile, + coreProofFile: CoreProofFile + ): AnchoredOperationModel[] { + if (anchorFile.deactivateDidSuffixes.length === 0) { + return []; + } + + const deactivateDidSuffixes = anchorFile.deactivateDidSuffixes; + const deactivateProofs = coreProofFile.deactivateProofs.map((proof) => proof.signedDataJws.toCompactJws()); + + const anchoredOperationModels = []; + for (let i = 0; i < deactivateDidSuffixes.length; i++) { + // Compose the original operation request from the files. + const composedRequest = { + type: OperationType.Deactivate, + didSuffix: deactivateDidSuffixes[i], + signedData: deactivateProofs[i] + }; + + // TODO: Issue 442 - https://github.com/decentralized-identity/sidetree/issues/442 + // Use actual operation request object instead of buffer. 
+ const operationBuffer = Buffer.from(JSON.stringify(composedRequest)); + + const anchoredOperationModel: AnchoredOperationModel = { + didUniqueSuffix: deactivateDidSuffixes[i], + type: OperationType.Deactivate, + operationBuffer, + operationIndex: anchorFile.createOperations.length + anchorFile.recoverDidSuffixes.length + i, + transactionNumber: transaction.transactionNumber, + transactionTime: transaction.transactionTime + }; + + anchoredOperationModels.push(anchoredOperationModel); + } + + return anchoredOperationModels; + } + private static composeAnchoredUpdateOperationModels ( transaction: TransactionModel, anchorFile: AnchorFile, @@ -346,19 +434,23 @@ export default class TransactionProcessor implements ITransactionProcessor { return []; } + let updateDeltas; + if (chunkFile !== undefined) { + const updateDeltaStartIndex = anchorFile.createOperations.length + anchorFile.recoverDidSuffixes.length; + updateDeltas = chunkFile!.deltas.slice(updateDeltaStartIndex); + } + const updateDidSuffixes = mapFile.didUniqueSuffixes; const updateProofs = provisionalProofFile!.updateProofs.map((proof) => proof.signedDataJws.toCompactJws()); - const updateDeltaStartIndex = anchorFile.createOperations.length + anchorFile.recoverOperations.length; - const updateDeltas = chunkFile!.deltas.slice(updateDeltaStartIndex); const anchoredOperationModels = []; - for (let i = 0; i < updateDeltas.length; i++) { + for (let i = 0; i < updateDidSuffixes.length; i++) { // Compose the original operation request from the files. const composedRequest = { type: OperationType.Update, didSuffix: updateDidSuffixes[i], signedData: updateProofs[i], - delta: updateDeltas[i] + delta: updateDeltas?.[i] // Add `delta` property if chunk file found. 
}; // TODO: Issue 442 - https://github.com/decentralized-identity/sidetree/issues/442 @@ -369,7 +461,7 @@ export default class TransactionProcessor implements ITransactionProcessor { didUniqueSuffix: updateDidSuffixes[i], type: OperationType.Update, operationBuffer, - operationIndex: updateDeltaStartIndex + i, + operationIndex: anchorFile.didUniqueSuffixes.length + i, transactionNumber: transaction.transactionNumber, transactionTime: transaction.transactionTime }; diff --git a/lib/core/versions/latest/UpdateOperation.ts b/lib/core/versions/latest/UpdateOperation.ts index 2238cce66..16174946b 100644 --- a/lib/core/versions/latest/UpdateOperation.ts +++ b/lib/core/versions/latest/UpdateOperation.ts @@ -68,7 +68,7 @@ export default class UpdateOperation implements OperationModel { * JSON parsing is not required to be performed more than once when an operation buffer of an unknown operation type is given. */ public static async parseObject (operationObject: any, operationBuffer: Buffer): Promise { - let expectedPropertyCount = 4; + const expectedPropertyCount = 4; const properties = Object.keys(operationObject); if (properties.length !== expectedPropertyCount) { diff --git a/lib/core/versions/latest/models/AnchorFileModel.ts b/lib/core/versions/latest/models/AnchorFileModel.ts index e028c342b..5152953d7 100644 --- a/lib/core/versions/latest/models/AnchorFileModel.ts +++ b/lib/core/versions/latest/models/AnchorFileModel.ts @@ -1,11 +1,13 @@ +import OperationReferenceModel from './OperationReferenceModel'; + /** * Defines the external Anchor File structure. 
*/ export default interface AnchorFileModel { - writerLockId: string | undefined; - mapFileUri: string; + writerLockId?: string; + mapFileUri?: string; coreProofFileUri?: string; - operations: { + operations?: { create?: { suffixData: { deltaHash: string; @@ -13,13 +15,7 @@ export default interface AnchorFileModel { type?: string; }; }[], - recover?: { - didSuffix: string; - signedData: string; - }[], - deactivate?: { - didSuffix: string; - signedData: string; - }[] + recover?: OperationReferenceModel[], + deactivate?: OperationReferenceModel[] }; } diff --git a/tests/core/AnchorFile.spec.ts b/tests/core/AnchorFile.spec.ts index bbcdff7ad..0c524b9d7 100644 --- a/tests/core/AnchorFile.spec.ts +++ b/tests/core/AnchorFile.spec.ts @@ -1,9 +1,11 @@ import * as crypto from 'crypto'; import AnchorFile from '../../lib/core/versions/latest/AnchorFile'; +import AnchorFileModel from '../../lib/core/versions/latest/models/AnchorFileModel'; import Compressor from '../../lib/core/versions/latest/util/Compressor'; import ErrorCode from '../../lib/core/versions/latest/ErrorCode'; import JasmineSidetreeErrorValidator from '../JasmineSidetreeErrorValidator'; import Jwk from '../../lib/core/versions/latest/util/Jwk'; +import Multihash from '../../lib/core/versions/latest/Multihash'; import OperationGenerator from '../generators/OperationGenerator'; import SidetreeError from '../../lib/common/SidetreeError'; @@ -35,10 +37,10 @@ describe('AnchorFile', async () => { expect(parsedAnchorFile.createOperations.length).toEqual(1); expect(parsedAnchorFile.createOperations[0].encodedSuffixData).toEqual(createOperation.encodedSuffixData); - expect(parsedAnchorFile.recoverOperations.length).toEqual(1); - expect(parsedAnchorFile.recoverOperations[0].signedDataJws.toCompactJws()).toEqual(recoverOperation.signedDataJws.toCompactJws()); - expect(parsedAnchorFile.deactivateOperations.length).toEqual(1); - 
expect(parsedAnchorFile.deactivateOperations[0].signedDataJws.toCompactJws()).toEqual(deactivateOperation.signedDataJws.toCompactJws()); + expect(parsedAnchorFile.recoverDidSuffixes.length).toEqual(1); + expect(parsedAnchorFile.recoverDidSuffixes[0]).toEqual(recoverOperation.didUniqueSuffix); + expect(parsedAnchorFile.deactivateDidSuffixes.length).toEqual(1); + expect(parsedAnchorFile.deactivateDidSuffixes[0]).toEqual(deactivateOperation.didUniqueSuffix); expect(parsedAnchorFile.model.mapFileUri).toEqual(mapFileUri); }); @@ -259,18 +261,14 @@ describe('AnchorFile', async () => { delete createOperationRequest.type; delete createOperationRequest.delta; - const deactivateOperationRequest = await OperationGenerator.createDeactivateOperationRequest( - createOperationData.createOperation.didUniqueSuffix, // Intentionally using the same DID unique suffix. - createOperationData.recoveryPrivateKey - ); - - // Strip away properties not allowed in the deactivateOperations array elements. - delete deactivateOperationRequest.type; - const anchorFile = { + const anchorFile: AnchorFileModel = { mapFileUri: 'EiB4ypIXxG9aFhXv2YC8I2tQvLEBbQAsNzHmph17vMfVYA', operations: { create: [createOperationRequest], - deactivate: [deactivateOperationRequest] + deactivate: [{ + didSuffix: createOperationData.createOperation.didUniqueSuffix, // Intentionally using the same DID unique suffix. + revealValue: 'unused' + }] } }; const anchorFileBuffer = Buffer.from(JSON.stringify(anchorFile)); @@ -305,19 +303,30 @@ describe('AnchorFile', async () => { ); expect(anchorFileModel.mapFileUri).toEqual(mapFileHash); - expect(anchorFileModel.operations.create![0].suffixData).toEqual({ + expect(anchorFileModel.operations!.create![0].suffixData).toEqual({ deltaHash: createOperation.suffixData.deltaHash, recoveryCommitment: createOperation.suffixData.recoveryCommitment, type: undefined }); // Verify recover operation. 
- const recoveryOperationInAnchorFile = anchorFileModel.operations.recover![0]; + const recoveryOperationInAnchorFile = anchorFileModel.operations!.recover![0]; + const recoveryRevealValue = Multihash.canonicalizeThenHashThenEncode(recoverOperation.signedData.recoveryKey); expect(recoveryOperationInAnchorFile.didSuffix).toEqual(recoverOperation.didUniqueSuffix); - expect(recoveryOperationInAnchorFile.signedData).toEqual(recoverOperation.signedDataJws.toCompactJws()); + expect(recoveryOperationInAnchorFile.revealValue).toEqual(recoveryRevealValue); // Verify deactivate operation. - const deactivateOperationInAnchorFile = anchorFileModel.operations.deactivate![0]; + const deactivateOperationInAnchorFile = anchorFileModel.operations!.deactivate![0]; + const deactivateRevealValue = Multihash.canonicalizeThenHashThenEncode(deactivateOperation.signedData.recoveryKey); expect(deactivateOperationInAnchorFile.didSuffix).toEqual(deactivateOperation.didUniqueSuffix); - expect(deactivateOperationInAnchorFile.signedData).toEqual(deactivateOperation.signedDataJws.toCompactJws()); + expect(deactivateOperationInAnchorFile.revealValue).toEqual(deactivateRevealValue); + }); + + it('should not create `operations` property if there is no create, recover, and deactivates.', async () => { + const writerLockId = undefined; + const mapFileHash = OperationGenerator.generateRandomHash(); + const coreProofFileHash = undefined; + const anchorFileModel = await AnchorFile.createModel(writerLockId, mapFileHash, coreProofFileHash, [], [], []); + + expect(anchorFileModel.operations).toBeUndefined(); }); }); @@ -333,7 +342,7 @@ describe('AnchorFile', async () => { const anchorFile = await AnchorFile.parse(anchorFileBuffer); expect(anchorFile.model.mapFileUri).toEqual(mapFileHash); - expect(anchorFile.model.operations.create![0].suffixData).toEqual({ + expect(anchorFile.model.operations!.create![0].suffixData).toEqual({ deltaHash: createOperation.suffixData.deltaHash, recoveryCommitment: 
createOperation.suffixData.recoveryCommitment }); }); diff --git a/tests/core/CoreProofFile.spec.ts b/tests/core/CoreProofFile.spec.ts index e4d692bbf..c1015e8b9 100644 --- a/tests/core/CoreProofFile.spec.ts +++ b/tests/core/CoreProofFile.spec.ts @@ -18,6 +18,15 @@ describe('CoreProofFile', async () => { xit('Should we check signature on observation time for all updates, recoveries, and deactivates?', async () => { }); + xit('there maybe no map file to write in batch writer if all are deactivates', async () => { + }); + + xit('if writer lock exceeds max size, it should be considered as an invalid lock, maybe we should write a small batch still?', async () => { + }); + + xit('Should we check reveal value is a multihash instead of just a string type check?', async () => { + }); + describe('parse()', async () => { it('should parse a valid core proof file successfully.', async () => { const [, anyPrivateKey] = await Jwk.generateEs256kKeyPair(); // Used in multiple signed data for testing purposes. 
diff --git a/tests/core/DeactivateOperation.spec.ts b/tests/core/DeactivateOperation.spec.ts index 0f71b4fd9..8a8b57895 100644 --- a/tests/core/DeactivateOperation.spec.ts +++ b/tests/core/DeactivateOperation.spec.ts @@ -75,7 +75,7 @@ describe('DeactivateOperation', async () => { const recoveryRevealValue = 'anyUnusedRecoveryRevealValue'; const signedData = { didSuffix: didUniqueSuffix, - recovery_reveal_value: recoveryRevealValue, + revealValue: recoveryRevealValue, extraProperty: 'An unknown extra property' }; const encodedDelta = Encoder.encode(JSON.stringify(signedData)); @@ -98,7 +98,7 @@ describe('DeactivateOperation', async () => { const recoveryRevealValue = 'anyUnusedRecoveryRevealValue'; const signedData = { didSuffix: didUniqueSuffix, - recovery_reveal_value: recoveryRevealValue + revealValue: recoveryRevealValue }; const encodedSignedData = Encoder.encode(JSON.stringify(signedData)); await expectAsync(DeactivateOperation.parseSignedDataPayload(encodedSignedData, 'mismatchingDidUniqueSuffix')) diff --git a/tests/core/MapFile.spec.ts b/tests/core/MapFile.spec.ts index af8d99c95..4a4b4ef62 100644 --- a/tests/core/MapFile.spec.ts +++ b/tests/core/MapFile.spec.ts @@ -116,7 +116,8 @@ describe('MapFile', async () => { JasmineSidetreeErrorValidator.expectSidetreeErrorToBeThrown( () => (MapFile as any).validateOperationsProperty(operationsProperty), - ErrorCode.UpdateReferenceDidSuffixIsNotAString + ErrorCode.OperationReferenceDidSuffixIsNotAString, + 'update' ); }); @@ -130,7 +131,8 @@ describe('MapFile', async () => { JasmineSidetreeErrorValidator.expectSidetreeErrorToBeThrown( () => (MapFile as any).validateOperationsProperty(operationsProperty), - ErrorCode.UpdateReferenceRevealValueIsNotAString + ErrorCode.OperationReferenceRevealValueIsNotAString, + 'update' ); }); }); diff --git a/tests/core/RecoverOperation.spec.ts b/tests/core/RecoverOperation.spec.ts index 558aac3e6..354c69a77 100644 --- a/tests/core/RecoverOperation.spec.ts +++ 
b/tests/core/RecoverOperation.spec.ts @@ -67,39 +67,17 @@ describe('RecoverOperation', async () => { }); - describe('parseOperationFromAnchorFile()', async () => { - it('should parse the operation included in an anchor file without the `delta` property.', async (done) => { - const didUniqueSuffix = 'anyDidSuffix'; - const [, recoveryPrivateKey] = await Jwk.generateEs256kKeyPair(); - - const recoverOperationData = await OperationGenerator.generateRecoverOperation({ didUniqueSuffix, recoveryPrivateKey }); - const recoverOperationRequest = JSON.parse(recoverOperationData.operationBuffer.toString()); - - // Intentionally remove properties that wouldn't exist in an anchor file. - delete recoverOperationRequest.type; - delete recoverOperationRequest.delta; - - const recoverOperation = await RecoverOperation.parseOperationFromAnchorFile(recoverOperationRequest); - - expect(recoverOperation).toBeDefined(); - expect(recoverOperation.delta).toBeUndefined(); - expect(recoverOperation.didUniqueSuffix).toEqual(didUniqueSuffix); - - done(); - }); - }); - describe('parseObject()', async () => { it('should throw if operation contains an additional unknown property.', async (done) => { const recoverOperation = { + type: OperationType.Recover, didSuffix: 'unusedSuffix', - recovery_reveal_value: 'unusedReveal', + revealValue: 'unusedReveal', signedData: 'unusedSignedData', extraProperty: 'thisPropertyShouldCauseErrorToBeThrown' }; - const anchorFileMode = true; - await expectAsync((RecoverOperation as any).parseObject(recoverOperation, Buffer.from('anyValue'), anchorFileMode)) + await expectAsync(RecoverOperation.parseObject(recoverOperation, Buffer.from('anyValue'))) .toBeRejectedWith(new SidetreeError(ErrorCode.RecoverOperationMissingOrUnknownProperty)); done(); }); @@ -112,7 +90,7 @@ describe('RecoverOperation', async () => { recoveryKey: 'anyUnusedRecoveryKey', nextRecoveryCommitmentHash: Encoder.encode(Multihash.hash(Buffer.from('some one time password'))), extraProperty: 'An 
unknown extra property', - recovery_reveal_value: 'some value' + revealValue: 'some value' }; const encodedSignedData = Encoder.encode(JSON.stringify(signedData)); await expectAsync(RecoverOperation.parseSignedDataPayload(encodedSignedData)) diff --git a/tests/core/TransactionProcessor.spec.ts b/tests/core/TransactionProcessor.spec.ts index b1ca30993..8486b965a 100644 --- a/tests/core/TransactionProcessor.spec.ts +++ b/tests/core/TransactionProcessor.spec.ts @@ -1,4 +1,5 @@ import AnchorFile from '../../lib/core/versions/latest/AnchorFile'; +import AnchorFileModel from '../../lib/core/versions/latest/models/AnchorFileModel'; import AnchoredDataSerializer from '../../lib/core/versions/latest/AnchoredDataSerializer'; import ChunkFile from '../../lib/core/versions/latest/ChunkFile'; import Compressor from '../../lib/core/versions/latest/util/Compressor'; @@ -261,8 +262,8 @@ describe('TransactionProcessor', () => { createOperations: [], didUniqueSuffixes: ['abc', 'def'], model: { writerLockId: 'lock', mapFileUri: 'map_hash', operations: {} }, - recoverOperations: [], - deactivateOperations: [] + recoverDidSuffixes: [], + deactivateDidSuffixes: [] }; spyOn(AnchorFile, 'parse').and.returnValue(Promise.resolve(mockAnchorFile)); @@ -309,7 +310,7 @@ describe('TransactionProcessor', () => { it('should return the parsed file.', async (done) => { const createOperationData = await OperationGenerator.generateCreateOperation(); const anyHash = OperationGenerator.generateRandomHash(); - const mockAnchorFileModel = await AnchorFile.createModel('wrierLockId', anyHash, undefined, [createOperationData.createOperation], [], []); + const mockAnchorFileModel = await AnchorFile.createModel(undefined, anyHash, undefined, [createOperationData.createOperation], [], []); const mockAnchorFileBuffer = await Compressor.compress(Buffer.from(JSON.stringify(mockAnchorFileModel))); spyOn(transactionProcessor as any, 'downloadFileFromCas').and.returnValue(Promise.resolve(mockAnchorFileBuffer)); @@ 
-381,6 +382,28 @@ describe('TransactionProcessor', () => { done(); }); + it('should return undefined if anchor file does not contain the provisional index file URI.', async () => { + const deactivateDidSuffix = OperationGenerator.generateRandomHash(); + const anchorFileModel: AnchorFileModel = { + coreProofFileUri: OperationGenerator.generateRandomHash(), + operations: { + deactivate: [ + { + didSuffix: deactivateDidSuffix, + revealValue: OperationGenerator.generateRandomHash() + } + ] + } + }; + const anchorFile = new (AnchorFile as any)(anchorFileModel, [deactivateDidSuffix], [], [], [deactivateDidSuffix]); + + // Setting the total paid operation count to be 1 (needs to be at least 2 in success case). + const totalPaidOperationCount = 1; + const fetchedMapFile = await transactionProcessor['downloadAndVerifyMapFile'](anchorFile, totalPaidOperationCount); + + expect(fetchedMapFile).toBeUndefined(); + }); + it('should remove update operation references if paid fee is not enough to cover all updates.', async (done) => { const createOperationData = await OperationGenerator.generateCreateOperation(); const mapFileHash = OperationGenerator.generateRandomHash(); @@ -686,7 +709,7 @@ describe('TransactionProcessor', () => { const updateOperation = updateOperationRequestData.updateOperation; const chunkFileHash = OperationGenerator.generateRandomHash(); const mapFileBuffer = await MapFile.createBuffer(chunkFileHash, provisionalProofFileHash, [updateOperation]); - const mapFileModel = await MapFile.parse(mapFileBuffer); + const mapFile = await MapFile.parse(mapFileBuffer); // Create core and provisional proof file. 
const coreProofFile = await FileGenerator.createCoreProofFile([recoverOperation], []); @@ -697,7 +720,7 @@ describe('TransactionProcessor', () => { const chunkFileModel = await ChunkFile.parse(chunkFileBuffer); const anchoredOperationModels = await transactionProcessor['composeAnchoredOperationModels']( - transactionModel, anchorFile, mapFileModel, coreProofFile, provisionalProofFile, chunkFileModel + transactionModel, anchorFile, mapFile, coreProofFile, provisionalProofFile, chunkFileModel ); expect(anchoredOperationModels.length).toEqual(3); @@ -771,5 +794,55 @@ describe('TransactionProcessor', () => { expect(returnedOperation.operationBuffer.length).toBeGreaterThan(0); done(); }); + + it('should succeed with deltas being set to `undefined` if chunk file is not given.', async () => { + // Mock a transaction model. + const transactionModel: TransactionModel = { + anchorString: 'anything', + normalizedTransactionFee: 999, + transactionFeePaid: 9999, + transactionNumber: 1, + transactionTime: 1, + transactionTimeHash: 'anyValue', + writer: 'anyWriter' + }; + + // Mock core index file with a recovery. + const [, anyPrivateKey] = await Jwk.generateEs256kKeyPair(); + const recoverOperationData = await OperationGenerator.generateRecoverOperation( + { didUniqueSuffix: OperationGenerator.generateRandomHash(), recoveryPrivateKey: anyPrivateKey } + ); + const recoverOperation = recoverOperationData.recoverOperation; + const mapFileHash = OperationGenerator.generateRandomHash(); + const coreProofFileHash = OperationGenerator.generateRandomHash(); + const anchorFileBuffer = await AnchorFile.createBuffer('writerLockId', mapFileHash, coreProofFileHash, [], [recoverOperation], []); + const anchorFile = await AnchorFile.parse(anchorFileBuffer); + + // Mock a provisional index file with an update. 
+ const provisionalProofFileHash = OperationGenerator.generateRandomHash(); + const updateOperationRequestData = await OperationGenerator.generateUpdateOperationRequest(); + const updateOperation = updateOperationRequestData.updateOperation; + const chunkFileHash = OperationGenerator.generateRandomHash(); + const mapFileBuffer = await MapFile.createBuffer(chunkFileHash, provisionalProofFileHash, [updateOperation]); + const mapFile = await MapFile.parse(mapFileBuffer); + + // Create core and provisional proof file. + const coreProofFile = await FileGenerator.createCoreProofFile([recoverOperation], []); + const provisionalProofFile = await FileGenerator.createProvisionalProofFile([updateOperation]); + + const anchoredOperationModels = await transactionProcessor['composeAnchoredOperationModels']( + transactionModel, anchorFile, mapFile, coreProofFile, provisionalProofFile, undefined + ); + + expect(anchoredOperationModels.length).toEqual(2); + + const composedRecoverRequest = JSON.parse(anchoredOperationModels[0].operationBuffer.toString()); + const composedUpdateRequest = JSON.parse(anchoredOperationModels[1].operationBuffer.toString()); + + expect(composedRecoverRequest.didSuffix).toEqual(recoverOperation.didUniqueSuffix); + expect(composedUpdateRequest.didSuffix).toEqual(updateOperation.didUniqueSuffix); + expect(composedRecoverRequest.delta).toBeUndefined(); + expect(composedUpdateRequest.delta).toBeUndefined(); + }); }); });