Skip to content

Commit

Permalink
Added verified_contents.json generator.
Browse files Browse the repository at this point in the history
  • Loading branch information
boocmp committed Nov 15, 2024
1 parent e1e08d7 commit 8a4b099
Show file tree
Hide file tree
Showing 2 changed files with 190 additions and 0 deletions.
167 changes: 167 additions & 0 deletions lib/contentSign.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,167 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */

import crypto from 'crypto'
import fs from 'fs'
import { posix as path } from 'path'

// Recursively collects every regular-file path under `dir`.
// Returns the paths in lexicographically sorted order so the manifest
// is generated deterministically across runs.
const getComponentFiles = (dir) => {
  const collected = []

  for (const entry of fs.readdirSync(dir)) {
    const entryPath = path.join(dir, entry)

    if (fs.statSync(entryPath).isDirectory()) {
      collected.push(...getComponentFiles(entryPath))
    } else {
      collected.push(entryPath)
    }
  }

  return collected.sort()
}

// Computes the SHA-256 digest of each consecutive 4096-byte block of the
// file at `filePath`. An empty file yields a single digest of the empty
// string, so every file contributes at least one leaf hash.
const computeBlockHashes = (filePath) => {
  const buffer = Buffer.alloc(4096)
  // Bug fix: `hashes` was assigned without const/let, creating an
  // implicit global shared across calls.
  const hashes = []

  const file = fs.openSync(filePath, 'r')
  try {
    while (true) {
      const bytesRead = fs.readSync(file, buffer, 0, buffer.length)
      if (bytesRead <= 0) {
        break
      }
      const hash = crypto.createHash('sha256')
      hash.update(buffer.subarray(0, bytesRead))
      hashes.push(hash.digest())
    }
  } finally {
    // Bug fix: the descriptor was never closed, leaking one fd per file.
    fs.closeSync(file)
  }

  // Bug fix: the original tested `!hashes`, which is always false because
  // arrays are truthy; empty files therefore produced no leaf hash at all.
  if (hashes.length === 0) {
    const hash = crypto.createHash('sha256')
    hash.update('')
    hashes.push(hash.digest())
  }

  return hashes
}

// Reduces a file's block hashes to a single treehash root digest.
// Leaves are combined bottom-up, up to `branchFactor` children per parent,
// until one node remains. Returns '' if no block hashes were produced.
const computeRootHash = (file) => {
  let blockHashes = computeBlockHashes(file)
  // Bug fix: the original tested `!blockHashes`, which is always false for
  // an array, so an empty hash list fell through and returned undefined.
  if (blockHashes.length === 0) {
    return ''
  }

  // Each parent node hashes up to 4096 / 32 = 128 child SHA-256 digests.
  const branchFactor = 4096 / 32

  while (blockHashes.length > 1) {
    // Bug fix: `parent_nodes` was an implicit global; now block-scoped.
    const parentNodes = []
    let i = 0
    while (i < blockHashes.length) {
      const hash = crypto.createHash('sha256')
      for (let j = 0; j < branchFactor && i < blockHashes.length; j++, i++) {
        hash.update(blockHashes[i])
      }
      parentNodes.push(hash.digest())
    }
    blockHashes = parentNodes
  }
  return blockHashes[0]
}

// Builds the unsigned verified_contents payload for a component: one
// treehash record per file, with paths made relative to the component dir.
const createPayload = (component, files) => {
  const payload = {
    content_hashes: [
      {
        block_size: 4096,
        digest: 'sha256',
        files: [],
        format: 'treehash',
        hash_block_size: 4096,
      },
    ],
    item_id: component.id,
    item_version: component.version,
    protocol_version: 1,
  }

  for (const file of files) {
    const rootHash = computeRootHash(file)
    payload.content_hashes[0].files.push({
      // Strip the component directory prefix so paths are archive-relative.
      path: file.replace(component.dir, ''),
      // Bug fix: 'base64Url' is not a valid Buffer encoding and made
      // toString() throw TypeError; Node's url-safe alphabet is 'base64url'.
      root_hash: Buffer.from(rootHash).toString('base64url'),
    })
  }

  return payload
}

// Produces a base64url-encoded RS256 (RSA-SHA256) signature over the
// JWS signing input `protectedBy + '.' + payload`.
const signPayload = (protectedBy, payload, privateKey) => {
  const signer = crypto
    .createSign('RSA-SHA256')
    .update(`${protectedBy}.${payload}`)
  return signer.sign(privateKey, 'base64url')
}

// Returns `path` guaranteed to end with a trailing '/'.
const ensureTrailingSlash = (path) => {
  return path.endsWith('/') ? path : `${path}/`
}

// Builds the signed verified_contents structure for the component rooted at
// `inputDir`. The payload (treehash per file) is serialized, base64url
// encoded, and signed with `publisherProofKey` using RS256, mirroring the
// Web Store verified_contents.json layout.
const createVerifiedContents = (inputDir, id, version, publisherProofKey) => {
  const componentDir = ensureTrailingSlash(inputDir)
  const componentFiles = getComponentFiles(componentDir)

  const payload = createPayload(
    { dir: componentDir, id: id, version: version },
    componentFiles
  )

  // JWS-style encoding: JSON -> UTF-8 bytes -> base64url.
  const toBase64Url = (obj) =>
    Buffer.from(JSON.stringify(obj)).toString('base64url')

  const encodedPayload = toBase64Url(payload)
  const encodedProtection = toBase64Url({ alg: 'RS256' })

  const signature = signPayload(
    encodedProtection,
    encodedPayload,
    publisherProofKey
  )

  return {
    description: 'treehash per file',
    signed_content: {
      payload: encodedPayload,
      signature: {
        protected: encodedProtection,
        header: {
          kid: 'webstore',
        },
        signature: signature,
      },
    },
  }
}

export default {
createVerifiedContents,
}
23 changes: 23 additions & 0 deletions lib/util.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import { S3Client, GetObjectCommand, HeadObjectCommand, PutObjectCommand, PutObj
import replace from 'replace-in-file'
import { pipeline } from 'stream/promises'
import { tmpdir } from 'os'
import { createVerifiedContents } from './contentSign.js'

const DynamoDBTableName = 'Extensions'
const FirstVersion = '1.0.0'
Expand Down Expand Up @@ -63,6 +64,26 @@ const fetchTextFromURL = (listURL) => {
return p
}

// Writes _metadata/verified_contents.json for the component rooted at
// `inputDir`, signed with `publisherProofKey`. The component id and version
// are read from the component's manifest.json.
const generateComponentMetadata = (inputDir, publisherProofKey) => {
  const manifest = parseManifest(path.join(inputDir, 'manifest.json'))
  const componentId = getIDFromBase64PublicKey(manifest.key)
  const version = manifest.version

  const verifiedContents = createVerifiedContents(
    inputDir,
    componentId,
    version,
    publisherProofKey
  )

  const metadataDir = path.join(inputDir, '_metadata')
  // Bug fix: without `recursive: true`, mkdirSync throws EEXIST when the
  // component is repacked and _metadata already exists.
  fs.mkdirSync(metadataDir, { recursive: true })
  fs.writeFileSync(
    path.join(metadataDir, 'verified_contents.json'),
    JSON.stringify(verifiedContents)
  )
}

const generateCRXFile = (binary, crxFile, privateKeyFile, publisherProofKey,
publisherProofKeyAlt, inputDir) => {
if (!binary) {
Expand All @@ -78,6 +99,8 @@ const generateCRXFile = (binary, crxFile, privateKeyFile, publisherProofKey,
throw new Error(`Private key file '${privateKeyFile}' is missing, was it uploaded?`)
}

generateComponentMetadata(inputDir, publisherProofKey)

const tmp = tmpdir()
const tempUserDataDir = fs.mkdtempSync(path.join(tmp, 'crx-package-job-'))
const args = [
Expand Down

0 comments on commit 8a4b099

Please sign in to comment.