diff --git a/.gitignore b/.gitignore
index 7bbe1a3f..2a971197 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
 /node_modules
 /coverage
+/dist
\ No newline at end of file
diff --git a/dist/index.js b/dist/index.js
deleted file mode 100644
index f8adc354..00000000
--- a/dist/index.js
+++ /dev/null
@@ -1,32 +0,0 @@
-const core = require('@actions/core');
-import { getFilesForUpload } from './src/file-system-utils';
-import { uploadAll } from './src/azure-upload-utils';
-try {
-    /**
-     * File types to upload should look like
-     * { ".html": "text/html" }
-     */
-    const fileTypesToUpload = JSON.parse(core.getInput('fileTypesToUpload'));
-    /**
-     * Directories to upload should look like
-     * [
-     *   { directoryToUpload: "", shouldRecurse: "", baseContainerPath: "" }
-     * ]
-     */
-    const directoriesToUpload = JSON.parse(core.getInput('directoriesToUpload')) || [];
-    let filesToUpload = [];
-    directoriesToUpload.forEach(t => {
-        filesToUpload = filesToUpload.concat(getFilesForUpload(t.directoryToUpload, t.shouldRecurse, t.baseContainerPath, Object.keys(fileTypesToUpload)));
-    });
-    /**
-     * Azure Blob Configurations should look like
-     * [
-     *   { connectionString: "", container: "" }
-     * ]
-     */
-    const azureBlobConfiguration = JSON.parse(core.getInput('azureBlobConfiguration'));
-    uploadAll(azureBlobConfiguration, filesToUpload, fileTypesToUpload);
-}
-catch (error) {
-    core.setFailed(error.message);
-}
diff --git a/dist/src/azure-upload-utils.js b/dist/src/azure-upload-utils.js
deleted file mode 100644
index 0d8745e9..00000000
--- a/dist/src/azure-upload-utils.js
+++ /dev/null
@@ -1,16 +0,0 @@
-const { BlobServiceClient } = require('@azure/storage-blob');
-const path = require('path');
-const fs = require('fs');
-export function uploadAll(uploadConfigs, filesToUpload, supportedContentTypes) {
-    uploadConfigs.forEach(t => {
-        const blobServiceClient = BlobServiceClient.fromConnectionString(t.connectionString);
-        const containerClient = blobServiceClient.getContainerClient(t.container);
-        filesToUpload.forEach(x => {
-            let stream = fs.readFileSync(x.absoluteDiskPath);
-            let contentType = supportedContentTypes[path.extname(x.absoluteDiskPath)];
-            if (!contentType)
-                throw `Unsupported Content Type for ${x.absoluteDiskPath}`;
-            containerClient.uploadBlockBlob(x.relativeUploadPath, stream, stream.length, { blobHTTPHeaders: { blobContentType: contentType } });
-        });
-    });
-}
diff --git a/dist/src/file-system-utils.js b/dist/src/file-system-utils.js
deleted file mode 100644
index eef4f575..00000000
--- a/dist/src/file-system-utils.js
+++ /dev/null
@@ -1,54 +0,0 @@
-import fs from 'fs';
-import Queue from 'queue-fifo';
-import path from 'path';
-let q = new Queue();
-/**
- * Traverses the disk and builds a list/map of which files to upload and with what relative paths.
- *
- * @param scanDirectory - Directory to scan on the disk
- * @param shouldRecurse - If we should recurse and upload files in nested directories
- * @param baseContainerPath - Most likely $web for Azure Blobs, if static content is being uploaded
- * @param extensionsToUpload - List of extensions to upload
- *
- * @returns an array of objects which carry absolute path on disk
- * and where they would be uploaded in cloud
- */
-export function getFilesForUpload(scanDirectory, shouldRecurse, baseContainerPath, extensionsToUpload) {
-    let filesToUpload = [];
-    q.enqueue(scanDirectory);
-    while (!q.isEmpty()) {
-        const currentDirectoryPath = q.dequeue();
-        console.log('Traversing directory: ', currentDirectoryPath);
-        const currentDirectoryContents = fs.readdirSync(currentDirectoryPath);
-        const filesInCurrentDirectory = currentDirectoryContents
-            // filter for files only
-            .filter(t => !fs.lstatSync(path.join(currentDirectoryPath, t)).isDirectory())
-            // filenames to full path
-            .map(t => path.join(currentDirectoryPath, t));
-        console.log(`Files in ${currentDirectoryPath}`, filesInCurrentDirectory);
-        let uploadCandidates = filesInCurrentDirectory
-            // make sure we only target the specified extensions
-            .filter(t => extensionsToUpload.some(x => t.endsWith(x)));
-        console.log(`Upload candidates from ${currentDirectoryPath}`, uploadCandidates);
-        filesToUpload.push(...uploadCandidates);
-        if (shouldRecurse) {
-            let dirsInDir = currentDirectoryContents
-                .filter(t => fs.lstatSync(path.join(currentDirectoryPath, t))
-                // this time, query for directories only
-                .isDirectory()).map(t => path.join(currentDirectoryPath, t));
-            if (dirsInDir && dirsInDir.length) {
-                // enqueue directories, continue traversing
-                dirsInDir.forEach(t => q.enqueue(t));
-            }
-        }
-    }
-    let uploadStructure = filesToUpload.map(t => {
-        let relativePath = t.replace(scanDirectory, '');
-        if (relativePath[0] === '/')
-            relativePath = relativePath.substring(1);
-        if (baseContainerPath !== undefined)
-            relativePath = `${baseContainerPath}/${relativePath}`;
-        return { absoluteDiskPath: t, relativeUploadPath: relativePath };
-    });
-    return uploadStructure;
-}