diff --git a/.eslintignore b/.eslintignore index 63250b640b..658ab2f49a 100644 --- a/.eslintignore +++ b/.eslintignore @@ -7,6 +7,3 @@ # compiled by tsc from /src/electron/ /public - -# rest resources -/src/utils/resources \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json index abc414c105..e0e31619a9 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -67,5 +67,16 @@ ], "editor.codeActionsOnSave": { "source.fixAll.eslint": true - } + }, + "cSpell.words": [ + "Testcafe", + "antd", + "bitcoind", + "clightning", + "cmps", + "logobw", + "mrblenny", + "unzipper", + "uploader" + ] } diff --git a/src/__mocks__/archiver.js b/src/__mocks__/archiver.js new file mode 100644 index 0000000000..aac705b845 --- /dev/null +++ b/src/__mocks__/archiver.js @@ -0,0 +1,22 @@ +const createMockArchiver = () => { + // TODO: this code should live in __mocks__/archiver.js + + // eslint-disable-next-line @typescript-eslint/no-var-requires + const { PassThrough } = require('stream'); + let mockStream; + // return a fake stream when "archiver()" is called in the app + const ctor = function() { + mockStream = new PassThrough(); + mockStream.file = jest.fn(); + mockStream.directory = jest.fn(); + mockStream.append = jest.fn(); + mockStream.finalize = jest.fn(); + return mockStream; + }; + // attach a func to emit events on the stream from the tests + ctor.mockEmit = (event, data) => mockStream.emit(event, data); + + return ctor; +}; + +export default createMockArchiver(); diff --git a/src/__mocks__/fs-extra.js b/src/__mocks__/fs-extra.js index 701df48326..2043ec663a 100644 --- a/src/__mocks__/fs-extra.js +++ b/src/__mocks__/fs-extra.js @@ -1,8 +1,11 @@ module.exports = { outputFile: jest.fn(), + writeFile: jest.fn(), pathExists: jest.fn(), readFile: jest.fn(), remove: jest.fn(), ensureDir: jest.fn(), - copyFile: jest.fn(), + copy: jest.fn(), + createWriteStream: jest.fn(), + createReadStream: jest.fn(), }; diff --git a/src/components/network/ImportNetwork.spec.tsx b/src/components/network/ImportNetwork.spec.tsx index 25a28797a0..61c9fd0e92 100644 --- a/src/components/network/ImportNetwork.spec.tsx +++ b/src/components/network/ImportNetwork.spec.tsx @@ -21,7 +21,9 @@ describe('ImportNetwork component', () => { it('has a file uploader', async () => { const { getByText } = renderComponent(); expect( - getByText('Click or drag ZIP file to this area to import'), + getByText( + 'Drag a zip file exported from Polar here, or click to browse for the file', + ), ).toBeInTheDocument(); }); diff --git a/src/components/network/ImportNetwork.tsx b/src/components/network/ImportNetwork.tsx index a484632e97..f33b304591 100644 --- a/src/components/network/ImportNetwork.tsx +++ b/src/components/network/ImportNetwork.tsx @@ -1,4 +1,5 @@ -import React, { useState } from 'react'; +import React from 'react'; +import { useAsyncCallback } from 'react-async-hook'; import { RouteComponentProps } from 'react-router'; import { UploadOutlined } from '@ant-design/icons'; import styled from '@emotion/styled'; @@ -35,27 +36,17 @@ const ImportNetwork: React.FC = () => { const { navigateTo, notify } = useStoreActions(s => s.app); const { importNetwork } = useStoreActions(s => s.network); const { l } = usePrefixedTranslation('cmps.network.ImportNetwork'); - const [importing, setImporting] = useState(false); + const doImportNetwork = useAsyncCallback(async (file: RcFile) => { + try { + const network = await importNetwork(file.path); + notify({ message: l('importSuccess', { name: network.name }) }); + navigateTo(HOME); + } catch (error) { + notify({ message: l('importError', { file: file.name }), error }); + } - const doImportNetwork = (file: RcFile) => { - setImporting(true); - - // we kick off the import promise, but don't wait for it - importNetwork(file.path) - .then(network => { - notify({ message: l('importSuccess', { name: network.name }) }); - navigateTo(HOME); - }) - .catch(error => { - notify({ message: l('importError', { file: file.name }), error }); - }) - .then(() => { - setImporting(false); - }); - - // return false to prevent the Upload.Dragger from sending the file somewhere - return false; - }; + return; + }); const theme = useTheme(); return ( @@ -70,10 +61,10 @@ const ImportNetwork: React.FC = () => { // to not display a file in the upload dragger after the user has selected a zip fileList={undefined} accept=".zip" - disabled={importing} - beforeUpload={doImportNetwork} + disabled={doImportNetwork.loading} + beforeUpload={doImportNetwork.execute} > - {importing ? ( + {doImportNetwork.loading ? ( <>

{l('importText')}

diff --git a/src/components/network/NetworkView.spec.tsx b/src/components/network/NetworkView.spec.tsx index 013dad4eba..3424f9f0d0 100644 --- a/src/components/network/NetworkView.spec.tsx +++ b/src/components/network/NetworkView.spec.tsx @@ -1,7 +1,7 @@ import React from 'react'; import electron from 'electron'; import fsExtra from 'fs-extra'; -import { fireEvent, wait, waitForElement } from '@testing-library/dom'; +import { fireEvent, getByText, wait, waitForElement } from '@testing-library/dom'; import { act } from '@testing-library/react'; import { createMemoryHistory } from 'history'; import { Status } from 'shared/types'; diff --git a/src/components/routing/Routes.spec.tsx b/src/components/routing/Routes.spec.tsx index c985c08c8a..e8d8370929 100644 --- a/src/components/routing/Routes.spec.tsx +++ b/src/components/routing/Routes.spec.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { renderWithProviders } from 'utils/tests'; -import { HOME, NETWORK_NEW, Routes, NETWORK_IMPORT } from 'components/routing'; +import { HOME, NETWORK_IMPORT, NETWORK_NEW, Routes } from 'components/routing'; describe('App container', () => { const renderComponent = (route: string) => { @@ -19,6 +19,6 @@ describe('App container', () => { it('should render the import network page', () => { const { getByText } = renderComponent(NETWORK_IMPORT); - expect(getByText('Import a pre-defined Lightning Network')).toBeInTheDocument(); + expect(getByText('Import a Lightning Network')).toBeInTheDocument(); }); }); diff --git a/src/i18n/locales/en-US.json b/src/i18n/locales/en-US.json index 33bba3b7d2..5a648606e7 100644 --- a/src/i18n/locales/en-US.json +++ b/src/i18n/locales/en-US.json @@ -263,8 +263,8 @@ "cmps.network.NetworkView.exportSuccess": "Exported '{{name}}'. Saved the zip file to {{destination}}", "cmps.network.NetworkView.notReadyToExport": "Cannot export a running network", "cmps.network.NetworkView.notReadyToExportDescription": "Make sure the network is completely stopped before exporting it.", - "cmps.network.ImportNetwork.title": "Import a pre-defined Lightning Network", - "cmps.network.ImportNetwork.fileDraggerArea": "Click or drag ZIP file to this area to import", + "cmps.network.ImportNetwork.title": "Import a Lightning Network", + "cmps.network.ImportNetwork.fileDraggerArea": "Drag a zip file exported from Polar here, or click to browse for the file", "cmps.network.ImportNetwork.importText": "Importing...", "cmps.network.ImportNetwork.importSuccess": "Imported network '{{name}}' successfully", "cmps.network.ImportNetwork.importError": "Could not import '{{file}}'", @@ -347,5 +347,5 @@ "store.models.network.removeLastErr": "Cannot remove the only bitcoin node", "store.models.network.removeCompatErr": "There are no other compatible backends for {{lnName}} to connect to. You must remove the {{lnName}} node first", "utils.network.backendCompatError": "This network does not contain a Bitcoin Core v{{requiredVersion}} (or lower) node which is required for {{implementation}} v{{version}}", - "utils.network.importClightningWindows": "Importing networks with c-lightning nodes is not supported on Windows" + "utils.network.incompatibleImplementation": "Importing networks with {{implementation}} nodes is not supported on {{platform}}" } diff --git a/src/store/models/network.spec.ts b/src/store/models/network.spec.ts index 6038fe3713..9b1f536811 100644 --- a/src/store/models/network.spec.ts +++ b/src/store/models/network.spec.ts @@ -27,16 +27,14 @@ jest.mock('utils/files', () => ({ jest.mock('utils/network', () => ({ ...jest.requireActual('utils/network'), importNetworkFromZip: () => { - return jest.fn().mockImplementation(() => { - const network = { - id: 1, - nodes: { - bitcoin: [{}], - lightning: [{}], - }, - }; - return [network, {}]; - })(); + const network = { + id: 1, + nodes: { + bitcoin: [{}], + lightning: [{}], + }, + }; + return [network, {}]; }, })); @@ -825,11 +823,11 @@ describe('Network model', () => { describe('Export', () => { it('should export a network and show a save dialogue', async () => { - const { network: networkActions } = store.getActions(); + const { exportNetwork } = store.getActions().network; const spy = jest.spyOn(electron.remote.dialog, 'showSaveDialog'); - const exported = await networkActions.exportNetwork(getNetwork()); + const exported = await exportNetwork(getNetwork()); expect(exported).toBeDefined(); expect(spy).toHaveBeenCalled(); @@ -842,18 +840,18 @@ describe('Network model', () => { // returns undefined if user closes the window mock.mockImplementation(() => ({} as any)); - const { network: networkActions } = store.getActions(); - const exported = await networkActions.exportNetwork(getNetwork()); + const { exportNetwork } = store.getActions().network; + const exported = await exportNetwork(getNetwork()); expect(exported).toBeUndefined(); }); }); describe('Import', () => { it('should import a network', async () => { - const { network: networkActions } = store.getActions(); + const { importNetwork } = store.getActions().network; const statePreImport = store.getState(); - const imported = await networkActions.importNetwork('zip'); + const imported = await importNetwork('zip'); expect(imported.id).toBeDefined(); expect(imported.nodes.bitcoin.length).toBeGreaterThan(0); expect(imported.nodes.lightning.length).toBeGreaterThan(0); diff --git a/src/store/models/network.ts b/src/store/models/network.ts index 82ca63c106..60f8a32be8 100644 --- a/src/store/models/network.ts +++ b/src/store/models/network.ts @@ -1,6 +1,6 @@ import { remote, SaveDialogOptions } from 'electron'; import { info } from 'electron-log'; -import { copyFile, ensureDir } from 'fs-extra'; +import { copy, ensureDir } from 'fs-extra'; import { join } from 'path'; import { push } from 'connected-react-router'; import { Action, action, Computed, computed, Thunk, thunk } from 'easy-peasy'; @@ -645,7 +645,7 @@ const networkModel: NetworkModel = { const zipped = await zipNetwork(network, allCharts[network.id]); - await copyFile(zipped, zipDestination); + await copy(zipped, zipDestination); info('exported network to', zipDestination); return zipDestination; }), @@ -655,14 +655,14 @@ const networkModel: NetworkModel = { network: { networks }, } = getStoreState(); - const { network: networkActions } = getStoreActions(); - const { designer: designerActions } = getStoreActions(); + const { add, save } = getStoreActions().network; + const { setChart } = getStoreActions().designer; const [newNetwork, chart] = await importNetworkFromZip(path, networks); - networkActions.add(newNetwork); - designerActions.setChart({ chart, id: newNetwork.id }); - await networkActions.save(); + add(newNetwork); + setChart({ chart, id: newNetwork.id }); + await save(); info('imported', newNetwork); return newNetwork; diff --git a/src/utils/constants.ts b/src/utils/constants.ts index 37279241a9..0290238b09 100644 --- a/src/utils/constants.ts +++ b/src/utils/constants.ts @@ -143,6 +143,7 @@ export const dockerConfigs: Record = { 'bitcoind', '-server=1', '-regtest=1', + '-reindex', '-rpcauth={{rpcUser}}:{{rpcAuth}}', '-debug=0', '-zmqpubrawblock=tcp://0.0.0.0:28334', diff --git a/src/utils/network.ts b/src/utils/network.ts index b8a3e038b8..ff8441726c 100644 --- a/src/utils/network.ts +++ b/src/utils/network.ts @@ -1,6 +1,5 @@ import { debug } from 'electron-log'; -import { promises as fs } from 'fs'; -import { copy } from 'fs-extra'; +import { copy, mkdirp, readFile, writeFile } from 'fs-extra'; import { basename, join } from 'path'; import { IChart } from '@mrblenny/react-flow-chart'; import detectPort from 'detect-port'; @@ -21,12 +20,13 @@ import { ManagedImage, Network, } from 'types'; +import NetworkDesigner from 'components/designer/NetworkDesigner'; import { dataPath, networksPath, nodePath } from './config'; -import { BasePorts, DOCKER_REPO } from './constants'; +import { BasePorts, DOCKER_REPO, dockerConfigs } from './constants'; import { getName } from './names'; import { range } from './numbers'; import { isVersionCompatible } from './strings'; -import { isWindows } from './system'; +import { getPolarPlatform } from './system'; import { prefixTranslation } from './translate'; import { unzip, zip } from './zip'; @@ -227,12 +227,43 @@ const isNetwork = (value: any): value is Network => { }; const readNetwork = async (path: string, id: number): Promise => { - const rawNetwork = await fs.readFile(path); + const rawNetwork = await readFile(path); const network = JSON.parse(rawNetwork.toString('utf-8')); if (!isNetwork(network)) { throw Error(`${path} did not contain a valid network!`); } + return network; +}; + +const isChart = (value: any): value is IChart => + typeof value === 'object' && + typeof value.offset === 'object' && + typeof value.nodes === 'object' && + typeof value.links === 'object' && + typeof value.selected === 'object' && + typeof value.hovered === 'object'; + +const readExportFile = async (path: string, id: number): Promise<[Network, IChart]> => { + const rawFile = await readFile(path); + + const parsed = JSON.parse(rawFile.toString('utf-8')); + if (!parsed.network) { + throw Error(`${path} did not contain a 'network' field`); + } + if (!parsed.chart) { + throw Error(`${path} did not contain a 'chart' field`); + } + const network = parsed.network as unknown; + const chart = parsed.chart as unknown; + if (!isNetwork(network)) { + throw Error(`${path} did not contain a valid network`); + } + + if (!isChart(chart)) { + throw Error(`${path} did not contain a valid chart`); + } + network.path = join(dataPath, 'networks', id.toString()); network.id = id; @@ -248,10 +279,7 @@ const readNetwork = async (path: string, id: number): Promise => { const clightning = ln as CLightningNode; clightning.paths = { macaroon: join( - network.path, - 'volumes', - 'c-lightning', - clightning.name, + nodePath(network, 'c-lightning', clightning.name), 'rest-api', 'access.macaroon', ), @@ -259,25 +287,7 @@ const readNetwork = async (path: string, id: number): Promise => { } }); - return network; -}; - -const isChart = (value: any): value is IChart => - typeof value === 'object' && - typeof value.offset === 'object' && - typeof value.nodes === 'object' && - typeof value.links === 'object' && - typeof value.selected === 'object' && - typeof value.hovered === 'object'; - -const readChart = async (path: string): Promise => { - const rawChart = await fs.readFile(path); - const chart = JSON.parse(rawChart.toString('utf-8')); - if (!isChart(chart)) { - throw Error(`${path} did not contain a valid chart`); - } - - return chart; + return [network, chart]; }; /** @@ -291,10 +301,7 @@ export const getNetworkFromZip = async ( const destination = join(os.tmpdir(), basename(zip, '.zip')); await unzip(zip, destination); - const [network, chart] = await Promise.all([ - readNetwork(join(destination, 'network.json'), newId), - readChart(join(destination, 'chart.json')), - ]); + const [network, chart] = await readExportFile(join(destination, 'export.json'), newId); return [network, chart, destination]; }; @@ -320,16 +327,19 @@ export const importNetworkFromZip = async ( zipPath, newId, ); - const networkHasCLightning = newNetwork.nodes.lightning.some( - n => n.implementation === 'c-lightning', - ); - if (isWindows() && networkHasCLightning) { - throw Error(l('importClightningWindows')); + const platform = getPolarPlatform(); + + for (const { implementation } of newNetwork.nodes.lightning) { + const { platforms } = dockerConfigs[implementation]; + const nodeSupportsPlatform = platforms.includes(platform); + if (!nodeSupportsPlatform) { + throw Error(l('incompatibleImplementation', { implementation, platform })); + } } const newNetworkDirectory = join(dataPath, 'networks', newId.toString()); - await fs.mkdir(newNetworkDirectory, { recursive: true }); + await mkdirp(newNetworkDirectory); const thingsToCopy = ['docker-compose.yml', 'volumes']; await Promise.all( @@ -366,22 +376,16 @@ export const zipNameForNetwork = (network: Network): string => * @return Path of created `.zip` file */ export const zipNetwork = async (network: Network, chart: IChart): Promise => { + const exportFileContent = { + network, + chart, + }; + + await writeFile(join(network.path, 'export.json'), JSON.stringify(exportFileContent)); + const destination = join(tmpdir(), zipNameForNetwork(network)); - await zip({ - destination, - objects: [ - { - name: 'network.json', - object: network, - }, - { - name: 'chart.json', - object: chart, - }, - ], - paths: [join(network.path, 'docker-compose.yml'), join(network.path, 'volumes')], - }); + await zip(network.path, destination); return destination; }; diff --git a/src/utils/tests/resources/bar.txt b/src/utils/tests/resources/bar.txt deleted file mode 100644 index 5716ca5987..0000000000 --- a/src/utils/tests/resources/bar.txt +++ /dev/null @@ -1 +0,0 @@ -bar diff --git a/src/utils/tests/resources/baz/qux.ts b/src/utils/tests/resources/baz/qux.ts deleted file mode 100644 index a1f362fecc..0000000000 --- a/src/utils/tests/resources/baz/qux.ts +++ /dev/null @@ -1 +0,0 @@ -console.log('qux'); diff --git a/src/utils/tests/resources/foo.json b/src/utils/tests/resources/foo.json deleted file mode 100644 index 70b62b8cfd..0000000000 --- a/src/utils/tests/resources/foo.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "foo": 2 -} diff --git a/src/utils/tests/resources/test.zip b/src/utils/tests/resources/test.zip deleted file mode 100644 index d2d89672d7..0000000000 Binary files a/src/utils/tests/resources/test.zip and /dev/null differ diff --git a/src/utils/tests/resources/zipped-network.zip b/src/utils/tests/resources/zipped-network.zip deleted file mode 100644 index 4fa1d74b7e..0000000000 Binary files a/src/utils/tests/resources/zipped-network.zip and /dev/null differ diff --git a/src/utils/zip.spec.ts b/src/utils/zip.spec.ts index 5ab87fd622..4e9b0ee143 100644 --- a/src/utils/zip.spec.ts +++ b/src/utils/zip.spec.ts @@ -1,10 +1,11 @@ -import { promises as fs } from 'fs'; +import fsExtra from 'fs-extra'; import { join } from 'path'; import archiver from 'archiver'; -import { tmpdir } from 'os'; +import { PassThrough } from 'stream'; import { unzip, zip } from './zip'; -jest.mock('fs-extra', () => jest.requireActual('fs-extra')); +const fsMock = fsExtra as jest.Mocked; +const archiverMock = archiver as jest.Mocked; describe('unzip', () => { it("fail to unzip something that isn't a zip", async () => { @@ -13,105 +14,94 @@ describe('unzip', () => { ).rejects.toThrow(); }); - it('unzips test.zip', async () => { - const destination = join(tmpdir(), 'zip-test-' + Date.now()); - await unzip(join(__dirname, 'tests', 'resources', 'test.zip'), destination); - - const entries = await fs.readdir(destination, { withFileTypes: true }); - expect(entries.map(e => e.name)).toContain('foo.json'); - expect(entries.map(e => e.name)).toContain('bar.txt'); - expect(entries.map(e => e.name)).toContain('baz'); - - const fooFile = entries.find(e => e.name === 'foo.json'); - const barFile = entries.find(e => e.name === 'bar.txt'); - const bazDir = entries.find(e => e.name === 'baz'); - - expect(fooFile).toBeDefined(); - expect(barFile).toBeDefined(); - expect(bazDir).toBeDefined(); - - expect(fooFile?.isFile()).toBeTruthy(); - expect(barFile?.isFile()).toBeTruthy(); - expect(bazDir?.isDirectory()).toBeTruthy(); - - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const bazEntries = await fs.readdir(join(destination, bazDir!.name), { - withFileTypes: true, - }); - - expect(bazEntries).toHaveLength(1); - expect(bazEntries.map(e => e.name)).toContain('qux.ts'); - - const qux = await fs.readFile(join(destination, 'baz', 'qux.ts')); - expect(qux.toString('utf-8')).toBe('console.log("qux");\n'); - - const bar = await fs.readFile(join(destination, 'bar.txt')); - expect(bar.toString('utf-8')).toBe('bar\n'); - - const foo = await fs.readFile(join(destination, 'foo.json')); - expect(foo.toString('utf-8')).toBe(JSON.stringify({ foo: 2 }, null, 4) + '\n'); - }); - it("fails to unzip something that doesn't exist", async () => { return expect(unzip('foobar', 'bazfoo')).rejects.toThrow(); }); }); describe('zip', () => { - it('zips objects', async () => { - const objects: Array<{ name: string; object: any }> = [ - { - name: 'firstObject', - object: 2, - }, - { - name: 'secondObject', - object: { baz: 'baz' }, - }, - { - name: 'thirdObject', - object: [2, { foo: 'foo' }, false], - }, - ]; - - const zipped = join(tmpdir(), `zip-test-${Date.now()}.zip`); - await zip({ - destination: zipped, - objects, - paths: [], - }); - - const unzipped = join(tmpdir(), `zip-test-${Date.now()}`); - await unzip(zipped, unzipped); - - for (const obj of objects) { - const read = await fs - .readFile(join(unzipped, obj.name)) - .then(read => JSON.parse(read.toString('utf-8'))); - expect(read).toEqual(obj.object); - } + // it('zips objects', async () => { + // const objects: Array<{ name: string; object: any }> = [ + // { + // name: 'firstObject', + // object: 2, + // }, + // { + // name: 'secondObject', + // object: { baz: 'baz' }, + // }, + // { + // name: 'thirdObject', + // object: [2, { foo: 'foo' }, false], + // }, + // ]; + + // const zipped = join(tmpdir(), `zip-test-${Date.now()}.zip`); + // await zip({ + // destination: zipped, + // objects, + // paths: [], + // }); + + // const unzipped = join(tmpdir(), `zip-test-${Date.now()}`); + // await unzip(zipped, unzipped); + + // for (const obj of objects) { + // const read = await fsExtra + // .readFile(join(unzipped, obj.name)) + // .then(read => JSON.parse(read.toString('utf-8'))); + // expect(read).toEqual(obj.object); + // } + // }); + + // it('zips paths', async () => { + // const files = [ + // join(__dirname, 'tests', 'resources', 'bar.txt'), + // join(__dirname, 'tests', 'resources', 'foo.json'), + // join(__dirname, 'tests', 'resources', 'baz'), + // ]; + // const zipped = join(tmpdir(), `zip-test-${Date.now()}.zip`); + // await zip({ destination: zipped, objects: [], paths: files }); + + // const unzipped = join(tmpdir(), `zip-test-${Date.now()}`); + // await unzip(zipped, unzipped); + + // const entries = await fs..readdir(unzipped, { withFileTypes: true }); + + // const bar = entries.find(e => e.name === 'bar.txt'); + // const baz = entries.find(e => e.name === 'baz'); + // const foo = entries.find(e => e.name === 'foo.json'); + + // expect(bar?.isFile()).toBeTruthy(); + // expect(baz?.isDirectory()).toBeTruthy(); + // expect(foo?.isFile()).toBeTruthy(); + // }); + + it('should fail if there is an archiver error', async () => { + fsMock.createWriteStream.mockReturnValueOnce(new PassThrough() as any); + + const promise = zip('source', 'destination'); + + // emit an error after a small delay + const mockError = new Error('test-error'); + setTimeout(() => { + archiverMock.mockEmit('error', mockError); + }, 100); + + await expect(promise).rejects.toEqual(mockError); }); - it('zips paths', async () => { - const files = [ - join(__dirname, 'tests', 'resources', 'bar.txt'), - join(__dirname, 'tests', 'resources', 'foo.json'), - join(__dirname, 'tests', 'resources', 'baz'), - ]; - const zipped = join(tmpdir(), `zip-test-${Date.now()}.zip`); - await zip({ destination: zipped, objects: [], paths: files }); - - const unzipped = join(tmpdir(), `zip-test-${Date.now()}`); - await unzip(zipped, unzipped); + it('should fail if there is an archiver warning', async () => { + fsMock.createWriteStream.mockReturnValueOnce(new PassThrough() as any); - const entries = await fs.readdir(unzipped, { withFileTypes: true }); + const promise = zip('source', 'destination'); - const bar = entries.find(e => e.name === 'bar.txt'); - const baz = entries.find(e => e.name === 'baz'); - const foo = entries.find(e => e.name === 'foo.json'); + // emit an error after a small delay + const mockError = new Error('test-warning'); + setTimeout(() => { + archiverMock.mockEmit('warning', mockError); + }, 100); - expect(bar?.isFile()).toBeTruthy(); - expect(baz?.isDirectory()).toBeTruthy(); - expect(foo?.isFile()).toBeTruthy(); + await expect(promise).rejects.toEqual(mockError); }); }); diff --git a/src/utils/zip.ts b/src/utils/zip.ts index 2dfe5e72f7..01d8e209ee 100644 --- a/src/utils/zip.ts +++ b/src/utils/zip.ts @@ -1,7 +1,7 @@ -import { error, warn } from 'electron-log'; +import { error, info, warn } from 'electron-log'; import fs from 'fs'; -import { pathExists } from 'fs-extra'; -import { basename } from 'path'; +import { createWriteStream, pathExists } from 'fs-extra'; +import { join } from 'path'; import archiver from 'archiver'; import unzipper from 'unzipper'; @@ -30,54 +30,10 @@ export const unzip = (zip: string, destination: string): Promise => { }); }; -interface ZipArgs { - /** The destination of the generated zip */ - destination: string; - objects: Array<{ - /** Object to serialize (with `JSON.stringify`) and store in the zip */ - object: any; - /** Name of this object in the generated zip */ - name: string; - }>; - /** Files or folders to include */ - paths: string[]; -} - -/** - * Adds a raw string into the ZIP archive - * - * @param archive ZIP archive to add the file to - * @param content content to add into archive - * @param nameInArchive name of file in archive - */ -const addStringToZip = ( - archive: archiver.Archiver, - content: string, - nameInArchive: string, -): void => { - archive.append(content, { name: nameInArchive }); - return; -}; - -/** - * Add the given path to the archive. If it's a file we add it directly, it it is a directory - * we recurse over all the files within that directory - * - * @param archive ZIP archive to add the file to - * @param filePath file to add, absolute path - */ -const addFileOrDirectoryToZip = async (archive: archiver.Archiver, filePath: string) => { - const isDir = await fs.promises.lstat(filePath).then(res => res.isDirectory()); - if (isDir) { - archive.directory(filePath, basename(filePath)); - } else { - archive.file(filePath, { name: basename(filePath) }); - } -}; - -export const zip = ({ destination, objects, paths }: ZipArgs): Promise => +export const zip = (source: string, destination: string): Promise => new Promise(async (resolve, reject) => { - const output = fs.createWriteStream(destination); + info('zipping', source, 'to', destination); + const output = createWriteStream(destination); const archive = archiver('zip'); // finished @@ -95,13 +51,16 @@ export const zip = ({ destination, objects, paths }: ZipArgs): Promise => // pipe all zipped data to the output archive.pipe(output); - const pathPromises = paths.map(p => addFileOrDirectoryToZip(archive, p)); - - for (const obj of objects) { - addStringToZip(archive, JSON.stringify(obj.object), obj.name); - } - - await Promise.all(pathPromises); + // avoid including the c-lightning RPC socket + const entryDataFunction: archiver.EntryDataFunction = entry => { + if (entry.name?.endsWith(join('lightningd', 'regtest', 'lightning-rpc'))) { + console.info('skipping', entry); + return false; + } + return entry; + }; + // append files from a sub-directory, putting its contents at the root of archive + archive.directory(source, false, entryDataFunction); // we've added all files, tell this to the archive so it can emit the 'close' event // once all streams have finished diff --git a/tsconfig.json b/tsconfig.json index 2fb6f38e94..a0d6591585 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -16,6 +16,5 @@ "jsx": "preserve", "baseUrl": "src" }, - "include": ["src"], - "exclude": ["src/utils/tests/resources"] + "include": ["src"] }