diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index ddf5acb3ea..0000000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,96 +0,0 @@ -version: 2 -jobs: - node10-e2e: - machine: true - environment: - USE_NVM_VERSION: "10" - steps: &stepsE2E - - checkout - - run: - name: Install nvm $USE_NVM_VERSION - command: source ~/.bashrc && nvm install $USE_NVM_VERSION && nvm use $USE_NVM_VERSION - - restore_cache: - keys: - - v1-dependencies-{{ checksum "yarn.lock" }} - - v1-dependencies- - - run: - name: Install yarn - command: >- - curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add - - && echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo - tee /etc/apt/sources.list.d/yarn.list && sudo apt-get update && sudo - apt-get install yarn - - run: - name: Docker swarm init - command: docker swarm init - - run: - name: Install and build packages - command: >- - source ~/.bashrc && nvm use $USE_NVM_VERSION && yarn && yarn bootstrap && yarn - build && cd __tests__/e2e && ../../node_modules/.bin/tsc && yarn install - - save_cache: - paths: - - node_modules - key: v1-dependencies-{{ checksum "yarn.lock" }} - - run: - name: Generate files - command: >- - source ~/.bashrc && nvm use $USE_NVM_VERSION && node --version && cd __tests__/e2e - && yarn generate -c 3 -v $USE_NVM_VERSION - - run: - name: Make scripts executable - command: >- - sudo chmod +x __tests__/e2e/dist/docker* && sudo chmod +x - __tests__/e2e/dist/node0/docker/testnet-e2e/entrypoint.sh && sudo - chmod +x __tests__/e2e/dist/node1/docker/testnet-e2e/entrypoint.sh - && sudo chmod +x - __tests__/e2e/dist/node2/docker/testnet-e2e/entrypoint.sh && sudo - chmod +x __tests__/e2e/dist/node0/ark.sh && sudo chmod +x - __tests__/e2e/dist/node1/ark.sh && sudo chmod +x - __tests__/e2e/dist/node2/ark.sh - - run: - name: Docker init and start - command: cd __tests__/e2e/dist && ./docker-init.sh && ./docker-start.sh - - run: - name: Wait 10 sec for docker containers to be up - command: sleep 10 - - run: - name: Run tests - command: >- - cd __tests__/e2e && sudo chown -R $USER:$USER ./dist/ && source - ~/.bashrc && nvm use $USE_NVM_VERSION && yarn run-tests -s scenario1 - - run: - name: Output results - node0 - when: always - command: >- - cat __tests__/e2e/dist/node0/output.log && cat - __tests__/e2e/dist/node0/errors.log - - run: - name: Output results - node1 - when: always - command: >- - cat __tests__/e2e/dist/node1/output.log && cat - __tests__/e2e/dist/node1/errors.log - - run: - name: Output results - node2 - when: always - command: >- - cat __tests__/e2e/dist/node2/output.log && cat - __tests__/e2e/dist/node2/errors.log - node11-e2e: - machine: true - environment: - USE_NVM_VERSION: "11" - steps: *stepsE2E - node12-e2e: - machine: true - environment: - USE_NVM_VERSION: "12" - steps: *stepsE2E -workflows: - version: 2 - test: - jobs: - - node10-e2e - - node11-e2e - - node12-e2e diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index c55cee77cc..d697a050c8 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -5,7 +5,7 @@ on: branches: - "**" pull_request: - types: [ready_for_review, opened] # add synchronize to trigger builds for PR merges with the base branch, currently causes duplicate builds as synchronize is treated the same as push for everything besides merges + types: [ready_for_review, synchronize, opened] # fix: synchronize triggers builds for PR merges with the base branch, currently causes duplicate builds as synchronize is 
treated the same as push for everything besides merges jobs: crypto: @@ -13,7 +13,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 diff --git a/.github/workflows/functional.yml b/.github/workflows/functional.yml index 0edbe502dd..5a9c06b457 100644 --- a/.github/workflows/functional.yml +++ b/.github/workflows/functional.yml @@ -5,7 +5,7 @@ on: branches: - "**" pull_request: - types: [ready_for_review, synchronize, opened] # synchronize triggers builds for PR merges with the base branch, currently causes duplicate builds as synchronize is treated the same as push for everything besides merges + types: [ready_for_review, synchronize, opened] # fix: synchronize triggers builds for PR merges with the base branch, currently causes duplicate builds as synchronize is treated the same as push for everything besides merges jobs: bridgechain-registration: @@ -24,7 +24,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -66,7 +66,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -108,7 +108,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -150,7 +150,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -192,7 +192,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -234,7 +234,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -276,7 +276,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -318,7 +318,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -360,7 +360,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -402,7 +402,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -444,7 +444,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -486,7 +486,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -528,7 +528,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -570,7 +570,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -612,7 +612,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -654,7 +654,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -696,7 +696,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index dbe4becc82..0c2701ee95 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -5,7 +5,7 @@ on: 
branches: - "**" pull_request: - types: [ready_for_review, synchronize, opened] # synchronize triggers builds for PR merges with the base branch, currently causes duplicate builds as synchronize is treated the same as push for everything besides merges + types: [ready_for_review, synchronize, opened] # fix: synchronize triggers builds for PR merges with the base branch, currently causes duplicate builds as synchronize is treated the same as push for everything besides merges jobs: unit: @@ -13,7 +13,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -44,7 +44,7 @@ jobs: strategy: matrix: - node-version: [10.x, 11.x, 12.x] + node-version: [10.x, 12.x] steps: - uses: actions/checkout@v1 @@ -71,3 +71,37 @@ jobs: POSTGRES_USER: ark POSTGRES_PASSWORD: password POSTGRES_DB: ark_unitnet + + e2e: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [10.x, 12.x] + + steps: + - uses: actions/checkout@v1 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + - name: Docker swarm init + run: docker swarm init + - name: Install and build packages + run: yarn setup && cd __tests__/e2e && ../../node_modules/.bin/tsc && yarn install --frozen-lockfile + - name: Generate files + run: node --version && cd __tests__/e2e && yarn generate -c 3 -v ${{ matrix.node-version }} + - name: Make scripts executable + run: sudo chmod +x __tests__/e2e/dist/docker* && sudo chmod +x __tests__/e2e/dist/node0/docker/testnet-e2e/entrypoint.sh && sudo chmod +x __tests__/e2e/dist/node1/docker/testnet-e2e/entrypoint.sh && sudo chmod +x __tests__/e2e/dist/node2/docker/testnet-e2e/entrypoint.sh && sudo chmod +x __tests__/e2e/dist/node0/ark.sh && sudo chmod +x __tests__/e2e/dist/node1/ark.sh && sudo chmod +x __tests__/e2e/dist/node2/ark.sh + - name: Docker init and start + run: cd __tests__/e2e/dist && ./docker-init.sh && ./docker-start.sh + - name: Wait 10 sec for docker containers to be up + run: sleep 10 + - name: Run tests + run: cd __tests__/e2e && sudo chown -R $USER:$USER ./dist/ && yarn run-tests -s scenario1 + - name: Output results - node0 + run: cat __tests__/e2e/dist/node0/output.log && cat __tests__/e2e/dist/node0/errors.log + - name: Output results - node1 + run: cat __tests__/e2e/dist/node1/output.log && cat __tests__/e2e/dist/node1/errors.log + - name: Output results - node2 + run: cat __tests__/e2e/dist/node2/output.log && cat __tests__/e2e/dist/node2/errors.log diff --git a/__tests__/e2e/README.md b/__tests__/e2e/README.md index 426cfd20db..efa5a8c280 100644 --- a/__tests__/e2e/README.md +++ b/__tests__/e2e/README.md @@ -23,13 +23,13 @@ Now to run the tests locally, you need to have Docker installed. Then, run this You can now run the configured tests like this : bin/e2e generate -c 3 - sudo chmod +x dist/docker* - sudo chmod +x dist/node0/docker/testnet-e2e/entrypoint.sh - sudo chmod +x dist/node1/docker/testnet-e2e/entrypoint.sh - sudo chmod +x dist/node2/docker/testnet-e2e/entrypoint.sh - sudo chmod +x dist/node0/ark.sh - sudo chmod +x dist/node1/ark.sh - sudo chmod +x dist/node2/ark.sh + chmod +x dist/docker* + chmod +x dist/node0/docker/testnet-e2e/entrypoint.sh + chmod +x dist/node1/docker/testnet-e2e/entrypoint.sh + chmod +x dist/node2/docker/testnet-e2e/entrypoint.sh + chmod +x dist/node0/ark.sh + chmod +x dist/node1/ark.sh + chmod +x dist/node2/ark.sh cd dist && ./docker-init.sh && ./docker-start.sh && cd .. 
bin/e2e run-tests -s scenario1 diff --git a/__tests__/e2e/bin/e2e b/__tests__/e2e/bin/e2e index fb24c93955..c373066dea 100755 --- a/__tests__/e2e/bin/e2e +++ b/__tests__/e2e/bin/e2e @@ -5,32 +5,36 @@ const app = require('commander') app.version(require('../package.json').version) app - .command('generate') - .description('generate the file structure for e2e testing') - .option('-c, --nodes ', 'number of nodes to generate', 1) - .option('-v, --version ', 'node version to use', 11) - .option('-n, --network ', 'network to use as a base (testnet / devnet / mainnet)', "testnet") - .option('-r, --relay', 'relay only mode : generate files for relay use', false) - .action((options) => require('../lib/generate-files')(options)) + .command('generate') + .description('generate the file structure for e2e testing') + .option('-c, --nodes ', 'number of nodes to generate', 1) + .option('-v, --version ', 'node version to use', 10) + .option('-n, --network ', 'network to use as a base (testnet / devnet / mainnet)', "testnet") + .option('-r, --relay', 'relay only mode : generate files for relay use', false) + .action((options) => require('../lib/generate-files')(options)) app - .command('run-tests') - .description('Run the tests') - .option('-s, --scenario ', 'scenario to test', 'scenario1') - .action((options) => require('../lib/test-runner')(Object.assign(options, { network: "testnet" }))) + .command('run-tests') + .description('Run the tests') + .option('-s, --scenario ', 'scenario to test', 'scenario1') + .action((options) => require('../lib/test-runner')(Object.assign(options, { + network: "testnet" + }))) app - .command('run-sync') - .description('Run a full sync') - .option('-n, --network ', 'network to use as a base (testnet / devnet / mainnet)', "testnet") - .option('-t, --timeLimit ', 'time limit in minutes before stopping sync', 1200) - .action((options) => require('../lib/test-runner')(Object.assign(options, { sync: true }))) + .command('run-sync') + .description('Run a full sync') + .option('-n, --network ', 'network to use as a base (testnet / devnet / mainnet)', "testnet") + .option('-t, --timeLimit ', 'time limit in minutes before stopping sync', 1200) + .action((options) => require('../lib/test-runner')(Object.assign(options, { + sync: true + }))) app - .command('*') - .action(env => { - app.help() - process.exit(0) - }) + .command('*') + .action(env => { + app.help() + process.exit(0) + }) app.parse(process.argv) diff --git a/__tests__/e2e/lib/config/docker/Dockerfile-10 b/__tests__/e2e/lib/config/docker/Dockerfile-10.x similarity index 100% rename from __tests__/e2e/lib/config/docker/Dockerfile-10 rename to __tests__/e2e/lib/config/docker/Dockerfile-10.x diff --git a/__tests__/e2e/lib/config/docker/Dockerfile-11 b/__tests__/e2e/lib/config/docker/Dockerfile-11 deleted file mode 100644 index e408815191..0000000000 --- a/__tests__/e2e/lib/config/docker/Dockerfile-11 +++ /dev/null @@ -1,17 +0,0 @@ -FROM node:11 - -WORKDIR /ark-core - -COPY entrypoint.sh / - -RUN apt-get update && \ - apt-get -y install --no-install-recommends \ - build-essential \ - jq \ - iptables \ - python \ - vim && \ - rm -rf /var/lib/apt/lists/* - -EXPOSE 4000 4003 - diff --git a/__tests__/e2e/lib/config/docker/Dockerfile-12 b/__tests__/e2e/lib/config/docker/Dockerfile-12.x similarity index 100% rename from __tests__/e2e/lib/config/docker/Dockerfile-12 rename to __tests__/e2e/lib/config/docker/Dockerfile-12.x diff --git a/__tests__/e2e/lib/generate-files.js b/__tests__/e2e/lib/generate-files.js index 
b54529786b..f0582434ed 100644 --- a/__tests__/e2e/lib/generate-files.js +++ b/__tests__/e2e/lib/generate-files.js @@ -37,8 +37,8 @@ class GenerateManager { this.relay = options.relay; - this.rootPath = path.dirname("../"); - this.coreRootPath = path.dirname("../../.."); + this.rootPath = path.resolve(__dirname, "../"); + this.coreRootPath = path.resolve(__dirname, "../../.."); } async generate() { @@ -70,13 +70,11 @@ class GenerateManager { await exec(`mkdir ${distNginxPath}`); - copyFiles([ - { - from: thisNginxPath, - to: distNginxPath, - files: ["docker-compose.yml", "nginx.conf"], - }, - ]); + copyFiles([{ + from: thisNginxPath, + to: distNginxPath, + files: ["docker-compose.yml", "nginx.conf"], + }, ]); console.log(`[generate-files] Files copy done for nginx`); const thisNetworkPath = path.join(this.coreRootPath, `packages/core/bin/config/${this.network}`); @@ -93,8 +91,7 @@ class GenerateManager { const arkScript = index > 0 ? "ark.sh" : "ark-network-start.sh"; - copyFiles([ - { + copyFiles([{ from: thisDockerPath, to: distDockerPath, files: [ @@ -107,16 +104,18 @@ class GenerateManager { { from: thisDockerPath, to: distNodePath, - files: [[arkScript, "ark.sh"]], + files: [ + [arkScript, "ark.sh"] + ], }, ]); // rework ark.sh script if base network is not testnet + adapt if relay mode const arkDistScript = fs.readFileSync(path.join(distNodePath, "ark.sh"), "utf8"); let arkDistScriptUpdated = arkDistScript.replace(/config\/testnet/g, `config/${this.network}`); - arkDistScriptUpdated = this.relay - ? arkDistScriptUpdated.replace(/core:run/, "relay:run") - : arkDistScriptUpdated; + arkDistScriptUpdated = this.relay ? + arkDistScriptUpdated.replace(/core:run/, "relay:run") : + arkDistScriptUpdated; fs.writeFileSync(path.join(distNodePath, "ark.sh"), arkDistScriptUpdated); @@ -141,13 +140,11 @@ class GenerateManager { console.log(`[generate-files] Files copy done for ${node}`); } - copyFiles([ - { - from: thisDockerPath, - to: path.join(this.rootPath, "dist"), - files: ["docker-init.sh", "docker-start.sh"], - }, - ]); + copyFiles([{ + from: thisDockerPath, + to: path.join(this.rootPath, "dist"), + files: ["docker-init.sh", "docker-start.sh"], + }, ]); console.log(`[generate-files] Docker files copy done`); diff --git a/__tests__/e2e/lib/utils/sync.sh b/__tests__/e2e/lib/utils/sync.sh index 9b595d55e7..92306389e3 100644 --- a/__tests__/e2e/lib/utils/sync.sh +++ b/__tests__/e2e/lib/utils/sync.sh @@ -4,9 +4,9 @@ # The one with empty db (node2) should sync from the others (node0, node1). # To use with devnet just replace mainnet by devnet and download devnet snapshot instead of mainnet. 
-# install nvm and use node 11 +# install nvm and use node 10 wget -qO- https://raw.githubusercontent.com/nvm-sh/nvm/v0.34.0/install.sh | bash -source ~/.bashrc && nvm install 11 && nvm use 11 +source ~/.bashrc && nvm install 10 && nvm use 10 # install yarn curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add - @@ -26,7 +26,7 @@ git clone https://github.com/ArkEcosystem/core.git && cd core && git checkout de yarn && yarn bootstrap && yarn build && cd __tests__/e2e && yarn install # generate files for 3 nodes (relays) network based on mainnet config -yarn generate -c 3 -v 11 -n mainnet -r +yarn generate -c 3 -v 10 -n mainnet -r # make some files executable sudo chmod +x dist/docker* diff --git a/__tests__/e2e/lib/utils/testnet.js b/__tests__/e2e/lib/utils/testnet.js index 1305e24681..d125b2f6cd 100644 --- a/__tests__/e2e/lib/utils/testnet.js +++ b/__tests__/e2e/lib/utils/testnet.js @@ -1,6 +1,5 @@ module.exports = { - delegates: [ - { + delegates: [{ passphrase: "clay harbor enemy utility margin pretty hub comic piece aerobic umbrella acquire", address: "ANBkoGqWeTSiaEVgVzSKZd3jS7UWzv9PSo", publicKey: "03287bfebba4c7881a0509717e71b34b63f31e40021c321f89ae04f84be6d6ac37", diff --git a/__tests__/integration/core-transaction-pool/processor.test.ts b/__tests__/integration/core-transaction-pool/processor.test.ts index 9714b852b2..08f0805f5a 100644 --- a/__tests__/integration/core-transaction-pool/processor.test.ts +++ b/__tests__/integration/core-transaction-pool/processor.test.ts @@ -780,8 +780,8 @@ describe("Transaction Guard", () => { const result = await processor.validate(transfers); expect(result.errors).toContainKey(transfers[0].id); - expect(result.errors[transfers[0].id][0].message).toStartWith("Cannot apply a transaction with nonce"); - expect(result.errors[transfers[0].id][0].type).toEqual("ERR_APPLY"); + expect(result.errors[transfers[0].id][0].message).toStartWith("Already forged"); + expect(result.errors[transfers[0].id][0].type).toEqual("ERR_FORGED"); }); it("should not validate an already forged transaction - trying to tweak tx id", async () => { @@ -791,13 +791,15 @@ describe("Transaction Guard", () => { .create(); await addBlock(transfers); + const originalId: string = transfers[0].id; + transfers[0].id = "c".repeat(64); const result = await processor.validate(transfers); - expect(result.errors).toContainKey(transfers[0].id); - expect(result.errors[transfers[0].id][0].message).toStartWith("Cannot apply a transaction with nonce"); - expect(result.errors[transfers[0].id][0].type).toEqual("ERR_APPLY"); + expect(result.errors).toContainKey(originalId); + expect(result.errors[originalId][0].message).toStartWith("Already forged"); + expect(result.errors[originalId][0].type).toEqual("ERR_FORGED"); }); }); diff --git a/__tests__/unit/core-blockchain/state-machine.test.ts b/__tests__/unit/core-blockchain/state-machine.test.ts index f672145795..015c0da98f 100644 --- a/__tests__/unit/core-blockchain/state-machine.test.ts +++ b/__tests__/unit/core-blockchain/state-machine.test.ts @@ -75,17 +75,17 @@ describe("State Machine", () => { - stateStorage.noBlockCounter > 5 and process queue is empty - stateStorage.p2pUpdateCounter + 1 > 3 (network keeps missing blocks) - blockchain.p2p.checkNetworkHealth() returns a forked network status`, async () => { - blockchain.isSynced = jest.fn(() => false); - blockchain.queue.idle = jest.fn(() => true); - stateStorage.noBlockCounter = 6; - stateStorage.p2pUpdateCounter = 3; + blockchain.isSynced = jest.fn(() => false); + 
blockchain.queue.idle = jest.fn(() => true); + stateStorage.noBlockCounter = 6; + stateStorage.p2pUpdateCounter = 3; - jest.spyOn(getMonitor, "checkNetworkHealth").mockImplementation(() => ({ - forked: true, - })); + jest.spyOn(getMonitor, "checkNetworkHealth").mockImplementation(() => ({ + forked: true, + })); - await expect(actionMap.checkLastDownloadedBlockSynced).toDispatch(blockchain, "FORK"); - }); + await expect(actionMap.checkLastDownloadedBlockSynced).toDispatch(blockchain, "FORK"); + }); it('should dispatch the event "SYNCED" if stateStorage.networkStart is true', async () => { blockchain.isSynced = jest.fn(() => false); @@ -294,11 +294,9 @@ describe("State Machine", () => { describe("downloadBlocks", () => { let loggerInfo; - let loggerWarn; beforeAll(() => { loggerInfo = jest.spyOn(logger, "info"); - loggerWarn = jest.spyOn(logger, "warn"); }); beforeEach(() => { @@ -336,19 +334,38 @@ describe("State Machine", () => { enQueueBlocks.mockRestore(); }); - it("should dispatch NOBLOCK if new blocks downloaded are not chained", async () => { + it("should dispatch NOBLOCK if new blocks downloaded are not chained and NETWORKHALTED if it keeps failing", async () => { + stateStorage.noBlockCounter = 0; + stateStorage.p2pUpdateCounter = 0; + const downloadedBlock = { numberOfTransactions: 2, previousBlock: genesisBlock.id, height: 3, timestamp: genesisBlock.timestamp + 115, }; + + blockchain.queue.idle = () => true; + blockchain.queue.length = () => 0; + jest.spyOn(getMonitor, "syncWithNetwork").mockReturnValue([downloadedBlock]); // tslint:disable-next-line: await-promise await expect(() => actionMap.downloadBlocks()).toDispatch(blockchain, "NOBLOCK"); - expect(loggerWarn).toHaveBeenCalledWith( - `Downloaded block not accepted: ${JSON.stringify(downloadedBlock)}`, - ); + + expect(stateStorage.noBlockCounter).toBe(1); + + for (let i = 0; i < 5; i++) { + // tslint:disable-next-line: await-promise + await expect(() => actionMap.downloadBlocks()).toDispatch(blockchain, "NOBLOCK"); + } + + expect(stateStorage.noBlockCounter).toBe(6); + expect(stateStorage.p2pUpdateCounter).toBe(0); + + // tslint:disable-next-line: await-promise + await expect(() => actionMap.checkLastDownloadedBlockSynced()).toDispatch(blockchain, "NETWORKHALTED"); + + blockchain.queue.idle = () => undefined; }); it("should dispatch NOBLOCK if new blocks downloaded are empty", async () => { @@ -399,11 +416,11 @@ describe("State Machine", () => { jest.spyOn(container.app, "resolveOptions").mockImplementation(plugin => plugin === "blockchain" ? 
{ - databaseRollback: { - maxBlockRewind: 14, - steps: 3, - }, - } + databaseRollback: { + maxBlockRewind: 14, + steps: 3, + }, + } : {}, ); }); @@ -434,22 +451,22 @@ describe("State Machine", () => { it(`should try to remove X blocks based on databaseRollback config until database.verifyBlockchain() passes and dispatch FAILURE as verifyBlockchain never passed`, async () => { - // @ts-ignore - const removeTopBlocks = jest.spyOn(blockchain, "removeTopBlocks").mockReturnValue(true); - // @ts-ignore - jest.spyOn(blockchain.database, "verifyBlockchain").mockReturnValue(false); - jest.spyOn(blockchain.state, "getLastBlock").mockReturnValue({ // @ts-ignore - data: { - height: 1, - }, + const removeTopBlocks = jest.spyOn(blockchain, "removeTopBlocks").mockReturnValue(true); + // @ts-ignore + jest.spyOn(blockchain.database, "verifyBlockchain").mockReturnValue(false); + jest.spyOn(blockchain.state, "getLastBlock").mockReturnValue({ + // @ts-ignore + data: { + height: 1, + }, + }); + + // tslint:disable-next-line: await-promise + await expect(() => actionMap.rollbackDatabase()).toDispatch(blockchain, "FAILURE"); + + expect(removeTopBlocks).toHaveBeenCalledTimes(5); // because after 5 times we get past maxBlockRewind }); - - // tslint:disable-next-line: await-promise - await expect(() => actionMap.rollbackDatabase()).toDispatch(blockchain, "FAILURE"); - - expect(removeTopBlocks).toHaveBeenCalledTimes(5); // because after 5 times we get past maxBlockRewind - }); }); }); }); diff --git a/__tests__/unit/core-blockchain/stubs/state-storage.ts b/__tests__/unit/core-blockchain/stubs/state-storage.ts index bba75ad1c1..bc415a0f9f 100644 --- a/__tests__/unit/core-blockchain/stubs/state-storage.ts +++ b/__tests__/unit/core-blockchain/stubs/state-storage.ts @@ -10,8 +10,8 @@ export class StateStoreStub implements State.IStateStore { public started: boolean; public forkedBlock: Interfaces.IBlock | undefined; public wakeUpTimeout: any; - public noBlockCounter: number; - public p2pUpdateCounter: number; + public noBlockCounter: number = 0; + public p2pUpdateCounter: number = 0; public numberOfBlocksToRollback: number | undefined; public networkStart: boolean; @@ -21,9 +21,9 @@ export class StateStoreStub implements State.IStateStore { return undefined; } - public clear(): void { } + public clear(): void {} - public clearWakeUpTimeout(): void { } + public clearWakeUpTimeout(): void {} public getCachedTransactionIds(): string[] { return []; @@ -76,9 +76,9 @@ export class StateStoreStub implements State.IStateStore { }; } - public clearCachedTransactionIds(): void { } + public clearCachedTransactionIds(): void {} - public reset(): void { } + public reset(): void {} public setLastBlock(block: Blocks.Block): void { this.lastDownloadedBlock = block.data; diff --git a/__tests__/unit/core-transaction-pool/connection.test.ts b/__tests__/unit/core-transaction-pool/connection.test.ts index b1f3f667da..4740b56de3 100644 --- a/__tests__/unit/core-transaction-pool/connection.test.ts +++ b/__tests__/unit/core-transaction-pool/connection.test.ts @@ -779,30 +779,8 @@ describe("Connection", () => { expect(getTransaction).toHaveBeenCalled(); expect(findByPublicKey).not.toHaveBeenCalled(); - expect(throwIfCannotBeApplied).toHaveBeenCalled(); expect(applyToSender).toHaveBeenCalled(); }); - - it("should not apply transaction to wallet if throwIfCannotBeApplied() failed", async () => { - const transactionHandler = await Handlers.Registry.get(TransactionType.Transfer); - throwIfCannotBeApplied = jest.spyOn(transactionHandler, 
"throwIfCannotBeApplied").mockImplementation(() => { - throw new Error("throw from test"); - }); - const purgeByPublicKey = jest.spyOn(connection, "purgeByPublicKey").mockReturnValue(); - - updateSenderNonce(mockData.dummy1); - addTransactions([mockData.dummy1]); - - await connection.buildWallets(); - - expect(applyToSender).not.toHaveBeenCalled(); - expect(throwIfCannotBeApplied).toHaveBeenCalledWith( - mockData.dummy1, - findByPublicKeyWallet, - (connection as any).databaseService.walletManager, - ); - expect(purgeByPublicKey).not.toHaveBeenCalledWith(mockData.dummy1.data.senderPublicKey); - }); }); describe("senderHasTransactionsOfType", () => { @@ -845,14 +823,20 @@ describe("Connection", () => { it("save and restore transactions", async () => { await expect(connection.getPoolSize()).resolves.toBe(0); + // Reset the senders' nonces to cleanup leftovers from previous tests. + updateSenderNonce(mockData.dummy1); + updateSenderNonce(mockData.dummy10); + + // Be sure to use transactions with appropriate nonce - can't fire a transaction + // with nonce 5 if the sender wallet has nonce 1, for example. + const transactions = [mockData.dummy1, mockData.dummy10, mockData.dummyLarge1]; + indexWalletWithSufficientBalance(mockData.dummy1); indexWalletWithSufficientBalance(mockData.dummyLarge1); - const transactions = [mockData.dummy1, mockData.dummyLarge1]; - addTransactions(transactions); - await expect(connection.getPoolSize()).resolves.toBe(2); + await expect(connection.getPoolSize()).resolves.toBe(transactions.length); connection.disconnect(); @@ -862,7 +846,7 @@ describe("Connection", () => { await delay(200); - await expect(connection.getPoolSize()).resolves.toBe(2); + await expect(connection.getPoolSize()).resolves.toBe(transactions.length); for (const t of transactions) { expect((await connection.getTransaction(t.id)).serialized).toEqual(t.serialized); diff --git a/__tests__/unit/core-transactions/handler.test.ts b/__tests__/unit/core-transactions/handler.test.ts index 83e481ed4b..6423886906 100644 --- a/__tests__/unit/core-transactions/handler.test.ts +++ b/__tests__/unit/core-transactions/handler.test.ts @@ -206,6 +206,19 @@ describe("General Tests", () => { expect(recipientWallet.balance).toEqual(Utils.BigNumber.make(recipientBalance).plus(instance.data.amount)); }); + + it("should increase nonce when applying v1 transactions", async () => { + senderWallet.nonce = Utils.BigNumber.ZERO; + const legacyTransaction = TransactionFactory.transfer("AbfQq8iRSf9TFQRzQWo33dHYU7HFMS17Zd", 10000000) + .withVersion(1) + .withFee(10000000) + .withPassphrase("clay harbor enemy utility margin pretty hub comic piece aerobic umbrella acquire") + .build()[0]; + + expect(senderWallet.nonce).toEqual(Utils.BigNumber.ZERO); + await handler.apply(legacyTransaction, walletManager); + expect(senderWallet.nonce).toEqual(Utils.BigNumber.ONE); + }); }); describe("revert", () => { @@ -244,6 +257,19 @@ describe("General Tests", () => { expect(senderWallet.nonce.isZero()).toBeTrue(); expect(recipientWallet.balance).toEqual(Utils.BigNumber.make(recipientBalance).minus(instance.data.amount)); }); + + it("should decrease nonce when reverting v1 transactions", async () => { + senderWallet.nonce = Utils.BigNumber.ONE; + const legacyTransaction = TransactionFactory.transfer("AbfQq8iRSf9TFQRzQWo33dHYU7HFMS17Zd", 10000000) + .withVersion(1) + .withFee(10000000) + .withPassphrase("clay harbor enemy utility margin pretty hub comic piece aerobic umbrella acquire") + .build()[0]; + + 
expect(senderWallet.nonce).toEqual(Utils.BigNumber.ONE);
+            await handler.revert(legacyTransaction, walletManager);
+            expect(senderWallet.nonce).toEqual(Utils.BigNumber.ZERO);
+        });
     });
 
     describe("dynamicFees", () => {
diff --git a/install.sh b/install.sh
index 36739c76f2..296c670d34 100644
--- a/install.sh
+++ b/install.sh
@@ -107,12 +107,12 @@ sudo rm -rf ~/{.npm,.forever,.node*,.cache,.nvm}
 
 if [[ ! -z $DEB ]]; then
     sudo wget --quiet -O - https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -
-    (echo "deb https://deb.nodesource.com/node_11.x $(lsb_release -s -c) main" | sudo tee /etc/apt/sources.list.d/nodesource.list)
+    (echo "deb https://deb.nodesource.com/node_10.x $(lsb_release -s -c) main" | sudo tee /etc/apt/sources.list.d/nodesource.list)
     sudo apt-get update
     sudo apt-get install nodejs -y
 elif [[ ! -z $RPM ]]; then
     sudo yum install gcc-c++ make -y
-    curl -sL https://rpm.nodesource.com/setup_11.x | sudo -E bash - > /dev/null 2>&1
+    curl -sL https://rpm.nodesource.com/setup_10.x | sudo -E bash - > /dev/null 2>&1
 fi
 
 success "Installed node.js & npm!"
diff --git a/packages/core-blockchain/src/state-machine.ts b/packages/core-blockchain/src/state-machine.ts
index 940b256888..119098bc93 100644
--- a/packages/core-blockchain/src/state-machine.ts
+++ b/packages/core-blockchain/src/state-machine.ts
@@ -72,9 +72,7 @@ blockchainMachine.actionMap = (blockchain: Blockchain) => ({
             } else {
                 stateStorage.p2pUpdateCounter++;
             }
-        }
-
-        if (stateStorage.lastDownloadedBlock && blockchain.isSynced(stateStorage.lastDownloadedBlock)) {
+        } else if (stateStorage.lastDownloadedBlock && blockchain.isSynced(stateStorage.lastDownloadedBlock)) {
             stateStorage.noBlockCounter = 0;
             stateStorage.p2pUpdateCounter = 0;
 
@@ -227,9 +225,6 @@ blockchainMachine.actionMap = (blockchain: Blockchain) => ({
                 )}`,
             );
 
-            stateStorage.noBlockCounter = 0;
-            stateStorage.p2pUpdateCounter = 0;
-
             try {
                 blockchain.enqueueBlocks(blocks);
                 blockchain.dispatch("DOWNLOADED");
diff --git a/packages/core-interfaces/src/core-state/wallets.ts b/packages/core-interfaces/src/core-state/wallets.ts
index e339a9d035..df405d2e78 100644
--- a/packages/core-interfaces/src/core-state/wallets.ts
+++ b/packages/core-interfaces/src/core-state/wallets.ts
@@ -40,6 +40,20 @@ export interface IWallet {
         transaction: Interfaces.ITransactionData,
         multisignature?: Interfaces.IMultiSignatureAsset,
     ): boolean;
+
+    /**
+     * Verify that the transaction's nonce is the wallet nonce plus one, so that the
+     * transaction can be applied to the wallet.
+     * Throw an exception if it is not.
+     */
+    verifyTransactionNonceApply(transaction: Interfaces.ITransaction): void;
+
+    /**
+     * Verify that the transaction's nonce is the same as the wallet nonce, so that the
+     * transaction can be reverted from the wallet.
+     * Throw an exception if it is not.
+     */
+    verifyTransactionNonceRevert(transaction: Interfaces.ITransaction): void;
 }
 
 export interface IWalletDelegateAttributes {
diff --git a/packages/core-state/src/stores/state.ts b/packages/core-state/src/stores/state.ts
index 754957a344..49611be253 100644
--- a/packages/core-state/src/stores/state.ts
+++ b/packages/core-state/src/stores/state.ts
@@ -106,6 +106,9 @@ export class StateStore implements State.IStateStore {
         if (this.lastBlocks.size > app.resolveOptions("state").storage.maxLastBlocks) {
             this.lastBlocks = this.lastBlocks.delete(this.lastBlocks.first().data.height);
         }
+
+        this.noBlockCounter = 0;
+        this.p2pUpdateCounter = 0;
     }
 
     /**
diff --git a/packages/core-state/src/wallets/wallet.ts b/packages/core-state/src/wallets/wallet.ts
index 69714787e8..e0ba5a3b8f 100644
--- a/packages/core-state/src/wallets/wallet.ts
+++ b/packages/core-state/src/wallets/wallet.ts
@@ -161,6 +161,28 @@ export class Wallet implements State.IWallet {
         return verified;
     }
 
+    /**
+     * Verify that the transaction's nonce is the wallet nonce plus one, so that the
+     * transaction can be applied to the wallet.
+     * Throw an exception if it is not.
+     */
+    public verifyTransactionNonceApply(transaction: Interfaces.ITransaction): void {
+        if (transaction.data.version > 1 && !this.nonce.plus(1).isEqualTo(transaction.data.nonce)) {
+            throw new Errors.UnexpectedNonceError(transaction.data.nonce, this, false);
+        }
+    }
+
+    /**
+     * Verify that the transaction's nonce is the same as the wallet nonce, so that the
+     * transaction can be reverted from the wallet.
+     * Throw an exception if it is not.
+     */
+    public verifyTransactionNonceRevert(transaction: Interfaces.ITransaction): void {
+        if (transaction.data.version > 1 && !this.nonce.isEqualTo(transaction.data.nonce)) {
+            throw new Errors.UnexpectedNonceError(transaction.data.nonce, this, true);
+        }
+    }
+
     public auditApply(transaction: Interfaces.ITransactionData): any[] {
         const audit = [];
 
diff --git a/packages/core-transaction-pool/src/connection.ts b/packages/core-transaction-pool/src/connection.ts
index 790e50f292..d865179278 100644
--- a/packages/core-transaction-pool/src/connection.ts
+++ b/packages/core-transaction-pool/src/connection.ts
@@ -71,7 +71,7 @@ export class Connection implements TransactionPool.IConnection {
     }
 
     public makeProcessor(): TransactionPool.IProcessor {
-        return new Processor(this, this.walletManager);
+        return new Processor(this);
     }
 
     public async getTransactionsByType(type: number, typeGroup?: number): Promise> {
@@ -500,7 +500,6 @@ export class Connection implements TransactionPool.IConnection {
                     transaction.type,
                     transaction.typeGroup,
                 );
-                await handler.throwIfCannotBeApplied(transaction, sender, databaseWalletManager);
 
                 await handler.applyToSender(transaction, localWalletManager);
 
diff --git a/packages/core-transaction-pool/src/processor.ts b/packages/core-transaction-pool/src/processor.ts
index cd5ab75719..b27adeb866 100644
--- a/packages/core-transaction-pool/src/processor.ts
+++ b/packages/core-transaction-pool/src/processor.ts
@@ -5,7 +5,6 @@ import { Crypto, Enums, Errors as CryptoErrors, Interfaces, Managers, Transactio
 import pluralize from "pluralize";
 import { dynamicFeeMatcher } from "./dynamic-fee";
 import { IDynamicFeeMatch, ITransactionsCached, ITransactionsProcessed } from "./interfaces";
-import { WalletManager } from "./wallet-manager";
 
 /**
  * @TODO: this class has too many responsibilities at the moment.
@@ -19,7 +18,7 @@ export class Processor implements TransactionPool.IProcessor {
     private readonly invalid: Map = new Map();
     private readonly errors: { [key: string]: TransactionPool.ITransactionErrorResponse[] } = {};
 
-    constructor(private readonly pool: TransactionPool.IConnection, private readonly walletManager: WalletManager) {}
+    constructor(private readonly pool: TransactionPool.IConnection) {}
 
     public async validate(transactions: Interfaces.ITransactionData[]): Promise {
         this.cacheTransactions(transactions);
@@ -121,7 +120,6 @@ export class Processor implements TransactionPool.IProcessor {
             );
             if (await handler.verify(transactionInstance, this.pool.walletManager)) {
                 try {
-                    await this.walletManager.throwIfCannotBeApplied(transactionInstance);
                     const dynamicFee: IDynamicFeeMatch = await dynamicFeeMatcher(transactionInstance);
                     if (!dynamicFee.enterPool && !dynamicFee.broadcast) {
                         this.pushError(
diff --git a/packages/core-transactions/src/handlers/htlc-claim.ts b/packages/core-transactions/src/handlers/htlc-claim.ts
index d36e473299..a654ae1d9c 100644
--- a/packages/core-transactions/src/handlers/htlc-claim.ts
+++ b/packages/core-transactions/src/handlers/htlc-claim.ts
@@ -2,12 +2,7 @@ import { app } from "@arkecosystem/core-container";
 import { Database, State, TransactionPool } from "@arkecosystem/core-interfaces";
 import { Crypto, Enums, Interfaces, Managers, Transactions, Utils } from "@arkecosystem/crypto";
 import assert = require("assert");
-import {
-    HtlcLockExpiredError,
-    HtlcLockTransactionNotFoundError,
-    HtlcSecretHashMismatchError,
-    UnexpectedNonceError,
-} from "../errors";
+import { HtlcLockExpiredError, HtlcLockTransactionNotFoundError, HtlcSecretHashMismatchError } from "../errors";
 import { HtlcLockTransactionHandler } from "./htlc-lock";
 import { TransactionHandler, TransactionHandlerConstructor } from "./transaction";
 
@@ -137,9 +132,7 @@ export class HtlcClaimTransactionHandler extends TransactionHandler {
 
         await this.throwIfCannotBeApplied(transaction, sender, walletManager);
 
-        if (!sender.nonce.plus(1).isEqualTo(data.nonce)) {
-            throw new UnexpectedNonceError(data.nonce, sender, false);
-        }
+        sender.verifyTransactionNonceApply(transaction);
 
         sender.nonce = data.nonce;
 
@@ -170,9 +163,7 @@ export class HtlcClaimTransactionHandler extends TransactionHandler {
         const sender: State.IWallet = walletManager.findByPublicKey(transaction.data.senderPublicKey);
         const data: Interfaces.ITransactionData = transaction.data;
 
-        if (!sender.nonce.isEqualTo(data.nonce)) {
-            throw new UnexpectedNonceError(data.nonce, sender, true);
-        }
+        sender.verifyTransactionNonceRevert(transaction);
 
         sender.nonce = sender.nonce.minus(1);
 
diff --git a/packages/core-transactions/src/handlers/htlc-refund.ts b/packages/core-transactions/src/handlers/htlc-refund.ts
index a5acbd1a69..d0f0def8d1 100644
--- a/packages/core-transactions/src/handlers/htlc-refund.ts
+++ b/packages/core-transactions/src/handlers/htlc-refund.ts
@@ -2,7 +2,7 @@ import { app } from "@arkecosystem/core-container";
 import { Database, State, TransactionPool } from "@arkecosystem/core-interfaces";
 import { Enums, Interfaces, Managers, Transactions, Utils } from "@arkecosystem/crypto";
 import assert = require("assert");
-import { HtlcLockNotExpiredError, HtlcLockTransactionNotFoundError, UnexpectedNonceError } from "../errors";
+import { HtlcLockNotExpiredError, HtlcLockTransactionNotFoundError } from "../errors";
 import { HtlcLockTransactionHandler } from "./htlc-lock";
 import { TransactionHandler, TransactionHandlerConstructor } from "./transaction";
 
@@ -126,9 +126,7 @@ export class HtlcRefundTransactionHandler extends TransactionHandler {
 
         await this.throwIfCannotBeApplied(transaction, sender, walletManager);
 
-        if (!sender.nonce.plus(1).isEqualTo(data.nonce)) {
-            throw new UnexpectedNonceError(data.nonce, sender, false);
-        }
+        sender.verifyTransactionNonceApply(transaction);
 
         sender.nonce = data.nonce;
 
@@ -153,11 +151,8 @@ export class HtlcRefundTransactionHandler extends TransactionHandler {
         walletManager: State.IWalletManager,
     ): Promise {
         const sender: State.IWallet = walletManager.findByPublicKey(transaction.data.senderPublicKey);
-        const data: Interfaces.ITransactionData = transaction.data;
 
-        if (!sender.nonce.isEqualTo(data.nonce)) {
-            throw new UnexpectedNonceError(data.nonce, sender, true);
-        }
+        sender.verifyTransactionNonceRevert(transaction);
 
         sender.nonce = sender.nonce.minus(1);
 
diff --git a/packages/core-transactions/src/handlers/transaction.ts b/packages/core-transactions/src/handlers/transaction.ts
index 45c9143ee4..04d289ee85 100644
--- a/packages/core-transactions/src/handlers/transaction.ts
+++ b/packages/core-transactions/src/handlers/transaction.ts
@@ -12,7 +12,6 @@ import {
     LegacyMultiSignatureError,
     SenderWalletMismatchError,
     UnexpectedMultiSignatureError,
-    UnexpectedNonceError,
     UnexpectedSecondSignatureError,
 } from "../errors";
 import { ITransactionHandler } from "../interfaces";
@@ -72,9 +71,7 @@ export abstract class TransactionHandler implements ITransactionHandler {
             return;
         }
 
-        if (data.version > 1 && data.nonce.isLessThanOrEqualTo(sender.nonce)) {
-            throw new UnexpectedNonceError(data.nonce, sender, false);
-        }
+        sender.verifyTransactionNonceApply(transaction);
 
         if (
             sender.balance
@@ -176,14 +173,16 @@ export abstract class TransactionHandler implements ITransactionHandler {
 
         await this.throwIfCannotBeApplied(transaction, sender, walletManager);
 
+        let nonce: Utils.BigNumber;
         if (data.version > 1) {
-            if (!sender.nonce.plus(1).isEqualTo(data.nonce)) {
-                throw new UnexpectedNonceError(data.nonce, sender, false);
-            }
-
-            sender.nonce = data.nonce;
+            sender.verifyTransactionNonceApply(transaction);
+            nonce = data.nonce;
+        } else {
+            nonce = sender.nonce.plus(1);
         }
 
+        sender.nonce = nonce;
+
         const newBalance: Utils.BigNumber = sender.balance.minus(data.amount).minus(data.fee);
 
         if (process.env.CORE_ENV === "test") {
@@ -205,12 +204,10 @@ export abstract class TransactionHandler implements ITransactionHandler {
         sender.balance = sender.balance.plus(data.amount).plus(data.fee);
 
         if (data.version > 1) {
-            if (!sender.nonce.isEqualTo(data.nonce)) {
-                throw new UnexpectedNonceError(data.nonce, sender, true);
-            }
-
-            sender.nonce = sender.nonce.minus(1);
+            sender.verifyTransactionNonceRevert(transaction);
         }
+
+        sender.nonce = sender.nonce.minus(1);
     }
 
     public abstract async applyToRecipient(