From 18ddcf7dd9d197786fbed673b6f9f67f71b05a72 Mon Sep 17 00:00:00 2001 From: Patricio Palladino Date: Sat, 3 Aug 2024 18:31:42 +0000 Subject: [PATCH] Solidity Build System: first iteration --- package.json | 3 +- pnpm-lock.yaml | 100 +- scripts/check-v-next-dependencies.js | 8 +- v-next/example-project/contracts/A.sol | 7 + v-next/example-project/contracts/B.sol | 6 + v-next/example-project/contracts/C.sol | 8 + v-next/example-project/contracts/D.sol | 1 + .../example-project/contracts/NoImports.sol | 4 + .../contracts/UserRemappedImport.sol | 4 + v-next/example-project/hardhat.config.ts | 29 + v-next/example-project/package.json | 9 +- .../scripts/typed-artifacts.ts | 6 + v-next/hardhat-build-system/package.json | 2 +- v-next/hardhat-build-system/test/helpers.ts | 1 + v-next/hardhat-errors/src/descriptors.ts | 258 +++ .../test-tmp/helpers/artifact-manager-mock.ts | 13 +- .../helpers/artifacts/ambiguous-library.ts | 2 +- .../helpers/artifacts/error-messages.ts | 2 +- .../test-tmp/helpers/artifacts/gas-config.ts | 2 +- .../artifacts/greeter-with-constructor-arg.ts | 2 +- .../test-tmp/helpers/artifacts/greeter.ts | 2 +- .../test-tmp/helpers/artifacts/igreeter.ts | 2 +- .../helpers/artifacts/non-unique-lib.ts | 2 +- .../artifacts/test-ambiguous-library.ts | 2 +- .../helpers/artifacts/test-contract-lib.ts | 2 +- .../helpers/artifacts/test-library.ts | 2 +- .../helpers/artifacts/test-non-unique-lib.ts | 2 +- v-next/hardhat-utils/package.json | 1 + v-next/hardhat-utils/src/fs.ts | 39 + v-next/hardhat-utils/src/path.ts | 26 + v-next/hardhat-utils/src/resolve.ts | 63 + .../resolve-fixture/.gitignore | 1 + .../node_modules/dep/package.json | 5 + v-next/hardhat-utils/test/path.ts | 104 +- v-next/hardhat-utils/test/resolve.ts | 69 + v-next/hardhat/package.json | 15 +- v-next/hardhat/src/index.ts | 4 + .../artifacts/artifacts-manager.ts | 41 +- .../internal/builtin-plugins/compile/index.ts | 18 - .../builtin-plugins/compile/task-action.ts | 65 - .../src/internal/builtin-plugins/index.ts | 3 - .../solidity/build-profiles.ts | 12 + .../solidity/build-system/artifacts.ts | 139 ++ .../solidity/build-system/build-system.ts | 717 ++++++++ .../solidity/build-system/compilation-job.ts | 127 ++ .../build-system/compiler/compiler.ts | 144 ++ .../build-system/compiler/downloader.ts | 404 +++++ .../solidity/build-system/compiler/index.ts | 107 ++ .../build-system/compiler/solcjs-runner.ts | 39 + .../solidity/build-system/debug-utils.ts | 80 + .../build-system/dependency-graph-building.ts | 57 + .../solidity/build-system/dependency-graph.ts | 146 ++ .../resolver/dependency-resolver.ts | 1520 +++++++++++++++++ .../build-system/resolver/remappings.ts | 91 + .../solidity/build-system/resolver/types.ts | 91 + .../solidity/build-system/root-paths-utils.ts | 81 + .../build-system/solc-config-selection.ts | 202 +++ .../solidity/build-system/solc-info.ts | 74 + .../builtin-plugins/solidity/config.ts | 303 ++++ .../solidity/hook-handlers/config.ts | 54 +- .../solidity/hook-handlers/hre.ts | 115 ++ .../builtin-plugins/solidity/index.ts | 36 + .../builtin-plugins/solidity/tasks/compile.ts | 67 + .../solidity/type-extensions.ts | 78 +- v-next/hardhat/src/internal/constants.ts | 4 + v-next/hardhat/src/internal/core/hre.ts | 8 +- v-next/hardhat/src/types/artifacts.ts | 267 ++- v-next/hardhat/src/types/hre.ts | 3 +- v-next/hardhat/src/types/solidity.ts | 6 + .../src/types/solidity/build-system.ts | 285 ++++ .../src/types/solidity/compilation-job.ts | 41 + .../hardhat/src/types/solidity/compiler-io.ts | 82 + 
.../src/types/solidity/dependency-graph.ts | 57 + .../src/types/solidity/resolved-file.ts | 113 ++ .../src/types/solidity/solidity-artifacts.ts | 62 + .../resolver/dependency-resolver.ts | 928 ++++++++++ .../build-system/resolver/remappings.ts | 212 +++ .../resolver/test-fixtures/.gitignore | 1 + .../test-fixtures/entirely-local/A.sol | 2 + .../@scope/dependency/contracts/File.sol | 1 + .../@scope/dependency/package.json | 4 + .../node_modules/exports/package.json | 7 + .../monorepo/node_modules/hardhat-project | 1 + .../monorepo/node_modules/hardhat/console.sol | 1 + .../node_modules/hardhat/package.json | 4 + .../monorepo/node_modules/hoisted/File.sol | 1 + .../node_modules/hoisted/package.json | 4 + .../monorepo/node_modules/local-dependency | 1 + .../packages/hardhat-project/File.sol | 1 + .../hardhat-project/contracts/File.sol | 1 + .../hardhat-project/contracts/File2.sol | 1 + .../packages/hardhat-project/hardhat/File.sol | 1 + .../node_modules/dependency/File.sol | 1 + .../dependency/contracts/File.sol | 1 + .../dependencydependency/File.sol | 1 + .../dependencydependency/package.json | 4 + .../node_modules/dependency/npm/File.sol | 1 + .../node_modules/dependency/package.json | 4 + .../other-name/contracts/File.sol | 1 + .../node_modules/other-name/package.json | 4 + .../packages/hardhat-project/npm/File.sol | 1 + .../packages/hardhat-project/package.json | 4 + .../local-dependency/contracts/File.sol | 1 + .../dependency/contracts/File.sol | 1 + .../node_modules/dependency/package.json | 4 + .../packages/local-dependency/package.json | 4 + .../builtin-plugins/solidity/config.ts | 270 +++ v-next/hardhat/test/internal/cli/main.ts | 3 +- ...xample-mock-artifacts-plugin-using-test.ts | 3 +- .../test/internal/hre-intialization.ts | 1 + ...create-mock-hardhat-runtime-environment.ts | 6 +- .../test-helpers/mock-artifacts-manager.ts | 25 +- v-next/hardhat/tsconfig.json | 3 - 113 files changed, 7633 insertions(+), 428 deletions(-) create mode 100644 v-next/example-project/contracts/A.sol create mode 100644 v-next/example-project/contracts/B.sol create mode 100644 v-next/example-project/contracts/C.sol create mode 100644 v-next/example-project/contracts/D.sol create mode 100644 v-next/example-project/contracts/NoImports.sol create mode 100644 v-next/example-project/contracts/UserRemappedImport.sol create mode 100644 v-next/example-project/scripts/typed-artifacts.ts create mode 100644 v-next/hardhat-utils/src/resolve.ts create mode 100644 v-next/hardhat-utils/test/fixture-projects/resolve-fixture/.gitignore create mode 100644 v-next/hardhat-utils/test/fixture-projects/resolve-fixture/node_modules/dep/package.json create mode 100644 v-next/hardhat-utils/test/resolve.ts delete mode 100644 v-next/hardhat/src/internal/builtin-plugins/compile/index.ts delete mode 100644 v-next/hardhat/src/internal/builtin-plugins/compile/task-action.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-profiles.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/artifacts.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/build-system.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compilation-job.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/compiler.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/downloader.ts create mode 100644 
v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/index.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/solcjs-runner.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/debug-utils.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/dependency-graph-building.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/dependency-graph.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/resolver/dependency-resolver.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/resolver/remappings.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/resolver/types.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/root-paths-utils.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/solc-config-selection.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/solc-info.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/config.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/hook-handlers/hre.ts create mode 100644 v-next/hardhat/src/internal/builtin-plugins/solidity/tasks/compile.ts create mode 100644 v-next/hardhat/src/types/solidity.ts create mode 100644 v-next/hardhat/src/types/solidity/build-system.ts create mode 100644 v-next/hardhat/src/types/solidity/compilation-job.ts create mode 100644 v-next/hardhat/src/types/solidity/compiler-io.ts create mode 100644 v-next/hardhat/src/types/solidity/dependency-graph.ts create mode 100644 v-next/hardhat/src/types/solidity/resolved-file.ts create mode 100644 v-next/hardhat/src/types/solidity/solidity-artifacts.ts create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/dependency-resolver.ts create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/remappings.ts create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/.gitignore create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/entirely-local/A.sol create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/@scope/dependency/contracts/File.sol create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/@scope/dependency/package.json create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/exports/package.json create mode 120000 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hardhat-project create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hardhat/console.sol create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hardhat/package.json create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hoisted/File.sol create mode 100644 
v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hoisted/package.json create mode 120000 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/local-dependency create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/File.sol create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/contracts/File.sol create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/contracts/File2.sol create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/hardhat/File.sol create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/File.sol create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/contracts/File.sol create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/node_modules/dependencydependency/File.sol create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/node_modules/dependencydependency/package.json create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/npm/File.sol create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/package.json create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/other-name/contracts/File.sol create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/other-name/package.json create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/npm/File.sol create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/package.json create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/contracts/File.sol create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/node_modules/dependency/contracts/File.sol create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/node_modules/dependency/package.json create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/package.json create mode 100644 v-next/hardhat/test/internal/builtin-plugins/solidity/config.ts diff --git a/package.json b/package.json 
index 1224d51a67..1438bc516b 100644 --- a/package.json +++ b/package.json @@ -22,6 +22,5 @@ "lint:fix": "pnpm run --recursive lint:fix && pnpm prettier --write", "prettier": "prettier *.md \"{docs,.github}/**/*.{md,yml,ts,js}\" \"scripts/**/*.js\"", "vnext-full-check": "pnpm run --recursive --filter \"./v-next/**\" build && pnpm run --recursive --filter \"./v-next/**\" lint && pnpm run --recursive --filter \"./v-next/**\" test" - }, - "dependencies": {} + } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 72290e3e44..c09a4b7035 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1443,6 +1443,9 @@ importers: '@ignored/hardhat-vnext-node-test-runner': specifier: workspace:^3.0.0-next.0 version: link:../hardhat-node-test-runner + '@openzeppelin/contracts': + specifier: ^5.0.2 + version: 5.0.2 '@types/mocha': specifier: '>=9.1.0' version: 10.0.7 @@ -1467,9 +1470,6 @@ importers: '@ignored/edr': specifier: 0.6.2-alpha.0 version: 0.6.2-alpha.0 - '@ignored/hardhat-vnext-build-system': - specifier: workspace:^3.0.0-next.0 - version: link:../hardhat-build-system '@ignored/hardhat-vnext-errors': specifier: workspace:^3.0.0-next.3 version: link:../hardhat-errors @@ -1479,9 +1479,15 @@ importers: '@ignored/hardhat-vnext-zod-utils': specifier: workspace:^3.0.0-next.3 version: link:../hardhat-zod-utils + '@nomicfoundation/solidity-analyzer': + specifier: ^0.1.0 + version: 0.1.2 '@sentry/node': specifier: ^5.18.1 version: 5.30.0 + adm-zip: + specifier: ^0.4.16 + version: 0.4.16 chalk: specifier: ^5.3.0 version: 5.3.0 @@ -1494,9 +1500,15 @@ importers: ethereum-cryptography: specifier: ^2.2.1 version: 2.2.1 + p-map: + specifier: ^7.0.2 + version: 7.0.2 semver: specifier: ^7.6.3 version: 7.6.3 + solc: + specifier: ^0.8.27 + version: 0.8.27(debug@4.3.7) tsx: specifier: ^4.11.0 version: 4.19.0 @@ -1513,6 +1525,9 @@ importers: '@nomicfoundation/hardhat-test-utils': specifier: workspace:^ version: link:../hardhat-test-utils + '@types/adm-zip': + specifier: ^0.5.5 + version: 0.5.5 '@types/debug': specifier: ^4.1.4 version: 4.1.12 @@ -1613,8 +1628,8 @@ importers: specifier: ^7.6.3 version: 7.6.3 solc: - specifier: 0.7.3 - version: 0.7.3(debug@4.3.7) + specifier: ^0.8.27 + version: 0.8.27(debug@4.3.7) undici: specifier: ^6.16.1 version: 6.19.8 @@ -3201,6 +3216,9 @@ packages: resolution: {integrity: sha512-q4n32/FNKIhQ3zQGGw5CvPF6GTvDCpYwIf7bEY/dZTZbgfDsHyjJwURxUJf3VQuuJj+fDIFl4+KkBVbw4Ef6jA==} engines: {node: '>= 12'} + '@openzeppelin/contracts@5.0.2': + resolution: {integrity: sha512-ytPc6eLGcHHnapAZ9S+5qsdomhjo6QBHTDRRBFfTxXIpsicMhVPouPgmUPebZZZGX7vt9USA+Z+0M0dSVtSUEA==} + '@pkgjs/parseargs@0.11.0': resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} @@ -3299,6 +3317,9 @@ packages: hardhat: ^2.9.9 typechain: ^8.3.2 + '@types/adm-zip@0.5.5': + resolution: {integrity: sha512-YCGstVMjc4LTY5uK9/obvxBya93axZOVOyf2GSUulADzmLhYE45u2nAssCs/fWBs1Ifq5Vat75JTPwd5XZoPJw==} + '@types/async-eventemitter@0.2.4': resolution: {integrity: sha512-2Bq61VD01kgLf1XkK2xPtoBcu7fgn/km5JyEX9v0BlG5VQBzA+BlF9umFk+8gR8S4+eK7MgDY2oyVZCu6ar3Jw==} @@ -4115,9 +4136,6 @@ packages: resolution: {integrity: sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==} engines: {node: '>=14'} - commander@3.0.2: - resolution: {integrity: sha512-Gar0ASD4BDyKC4hl4DwHqDrmvjoxWKZigVnAbn5H1owvm4CxCPdb0HQDehwNYMJpla5+M2tPmPARzhtYuwpHow==} - commander@8.3.0: resolution: {integrity: 
sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==} engines: {node: '>= 12'} @@ -4766,9 +4784,6 @@ packages: fs-constants@1.0.0: resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} - fs-extra@0.30.0: - resolution: {integrity: sha512-UvSPKyhMn6LEd/WpUaV9C9t3zATuqoqfWc3QdPhPLb58prN9tqYPlPWi8Krxi44loBoUzlobqZ3+8tGpxxSzwA==} - fs-extra@10.1.0: resolution: {integrity: sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==} engines: {node: '>=12'} @@ -5302,9 +5317,6 @@ packages: engines: {node: '>=6'} hasBin: true - jsonfile@2.4.0: - resolution: {integrity: sha512-PKllAqbgLgxHaj8TElYymKCAgrASebJrWpTnEkOaTowt23VKXXN0sUeriJ+eh7y6ufb/CC5ap11pz71/cM0hUw==} - jsonfile@4.0.0: resolution: {integrity: sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==} @@ -5328,9 +5340,6 @@ packages: resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==} engines: {node: '>=0.10.0'} - klaw@1.3.1: - resolution: {integrity: sha512-TED5xi9gGQjGpNnvRWknrwAB1eL5GciPfVFOt3Vk1OJCVDQbzuSfrF3hkUQKlsgKrG1F+0t5W0m+Fje1jIt8rw==} - kleur@3.0.3: resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} engines: {node: '>=6'} @@ -5701,6 +5710,10 @@ packages: resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==} engines: {node: '>=10'} + p-map@7.0.2: + resolution: {integrity: sha512-z4cYYMMdKHzw4O5UkWJImbZynVIo0lSGTXc7bzB1e/rrDqkgGUNysK/o4bTr+0+xKvvLoTyGqYC4Fgljy9qe1Q==} + engines: {node: '>=18'} + p-try@1.0.0: resolution: {integrity: sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==} engines: {node: '>=4'} @@ -5988,11 +6001,6 @@ packages: rfdc@1.4.1: resolution: {integrity: sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==} - rimraf@2.7.1: - resolution: {integrity: sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==} - deprecated: Rimraf versions prior to v4 are no longer supported - hasBin: true - rimraf@3.0.2: resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} deprecated: Rimraf versions prior to v4 are no longer supported @@ -6145,16 +6153,16 @@ packages: resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==} engines: {node: '>=10'} - solc@0.7.3: - resolution: {integrity: sha512-GAsWNAjGzIDg7VxzP6mPjdurby3IkGCjQcM8GFYZT6RyaoUZKmMU6Y7YwG+tFGhv7dwZ8rmR4iwFDrrD99JwqA==} - engines: {node: '>=8.0.0'} - hasBin: true - solc@0.8.26: resolution: {integrity: sha512-yiPQNVf5rBFHwN6SIf3TUUvVAFKcQqmSUFeq+fb6pNRCo0ZCgpYOZDi3BVoezCPIAcKrVYd/qXlBLUP9wVrZ9g==} engines: {node: '>=10.0.0'} hasBin: true + solc@0.8.27: + resolution: {integrity: sha512-BNxMol2tUAbkH7HKlXBcBqrGi2aqgv+uMHz26mJyTtlVgWmBA4ktiw0qVKHfkjf2oaHbwtbtaSeE2dhn/gTAKw==} + engines: {node: '>=10.0.0'} + hasBin: true + solidity-coverage@0.8.13: resolution: {integrity: sha512-RiBoI+kF94V3Rv0+iwOj3HQVSqNzA9qm/qDP1ZDXK5IX0Cvho1qiz8hAXTsAo6KOIUeP73jfscq0KlLqVxzGWA==} hasBin: true @@ -7891,6 +7899,8 @@ snapshots: '@nomicfoundation/solidity-analyzer-linux-x64-musl': 0.1.2 '@nomicfoundation/solidity-analyzer-win32-x64-msvc': 0.1.2 + 
'@openzeppelin/contracts@5.0.2': {} + '@pkgjs/parseargs@0.11.0': optional: true @@ -8020,6 +8030,10 @@ snapshots: hardhat: link:packages/hardhat-core typechain: 8.3.2(typescript@5.0.4) + '@types/adm-zip@0.5.5': + dependencies: + '@types/node': 20.16.1 + '@types/async-eventemitter@0.2.4': dependencies: '@types/events': 3.0.3 @@ -8951,8 +8965,6 @@ snapshots: commander@10.0.1: {} - commander@3.0.2: {} - commander@8.3.0: {} commondir@1.0.1: {} @@ -9836,14 +9848,6 @@ snapshots: fs-constants@1.0.0: {} - fs-extra@0.30.0: - dependencies: - graceful-fs: 4.2.11 - jsonfile: 2.4.0 - klaw: 1.3.1 - path-is-absolute: 1.0.1 - rimraf: 2.7.1 - fs-extra@10.1.0: dependencies: graceful-fs: 4.2.11 @@ -10396,10 +10400,6 @@ snapshots: json5@2.2.3: {} - jsonfile@2.4.0: - optionalDependencies: - graceful-fs: 4.2.11 - jsonfile@4.0.0: optionalDependencies: graceful-fs: 4.2.11 @@ -10426,10 +10426,6 @@ snapshots: kind-of@6.0.3: {} - klaw@1.3.1: - optionalDependencies: - graceful-fs: 4.2.11 - kleur@3.0.3: {} levn@0.3.0: @@ -10829,6 +10825,8 @@ snapshots: dependencies: aggregate-error: 3.1.0 + p-map@7.0.2: {} + p-try@1.0.0: {} p-try@2.2.0: {} @@ -11100,10 +11098,6 @@ snapshots: rfdc@1.4.1: {} - rimraf@2.7.1: - dependencies: - glob: 7.2.0 - rimraf@3.0.2: dependencies: glob: 7.2.0 @@ -11280,21 +11274,19 @@ snapshots: astral-regex: 2.0.0 is-fullwidth-code-point: 3.0.0 - solc@0.7.3(debug@4.3.7): + solc@0.8.26(debug@4.3.7): dependencies: command-exists: 1.2.9 - commander: 3.0.2 + commander: 8.3.0 follow-redirects: 1.15.9(debug@4.3.7) - fs-extra: 0.30.0 js-sha3: 0.8.0 memorystream: 0.3.1 - require-from-string: 2.0.2 semver: 5.7.2 tmp: 0.0.33 transitivePeerDependencies: - debug - solc@0.8.26(debug@4.3.7): + solc@0.8.27(debug@4.3.7): dependencies: command-exists: 1.2.9 commander: 8.3.0 diff --git a/scripts/check-v-next-dependencies.js b/scripts/check-v-next-dependencies.js index 7bfaa34c2a..ebc93fb8e1 100644 --- a/scripts/check-v-next-dependencies.js +++ b/scripts/check-v-next-dependencies.js @@ -2,6 +2,8 @@ const fs = require("fs"); const path = require("path"); +const IGNORED_PACKAGE_FOLDERS = new Set(["hardhat-build-system"]); + /** * @typedef {Object} Package * @property {string} name @@ -56,9 +58,9 @@ function main() { function getAllPackageJsonPaths() { const packageNames = fs.readdirSync(path.join(__dirname, "..", "v-next")); - const packageJsons = packageNames.map((p) => - path.join(__dirname, "..", "v-next", p, "package.json") - ); + const packageJsons = packageNames + .filter((p) => !IGNORED_PACKAGE_FOLDERS.has(p)) + .map((p) => path.join(__dirname, "..", "v-next", p, "package.json")); return packageJsons; } diff --git a/v-next/example-project/contracts/A.sol b/v-next/example-project/contracts/A.sol new file mode 100644 index 0000000000..b5f2113b13 --- /dev/null +++ b/v-next/example-project/contracts/A.sol @@ -0,0 +1,7 @@ +// SPDX-License-Identifier: UNLICENSED +pragma solidity ^0.8.0; +pragma solidity *; + +import "./B.sol"; + +contract A is B {} diff --git a/v-next/example-project/contracts/B.sol b/v-next/example-project/contracts/B.sol new file mode 100644 index 0000000000..60e5f75723 --- /dev/null +++ b/v-next/example-project/contracts/B.sol @@ -0,0 +1,6 @@ +// SPDX-License-Identifier: UNLICENSED +pragma solidity ^0.8.0; + +import "@openzeppelin/contracts/access/Ownable.sol"; + +contract B {} diff --git a/v-next/example-project/contracts/C.sol b/v-next/example-project/contracts/C.sol new file mode 100644 index 0000000000..c2fb1cb26a --- /dev/null +++ b/v-next/example-project/contracts/C.sol @@ -0,0 +1,8 @@ +// 
SPDX-License-Identifier: UNLICENSED +pragma solidity ^0.8.0; + +import "./B.sol"; + +contract C {} + +contract C2 {} diff --git a/v-next/example-project/contracts/D.sol b/v-next/example-project/contracts/D.sol new file mode 100644 index 0000000000..90831835ea --- /dev/null +++ b/v-next/example-project/contracts/D.sol @@ -0,0 +1 @@ +import "./C.sol"; diff --git a/v-next/example-project/contracts/NoImports.sol b/v-next/example-project/contracts/NoImports.sol new file mode 100644 index 0000000000..4b3dbb32c0 --- /dev/null +++ b/v-next/example-project/contracts/NoImports.sol @@ -0,0 +1,4 @@ +// SPDX-License-Identifier: SEE LICENSE IN LICENSE +pragma solidity ^0.7.0; + +contract NoImports {} diff --git a/v-next/example-project/contracts/UserRemappedImport.sol b/v-next/example-project/contracts/UserRemappedImport.sol new file mode 100644 index 0000000000..6967f6d86a --- /dev/null +++ b/v-next/example-project/contracts/UserRemappedImport.sol @@ -0,0 +1,4 @@ +// SPDX-License-Identifier: SEE LICENSE IN LICENSE +pragma solidity ^0.8.0; + +import "remapped/Ownable.sol"; diff --git a/v-next/example-project/hardhat.config.ts b/v-next/example-project/hardhat.config.ts index 55b73b6654..b0ef221349 100644 --- a/v-next/example-project/hardhat.config.ts +++ b/v-next/example-project/hardhat.config.ts @@ -11,6 +11,8 @@ import { viemScketchPlugin } from "./viem-scketch-plugin.js"; import hardhatNetworkHelpersPlugin from "@ignored/hardhat-vnext-network-helpers"; import hardhatEthersPlugin from "@ignored/hardhat-vnext-ethers"; +util.inspect.defaultOptions.depth = null; + const exampleEmptyTask = emptyTask("empty", "An example empty task").build(); const exampleEmptySubtask = task(["empty", "task"]) @@ -135,6 +137,33 @@ const config: HardhatUserConfig = { nodeTest: "test/node", }, }, + solidity: { + profiles: { + default: { + compilers: [ + { + version: "0.8.22", + }, + { + version: "0.7.1", + }, + ], + overrides: { + "foo/bar.sol": { + version: "0.8.1", + }, + }, + }, + test: { + version: "0.8.2", + }, + coverage: { + version: "0.8.2", + }, + }, + dependenciesToCompile: ["@openzeppelin/contracts/token/ERC20/ERC20.sol"], + remappings: ["remapped/=npm/@openzeppelin/contracts@5.0.2/access/"], + }, }; export default config; diff --git a/v-next/example-project/package.json b/v-next/example-project/package.json index de7bd6d7bd..dd56bf1bbf 100644 --- a/v-next/example-project/package.json +++ b/v-next/example-project/package.json @@ -21,11 +21,12 @@ }, "devDependencies": { "@ignored/hardhat-vnext": "workspace:^3.0.0-next.4", - "@ignored/hardhat-vnext-ethers": "workspace:^3.0.0-next.0", - "@ignored/hardhat-vnext-keystore": "workspace:^3.0.0-next.0", - "@ignored/hardhat-vnext-mocha-test-runner": "workspace:^3.0.0-next.0", - "@ignored/hardhat-vnext-network-helpers": "workspace:^3.0.0-next.0", "@ignored/hardhat-vnext-node-test-runner": "workspace:^3.0.0-next.0", + "@ignored/hardhat-vnext-network-helpers": "workspace:^3.0.0-next.0", + "@ignored/hardhat-vnext-mocha-test-runner": "workspace:^3.0.0-next.0", + "@ignored/hardhat-vnext-keystore": "workspace:^3.0.0-next.0", + "@ignored/hardhat-vnext-ethers": "workspace:^3.0.0-next.0", + "@openzeppelin/contracts": "^5.0.2", "@types/mocha": ">=9.1.0", "@types/node": "^20.14.9", "mocha": "^10.0.0", diff --git a/v-next/example-project/scripts/typed-artifacts.ts b/v-next/example-project/scripts/typed-artifacts.ts new file mode 100644 index 0000000000..0125108920 --- /dev/null +++ b/v-next/example-project/scripts/typed-artifacts.ts @@ -0,0 +1,6 @@ +import { artifacts } from 
"@ignored/hardhat-vnext"; + +const Rocket = await artifacts.readArtifact("Rocket"); +// ^? +const Rocket2 = await artifacts.readArtifact("contracts/Rocket.sol:Rocket"); +// ^? diff --git a/v-next/hardhat-build-system/package.json b/v-next/hardhat-build-system/package.json index cef5c0fde5..9ee1fbc06e 100644 --- a/v-next/hardhat-build-system/package.json +++ b/v-next/hardhat-build-system/package.json @@ -57,7 +57,7 @@ "p-map": "^4.0.0", "resolve": "1.17.0", "semver": "^7.6.3", - "solc": "0.7.3", + "solc": "^0.8.27", "undici": "^6.16.1" }, "devDependencies": { diff --git a/v-next/hardhat-build-system/test/helpers.ts b/v-next/hardhat-build-system/test/helpers.ts index ad797a8a92..4aff0aee1b 100644 --- a/v-next/hardhat-build-system/test/helpers.ts +++ b/v-next/hardhat-build-system/test/helpers.ts @@ -50,6 +50,7 @@ const defaultSolcOutputSelection = { "": ["ast"], }, }; + export function cleanFixtureProjectDir(fixtureProjectName: string): void { const folderPath = path.join( _dirname, diff --git a/v-next/hardhat-errors/src/descriptors.ts b/v-next/hardhat-errors/src/descriptors.ts index 99aef6b633..a0218bfa92 100644 --- a/v-next/hardhat-errors/src/descriptors.ts +++ b/v-next/hardhat-errors/src/descriptors.ts @@ -83,6 +83,7 @@ export const ERROR_CATEGORIES: { max: 1199, websiteTitle: "Ethers errors", }, + SOLIDITY: { min: 1200, max: 1299, websiteTitle: "Solidity errors" }, }; export const ERRORS = { @@ -789,4 +790,261 @@ This might be caused by using hardhat_reset and loadFixture calls in a testcase. websiteDescription: "Unsupported type for deep copy", }, }, + SOLIDITY: { + RESOLVING_INCORRECT_FILE_AS_PROJECT_FILE: { + number: 1200, + messageTemplate: + "File {file} is being resolved as a project file, but it's not part of the project.", + websiteTitle: "Solidity project file is outside the project", + websiteDescription: `Tried to resolve a file as a project file, but it's not part of the project.`, + }, + RESOLVING_NONEXISTENT_PROJECT_FILE: { + number: 1201, + messageTemplate: + "File {file} is being resolved as a project file, but it doesn't exist.", + websiteTitle: "Solidity project file doesn't exist", + websiteDescription: `Tried to resolve a file as a project file, but it doesn't exist.`, + }, + IMPORTED_FILE_DOESNT_EXIST: { + number: 1202, + messageTemplate: 'The import "{importPath} from "{from}" doesn\'t exist.', + websiteTitle: "Imported file doesn't exist", + websiteDescription: `An imported file doesn't exist.`, + }, + IMPORTED_FILE_WITH_ICORRECT_CASING: { + number: 1203, + messageTemplate: + 'The import "{importPath} from "{from}" exists, but its casing is incorrect. The correct casing is "{correctCasing}".', + websiteTitle: "Imported file with incorrect casing", + websiteDescription: `Hardhat enforces that you import your files with the correct casing (as stored in the filesystem). 
+ +This error is thrown when you import a file with the wrong casing under a case-insensitive filesystem.`, + }, + NPM_DEPEDNDENCY_NOT_INSTALLED: { + number: 1204, + messageTemplate: + 'The npm package "{packageName}" isn\'t installed in the {from}.', + websiteTitle: "Uninstalled npm dependency", + websiteDescription: `Trying to use an npm package as a solidity dependency, but it's not installed.`, + }, + NPM_DEPEDNDENCY_USES_EXPORTS: { + number: 1205, + messageTemplate: + 'The npm package "{packageName}" is installed in {from}, but it uses package.json#exports, which is not supported by Hardhat.', + websiteTitle: "Npm dependency uses the unsupported package.json#exports", + websiteDescription: `Trying to use an npm package as a solidity dependency, but it uses package.json#exports, which is not supported by Hardhat.`, + }, + IMPORTED_NPM_DEPENDENCY_NOT_INSTALLED: { + number: 1206, + messageTemplate: + 'The import "{importPath}" from "{from}" is trying to use an uninstalled npm dependency.', + websiteTitle: "Uninstalled npm solidity dependency", + websiteDescription: `One of your files is trying to import a dependency using npm, but it hasn't been installed.`, + }, + IMPORTED_NPM_DEPENDENCY_THAT_USES_EXPORTS: { + number: 1207, + messageTemplate: + 'The import "{importPath}" from "{from}" is trying to use an npm dependency that uses package.json#exports, which is not supported by Hardhat.', + websiteTitle: + "Using an npm solidity dependency with package.json#exports is not supported", + websiteDescription: `One of your files is trying to import a dependency using npm, but it uses package.json#exports, which Hardhat doesn't support.`, + }, + USER_REMAPPING_WITH_NPM_CONTEXT: { + number: 1208, + messageTemplate: + 'The remapping "{remapping}" has a context starting with "npm/", which is forbidden. Hardhat doesn\'t allow changing the behaviour of npm packages\' imports.', + websiteTitle: "Remapping imports in npm packages is not allowed", + websiteDescription: `This error happened because you are trying to change how the imports within an npm package are resolved, which is not allowed. + +While Hardhat supports user-defined remappings, it doesn't support remapping the behavior of npm packages, to ensure that everything that's imported via npm uses the same npm resolution logic.`, + }, + REMAPPING_WITH_INVALID_SYNTAX: { + number: 1209, + messageTemplate: `The remapping "{remapping}" is invalid.`, + websiteTitle: "Invalid remapping", + websiteDescription: `You are trying to set a user remapping, but its syntax is invalid. + +Please double check your remappings' syntax.`, + }, + REMAPPING_TO_UNINSTALLED_PACKAGE: { + number: 1210, + messageTemplate: `The remapping "{remapping}" is trying to use the npm package "{package}", which is not installed.`, + websiteTitle: "Remapping into an uninstalled npm package", + websiteDescription: `You are trying to set a user remapping that uses an npm package as target, but it's not installed. 
+ +Please make sure to install the package or fix the remapping.`, + }, + REMAPPING_TO_PACKAGE_USING_EXPORTS: { + number: 1211, + messageTemplate: `The remapping "{remapping}" is using the npm package "{package}", which uses package.json#exports, which is not supported by Hardhat.`, + websiteTitle: + "Remapping into an npm package that uses package.json#exports", + websiteDescription: `You are trying to set a user remapping that uses an npm package as target, but it uses package.json#exports, which Hardhat doesn't support.`, + }, + REMAPPING_NPM_PACKAGE_AS_MONOREPO: { + number: 1212, + messageTemplate: `The remapping "{remapping}" targets the npm package "{package}" as if it were part of this repository, but version "{version}" is installed instead.`, + websiteTitle: + "Remapping into a monorepo package but found an npm package instead", + websiteDescription: `You are trying to set a remapping that sets a monorepo package as target, but Hardhat found the package to be installed from the npm registry instead.`, + }, + REMAPPING_HARDHAT_PROJECT_AS_MONOREPO_PACKAGE: { + number: 1213, + messageTemplate: `The remapping "{remapping}" is trying to set the npm package "{package}" as target, but that package is the Hardhat project itself, so it shouldn't be remapped through npm/, but with an internal project remapping.`, + websiteTitle: `Remapping into the project using npm`, + websiteDescription: `You are trying to set a remapping whose target uses the npm/ syntax, but is within your Hardhat project. + +Please don't use npm/... as target, but use a normal internal project remapping instead.`, + }, + REMAPPING_INCORRECT_VERSION: { + number: 1214, + messageTemplate: `The remapping "{remapping}" is trying to set the npm package "{package}" version "{expectedVersion}" as target, but found version "{actualVersion}" instead.`, + websiteTitle: `Remapping into incorrect npm package version`, + websiteDescription: `You are trying to set a remapping into an npm package, but the version that you are using is not the currently installed one. + +Please change your remapping to match the installed version, or install the correct one.`, + }, + INVALID_NPM_IMPORT: { + number: 1215, + messageTemplate: `The import "{importPath}" in "{from}" is treated as an npm import as its first directory doesn't exist in your project, but its syntax is not that of a valid npm import either.`, + websiteTitle: `Invalid npm import`, + websiteDescription: `You are trying to import a file that is not a valid npm import. Please double check that you are using the correct syntax.`, + }, + ILLEGAL_PACKAGE_IMPORT: { + number: 1216, + messageTemplate: `The import "{importPath}" in "{from}" is not a legal import as it's trying to import a file outside of its package.`, + websiteTitle: `Illegal package import`, + websiteDescription: `One of your npm packages has a Solidity file that is trying to import a file outside of its package using a relative import. This is disabled for security reasons.`, + }, + ILEGALL_PROJECT_IMPORT: { + number: 1217, + messageTemplate: `The import "{importPath}" in "{from}" is not a legal import as it's trying to import a file outside of the project.`, + websiteTitle: `Illegal project import`, + websiteDescription: `One of your Solidity files is trying to import a file outside of the Hardhat project using a relative import. 
This is disabled for security reasons.`, + }, + ILLEGAL_PROJECT_IMPORT_AFTER_REMAPPING: { + number: 1218, + messageTemplate: `Applying the remapping "{remapping}" to the import "{importPath}" from "{from}" results in an invalid import "{remappedDirectImport}", as it's not a local file. If you are trying to remap into an npm module, use the npm/ syntax instead.`, + websiteTitle: `Illegal project import after remapping`, + websiteDescription: `One of your Solidity files has an import which after applying a user remapping becomes an illegal import, as it tries to import a file outside of the project. This is disabled for security reasons. + +If you are trying to remap into an npm module, use the npm/ syntax instead.`, + }, + IMPORT_PATH_WITH_WINDOWS_SEPARATOR: { + number: 1219, + messageTemplate: `The import "{importPath}" in "{from}" is not a valid import as it contains a Windows path separator.`, + websiteTitle: `Import path with Windows path separator`, + websiteDescription: `One of your Solidity files is trying to import a file with a Windows path separator, and this is not supported. Please use a Unix-style path instead.`, + }, + INVALID_SOLC_VERSION: { + number: 1220, + messageTemplate: `Solidity version {version} is invalid or hasn't been released yet. + +If you are certain it has been released, run "npx hardhat clean --global" and try again.`, + websiteTitle: "Invalid or unreleased `solc` version", + websiteDescription: `The Solidity version in your config is invalid or hasn't been released yet. + +If you are certain it has been released, run \`npx hardhat clean --global\` and try again.`, + }, + RESOLVE_NPM_FILE_WITH_INVALID_FORMAT: { + number: 1221, + messageTemplate: `Couldn't resolve the npm file "{module}" because it has an invalid format. + +Make sure that you are providing valid npm file paths (e.g. package/File.sol) in your config and programmatically.`, + websiteTitle: "Resolving invalid npm file", + websiteDescription: `Tried to resolve an npm file directly (i.e. not imported by another file) but its format is invalid. + +This can happen if you set npm files with invalid file paths to be compiled as local files, or by misusing the solidity build system.`, + }, + RESOLVE_NPM_FILE_CLASHES_WITH_LOCAL_FILES: { + number: 1222, + messageTemplate: `You are trying to resolve the npm file "{module}", for example to compile it as a local one, but it can clash with your project as the "{directory}" directory is present in your project. + +Please try renaming the directory.`, + websiteTitle: "Resolution of npm file clashes with local files", + websiteDescription: `You are trying to resolve an npm file, for example to compile it as a local one, but it can clash with your project files.`, + }, + RESOLVE_NON_EXISTENT_NPM_FILE: { + number: 1223, + messageTemplate: `You are trying to resolve the npm file "{module}", but it doesn't exist within its package.`, + websiteTitle: "Resolution of non-existent npm file", + websiteDescription: `You are trying to resolve an npm file that doesn't exist within its package.`, + }, + DOWNLOAD_FAILED: { + number: 1224, + messageTemplate: + "Couldn't download compiler version {remoteVersion}. Please check your internet connection and try again.", + websiteTitle: "`solc` download failed", + websiteDescription: `Couldn't download \`solc\`. + +Please check your internet connection and try again.`, + }, + VERSION_LIST_DOWNLOAD_FAILED: { + number: 1225, + messageTemplate: + "Couldn't download compiler version list. 
Please check your internet connection and try again.", + websiteTitle: "Couldn't obtain `solc` version list", + websiteDescription: `Couldn't download \`solc\`'s version list. + +Please check your internet connection and try again.`, + }, + INVALID_DOWNLOAD: { + number: 1226, + messageTemplate: `Couldn't download compiler version {remoteVersion}: Checksum verification failed. + +Please check your internet connection and try again. + +If this error persists, run "npx hardhat clean --global".`, + websiteTitle: "Downloaded `solc` checksum verification failed", + websiteDescription: `Hardhat downloaded a version of the Solidity compiler, and its checksum verification failed. + +Please check your internet connection and try again. + +If this error persists, run \`npx hardhat clean --global\`.`, + }, + CANT_RUN_NATIVE_COMPILER: { + number: 1227, + messageTemplate: `A native version of solc failed to run. + +If you are running MacOS, try installing Apple Rosetta. + +If this error persists, run "npx hardhat clean --global".`, + websiteTitle: "Failed to run native solc", + websiteDescription: `Hardhat successfully downloaded a native version of solc but it doesn't run. + +If you are running MacOS, try installing Apple Rosetta. + +If this error persists, run "npx hardhat clean --global".`, + }, + CANT_RUN_SOLCJS_COMPILER: { + number: 1228, + messageTemplate: `A wasm version of solc failed to run. + +If this error persists, run "npx hardhat clean --global".`, + websiteTitle: "Failed to run solcjs", + websiteDescription: `Hardhat successfully downloaded a WASM version of solc but it doesn't run. + +If you are running MacOS, try installing Apple Rosetta. + +If this error persists, run "npx hardhat clean --global".`, + }, + COMPILATION_JOB_CREATION_ERROR: { + number: 1229, + messageTemplate: `Failed to create compilation job for file {rootFilePath} using the build profile "{buildProfile}". + +{reason}`, + websiteTitle: "Failed to create compilation job", + websiteDescription: `Hardhat failed to create a compilation job for a file in your project. + +This happens when your files require incompatible versions of solc or you haven't configured a version that works with them`, + }, + BUILD_FAILED: { + number: 1230, + messageTemplate: "Compilation failed", + websiteTitle: "Compilation failed", + websiteDescription: `Your smart contracts failed to compile. 
+ +Please check Hardhat's output for more details.`, + }, + }, } as const; diff --git a/v-next/hardhat-ethers/test-tmp/helpers/artifact-manager-mock.ts b/v-next/hardhat-ethers/test-tmp/helpers/artifact-manager-mock.ts index 0a58864ae5..5afe52eb98 100644 --- a/v-next/hardhat-ethers/test-tmp/helpers/artifact-manager-mock.ts +++ b/v-next/hardhat-ethers/test-tmp/helpers/artifact-manager-mock.ts @@ -2,11 +2,14 @@ import type { Artifact, ArtifactsManager, BuildInfo, - CompilerInput, - CompilerOutput, + GetAtifactByName, } from "@ignored/hardhat-vnext/types/artifacts"; import { HardhatError } from "@ignored/hardhat-vnext-errors"; +import { + CompilerInput, + CompilerOutput, +} from "../../../hardhat/src/types/solidity/compiler-io.js"; export class MockArtifactsManager implements ArtifactsManager { readonly #artifacts: Map; @@ -27,9 +30,9 @@ export class MockArtifactsManager implements ArtifactsManager { } } - public async readArtifact( - contractNameOrFullyQualifiedName: string, - ): Promise { + public async readArtifact( + contractNameOrFullyQualifiedName: ContractNameT, + ): Promise> { const artifactFileName = this.#artifactsPaths.get( contractNameOrFullyQualifiedName, ); diff --git a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/ambiguous-library.ts b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/ambiguous-library.ts index b08595e88d..b8bc6cd13c 100644 --- a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/ambiguous-library.ts +++ b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/ambiguous-library.ts @@ -1,7 +1,7 @@ import type { Artifact } from "@ignored/hardhat-vnext/types/artifacts"; export const CONTRACT: Artifact = { - _format: "hh-sol-artifact-1", + _format: "hh3-artifact-1", contractName: "AmbiguousLibrary", sourceName: "contracts/AmbiguousLibrary.sol", abi: [], diff --git a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/error-messages.ts b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/error-messages.ts index 7a9d8fe979..89cb355bb3 100644 --- a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/error-messages.ts +++ b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/error-messages.ts @@ -1,7 +1,7 @@ import type { Artifact } from "@ignored/hardhat-vnext/types/artifacts"; export const CONTRACT: Artifact = { - _format: "hh-sol-artifact-1", + _format: "hh3-artifact-1", contractName: "Contract", sourceName: "contracts/Contract.sol", abi: [ diff --git a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/gas-config.ts b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/gas-config.ts index e2e0bb5721..8142aac209 100644 --- a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/gas-config.ts +++ b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/gas-config.ts @@ -1,7 +1,7 @@ import type { Artifact } from "@ignored/hardhat-vnext/types/artifacts"; export const CONTRACT: Artifact = { - _format: "hh-sol-artifact-1", + _format: "hh3-artifact-1", contractName: "Example", sourceName: "contracts/Example.sol", abi: [ diff --git a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/greeter-with-constructor-arg.ts b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/greeter-with-constructor-arg.ts index ec8af680df..527e9af042 100644 --- a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/greeter-with-constructor-arg.ts +++ b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/greeter-with-constructor-arg.ts @@ -1,7 +1,7 @@ import type { Artifact } from "@ignored/hardhat-vnext/types/artifacts"; export const CONTRACT: Artifact = { - _format: "hh-sol-artifact-1", + _format: "hh3-artifact-1", contractName: 
"GreeterWithConstructorArg", sourceName: "contracts/GreeterWithConstructorArg.sol", abi: [ diff --git a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/greeter.ts b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/greeter.ts index 31f93b53fd..7047abe254 100644 --- a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/greeter.ts +++ b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/greeter.ts @@ -1,7 +1,7 @@ import type { Artifact } from "@ignored/hardhat-vnext/types/artifacts"; export const CONTRACT: Artifact = { - _format: "hh-sol-artifact-1", + _format: "hh3-artifact-1", contractName: "Greeter", sourceName: "contracts/Greeter.sol", abi: [ diff --git a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/igreeter.ts b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/igreeter.ts index 26496c6e3c..5991758ff3 100644 --- a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/igreeter.ts +++ b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/igreeter.ts @@ -1,7 +1,7 @@ import type { Artifact } from "@ignored/hardhat-vnext/types/artifacts"; export const CONTRACT: Artifact = { - _format: "hh-sol-artifact-1", + _format: "hh3-artifact-1", contractName: "IGreeter", sourceName: "contracts/IGreeter.sol", abi: [ diff --git a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/non-unique-lib.ts b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/non-unique-lib.ts index ee5dd5ae86..3375d29979 100644 --- a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/non-unique-lib.ts +++ b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/non-unique-lib.ts @@ -1,7 +1,7 @@ import type { Artifact } from "@ignored/hardhat-vnext/types/artifacts"; export const CONTRACT: Artifact = { - _format: "hh-sol-artifact-1", + _format: "hh3-artifact-1", contractName: "NonUniqueLibrary", sourceName: "contracts/TestNonUniqueLib.sol", abi: [], diff --git a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-ambiguous-library.ts b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-ambiguous-library.ts index a18eff9208..0931937d16 100644 --- a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-ambiguous-library.ts +++ b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-ambiguous-library.ts @@ -1,7 +1,7 @@ import type { Artifact } from "@ignored/hardhat-vnext/types/artifacts"; export const CONTRACT: Artifact = { - _format: "hh-sol-artifact-1", + _format: "hh3-artifact-1", contractName: "TestAmbiguousLib", sourceName: "contracts/AmbiguousLibrary.sol", abi: [ diff --git a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-contract-lib.ts b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-contract-lib.ts index a4c7416832..0b3aa3daf4 100644 --- a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-contract-lib.ts +++ b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-contract-lib.ts @@ -1,7 +1,7 @@ import type { Artifact } from "@ignored/hardhat-vnext/types/artifacts"; export const CONTRACT: Artifact = { - _format: "hh-sol-artifact-1", + _format: "hh3-artifact-1", contractName: "TestContractLib", sourceName: "contracts/TestContractLib.sol", abi: [ diff --git a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-library.ts b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-library.ts index 9f0530fc14..8121d2dcfc 100644 --- a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-library.ts +++ b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-library.ts @@ -1,7 +1,7 @@ import type { Artifact } from "@ignored/hardhat-vnext/types/artifacts"; export const CONTRACT: Artifact = { - _format: "hh-sol-artifact-1", + _format: 
"hh3-artifact-1", contractName: "TestLibrary", sourceName: "contracts/TestContractLib.sol", abi: [], diff --git a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-non-unique-lib.ts b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-non-unique-lib.ts index 3aab9e19ad..b86bd0ca51 100644 --- a/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-non-unique-lib.ts +++ b/v-next/hardhat-ethers/test-tmp/helpers/artifacts/test-non-unique-lib.ts @@ -1,7 +1,7 @@ import type { Artifact } from "@ignored/hardhat-vnext/types/artifacts"; export const CONTRACT: Artifact = { - _format: "hh-sol-artifact-1", + _format: "hh3-artifact-1", contractName: "TestNonUniqueLib", sourceName: "contracts/TestNonUniqueLib.sol", abi: [ diff --git a/v-next/hardhat-utils/package.json b/v-next/hardhat-utils/package.json index 0c1db5149d..6bb9fef675 100644 --- a/v-next/hardhat-utils/package.json +++ b/v-next/hardhat-utils/package.json @@ -29,6 +29,7 @@ "./package": "./dist/src/package.js", "./path": "./dist/src/path.js", "./request": "./dist/src/request.js", + "./resolve": "./dist/src/resolve.js", "./string": "./dist/src/string.js", "./subprocess": "./dist/src/subprocess.js" }, diff --git a/v-next/hardhat-utils/src/fs.ts b/v-next/hardhat-utils/src/fs.ts index be5681fff5..f38224e3fa 100644 --- a/v-next/hardhat-utils/src/fs.ts +++ b/v-next/hardhat-utils/src/fs.ts @@ -67,6 +67,45 @@ export async function getAllFilesMatching( return results.flat(); } +/** + * Recursively searches a directory and its subdirectories for directories that + * satisfy the specified condition, returning their absolute paths. Once a + * directory is found, its subdirectories are not searched. + * + * Note: dirFrom is never returned, nor `matches` is called on it. + * + * @param dirFrom The absolute path of the directory to start the search from. + * @param matches A function to filter directories (not files). + * @returns An array of absolute paths. Each path has its true case, except + * for the initial dirFrom part, which preserves the given casing. + * No order is guaranteed. If dirFrom doesn't exist `[]` is returned. + * @throws NotADirectoryError if dirFrom is not a directory. + * @throws FileSystemAccessError for any other error. + */ +export async function getAllDirectoriesMatching( + dirFrom: string, + matches?: (absolutePathToDir: string) => boolean, +): Promise { + const dirContent = await readdirOrEmpty(dirFrom); + + const results = await Promise.all( + dirContent.map(async (file) => { + const absolutePathToFile = path.join(dirFrom, file); + if (!(await isDirectory(absolutePathToFile))) { + return []; + } + + if (matches === undefined || matches(absolutePathToFile)) { + return absolutePathToFile; + } + + return getAllDirectoriesMatching(absolutePathToFile, matches); + }), + ); + + return results.flat(); +} + /** * Determines the true case path of a given relative path from a specified * directory, without resolving symbolic links, and returns it. diff --git a/v-next/hardhat-utils/src/path.ts b/v-next/hardhat-utils/src/path.ts index 2622f79072..cb25fdcbde 100644 --- a/v-next/hardhat-utils/src/path.ts +++ b/v-next/hardhat-utils/src/path.ts @@ -17,3 +17,29 @@ export function resolveFromRoot(root: string, target: string): string { return path.resolve(root, target); } + +/** + * Tries to return a shorter version of the path if its inside the given folder. + * + * This is useful for displaying paths in the terminal, as they can be shorter + * when they are insidde the current working directory. 
For example, if the + * current working directory is `/home/user/project`, and the path is + * `/home/user/project/contracts/File.sol`, the shorter path is + * `contracts/File.sol`. + * + * @param absolutePath The path to shorten. + * @param folder The absolute path to the folder. + * @returns The shorter path, if possible, or the original path. + */ +export function shortenPath( + absolutePath: string, + folder: string = process.cwd(), +): string { + const relativePath = path.relative(folder, absolutePath); + + if (relativePath.length < absolutePath.length) { + return relativePath; + } + + return absolutePath; +} diff --git a/v-next/hardhat-utils/src/resolve.ts b/v-next/hardhat-utils/src/resolve.ts new file mode 100644 index 0000000000..677bc3f91a --- /dev/null +++ b/v-next/hardhat-utils/src/resolve.ts @@ -0,0 +1,63 @@ +import { createRequire } from "node:module"; + +import { ensureError } from "./error.js"; + +/** + * The different errors that can be found when resolving a module. + */ +export enum ResolutionError { + /** + * The node resolution failed to find the module and/or package. + */ + MODULE_NOT_FOUND = "MODULE_NOT_FOUND", + + /** + * The node resolution found the package, but it uses package.json#exports + * and doesn't export the requested module. + */ + NOT_EXPORTED = "NOT_EXPORTED", +} + +/** + * The of trying to resolve a module. + */ +export type ResolutionResult = + | { success: true; absolutePath: string } + | { success: false; error: ResolutionError }; + +/** + * Resolves the module identifier into an absolute path, following the Node.js + * resolution algorithm, starting the resolution from the given `from` path. + */ +export function resolve( + moduleIdentifierToResolve: string, + from: string, +): ResolutionResult { + // We need to create a new require with `from` as `require.resolve` seems to + // ignore its `paths` option, at least on Windows. To play safe, we still + // provide the `paths`. 
+ const require = createRequire(from); + + try { + return { + success: true, + absolutePath: require.resolve(moduleIdentifierToResolve, { + paths: [from], + }), + }; + } catch (e) { + ensureError(e); + + if ("code" in e) { + if (e.code === "MODULE_NOT_FOUND") { + return { success: false, error: ResolutionError.MODULE_NOT_FOUND }; + } + + if (e.code === "ERR_PACKAGE_PATH_NOT_EXPORTED") { + return { success: false, error: ResolutionError.NOT_EXPORTED }; + } + } + + throw e; + } +} diff --git a/v-next/hardhat-utils/test/fixture-projects/resolve-fixture/.gitignore b/v-next/hardhat-utils/test/fixture-projects/resolve-fixture/.gitignore new file mode 100644 index 0000000000..cf4bab9ddd --- /dev/null +++ b/v-next/hardhat-utils/test/fixture-projects/resolve-fixture/.gitignore @@ -0,0 +1 @@ +!node_modules diff --git a/v-next/hardhat-utils/test/fixture-projects/resolve-fixture/node_modules/dep/package.json b/v-next/hardhat-utils/test/fixture-projects/resolve-fixture/node_modules/dep/package.json new file mode 100644 index 0000000000..0d4b2fddec --- /dev/null +++ b/v-next/hardhat-utils/test/fixture-projects/resolve-fixture/node_modules/dep/package.json @@ -0,0 +1,5 @@ +{ + "name": "dep", + "version": "1.0.0", + "private": true +} diff --git a/v-next/hardhat-utils/test/path.ts b/v-next/hardhat-utils/test/path.ts index fe8d382449..69fd4be857 100644 --- a/v-next/hardhat-utils/test/path.ts +++ b/v-next/hardhat-utils/test/path.ts @@ -2,7 +2,7 @@ import assert from "node:assert/strict"; import path from "node:path"; import { describe, it } from "node:test"; -import { resolveFromRoot } from "../src/path.js"; +import { resolveFromRoot, shortenPath } from "../src/path.js"; describe("path", () => { describe("resolveFromRoot", () => { @@ -33,4 +33,106 @@ describe("path", () => { ); }); }); + + describe("shortenPath", () => { + it("Should shorten a path that's inside the folder", () => { + assert.equal( + shortenPath(import.meta.filename, import.meta.dirname), + path.basename(import.meta.filename), + ); + + assert.equal( + shortenPath(import.meta.filename, path.dirname(import.meta.dirname)), + path.join( + path.basename(import.meta.dirname), + path.basename(import.meta.filename), + ), + ); + + // Test that it works with a path.sep at the end + assert.equal( + shortenPath( + import.meta.filename, + path.dirname(import.meta.dirname) + path.sep, + ), + path.join( + path.basename(import.meta.dirname), + path.basename(import.meta.filename), + ), + ); + }); + + it("Should shorten a path that's not inside the folder", () => { + assert.equal( + shortenPath(import.meta.filename, import.meta.dirname + "nope"), + path.join("..", "test", "path.ts"), + ); + }); + + it("Should shorten a path that's not inside the folder whose path relative path would be longer", () => { + assert.equal( + shortenPath( + import.meta.dirname, + // We define a folder so nested that the relative path will be longer + // than the absolute one, due to too many ".." 
in the path + path.join( + import.meta.dirname, + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + "a", + ), + ), + import.meta.dirname, + ); + }); + }); }); diff --git a/v-next/hardhat-utils/test/resolve.ts b/v-next/hardhat-utils/test/resolve.ts new file mode 100644 index 0000000000..0c4066a17c --- /dev/null +++ b/v-next/hardhat-utils/test/resolve.ts @@ -0,0 +1,69 @@ +import assert from "node:assert/strict"; +import path from "node:path"; +import { describe, it } from "node:test"; + +import { ResolutionError, resolve } from "../src/resolve.js"; + +describe("Node.js resolution", () => { + it("Should resolve modules correctly", () => { + const resolved = resolve( + "@ignored/hardhat-vnext-utils/error", + import.meta.dirname, + ); + + assert.ok(resolved.success, "A successful resolution returns an object"); + + // We don't actually validate this behavior, we just want to make sure that + // it finds it. + assert.ok( + resolved.absolutePath.includes(path.sep + "hardhat-utils" + path.sep), + "The file is inside the hardhat-utils package", + ); + }); + + it("Should return ResolutionError.MODULE_NOT_FOUND for non-existing packages", () => { + const resolved = resolve("fooo", import.meta.dirname); + + assert.equal(resolved.success, false); + assert.equal(resolved.error, ResolutionError.MODULE_NOT_FOUND); + }); + + it("Should return ResolutionError.MODULE_NOT_FOUND for non-existing modules of existing packages", () => { + // Note: If this test fails after upgrading the version of the package below + // you should pick a different one that doesn't use #exports + const resolved = resolve("undici/foo", import.meta.dirname); + + assert.equal(resolved.success, false); + assert.equal(resolved.error, ResolutionError.MODULE_NOT_FOUND); + }); + + it("Should return ResolutionError.NOT_EXPORTED for existing packages that don't export the desired module", () => { + const resolved = resolve( + "@ignored/hardhat-vnext-utils/foo", + import.meta.dirname, + ); + + assert.equal(resolved.success, false); + assert.equal(resolved.error, ResolutionError.NOT_EXPORTED); + }); + + it("Should be able to finde from a different directory", () => { + const fixtureProjectPath = path.join( + import.meta.dirname, + "fixture-projects", + "resolve-fixture", + ); + + const resolved = resolve("dep/package.json", fixtureProjectPath); + assert.ok(resolved.success, "A successful resolution returns an object"); + assert.equal( + resolved.absolutePath, + path.join(fixtureProjectPath, "node_modules/dep/package.json"), + ); + + // It shouldn't find it in this directory + const notResolved = resolve("dep/package.json", import.meta.dirname); + assert.ok(!notResolved.success, "A failed resolution returns an object"); + assert.equal(notResolved.error, ResolutionError.MODULE_NOT_FOUND); + }); +}); diff --git a/v-next/hardhat/package.json b/v-next/hardhat/package.json index cc20f54b9f..dc65af15c8 100644 --- a/v-next/hardhat/package.json +++ b/v-next/hardhat/package.json @@ -31,7 +31,8 @@ "./types/providers": "./dist/src/types/providers.js", "./types/tasks": "./dist/src/types/tasks.js", "./types/user-interruptions": "./dist/src/types/user-interruptions.js", - "./types/utils": "./dist/src/types/utils.js" + "./types/utils": 
"./dist/src/types/utils.js", + "./types/solidity": "./dist/src/types/solidity.js" }, "keywords": [ "ethereum", @@ -62,6 +63,7 @@ "@eslint-community/eslint-plugin-eslint-comments": "^4.3.0", "@ignored/hardhat-vnext-node-test-reporter": "workspace:^3.0.0-next.3", "@nomicfoundation/hardhat-test-utils": "workspace:^", + "@types/adm-zip": "^0.5.5", "@types/debug": "^4.1.4", "@types/node": "^20.14.9", "@types/semver": "^7.5.8", @@ -79,17 +81,20 @@ "typescript-eslint": "7.7.1" }, "dependencies": { - "@ignored/edr": "0.6.2-alpha.0", - "@ignored/hardhat-vnext-build-system": "workspace:^3.0.0-next.0", - "@ignored/hardhat-vnext-errors": "workspace:^3.0.0-next.3", - "@ignored/hardhat-vnext-utils": "workspace:^3.0.0-next.3", + "@nomicfoundation/solidity-analyzer": "^0.1.0", "@ignored/hardhat-vnext-zod-utils": "workspace:^3.0.0-next.3", + "@ignored/hardhat-vnext-utils": "workspace:^3.0.0-next.3", + "@ignored/hardhat-vnext-errors": "workspace:^3.0.0-next.3", + "@ignored/edr": "0.6.2-alpha.0", "@sentry/node": "^5.18.1", + "adm-zip": "^0.4.16", "chalk": "^5.3.0", "debug": "^4.1.1", "enquirer": "^2.3.0", "ethereum-cryptography": "^2.2.1", + "p-map": "^7.0.2", "semver": "^7.6.3", + "solc": "^0.8.27", "tsx": "^4.11.0", "zod": "^3.23.8" } diff --git a/v-next/hardhat/src/index.ts b/v-next/hardhat/src/index.ts index f0c59dbed8..a91d6ad007 100644 --- a/v-next/hardhat/src/index.ts +++ b/v-next/hardhat/src/index.ts @@ -1,8 +1,10 @@ +import type { ArtifactsManager } from "./types/artifacts.js"; import type { HardhatConfig } from "./types/config.js"; import type { GlobalOptions } from "./types/global-options.js"; import type { HookManager } from "./types/hooks.js"; import type { HardhatRuntimeEnvironment } from "./types/hre.js"; import type { NetworkManager } from "./types/network.js"; +import type { SolidityBuildSystem } from "./types/solidity/build-system.js"; import type { TaskManager } from "./types/tasks.js"; import type { UserInterruptionManager } from "./types/user-interruptions.js"; @@ -25,5 +27,7 @@ export const interruptions: UserInterruptionManager = hre.interruptions; // NOTE: This is a small architectural violation, as the network manager comes // from a builtin plugin, and plugins can't add their own exports here. 
export const network: NetworkManager = hre.network; +export const artifacts: ArtifactsManager = hre.artifacts; +export const solidity: SolidityBuildSystem = hre.solidity; export default hre; diff --git a/v-next/hardhat/src/internal/builtin-plugins/artifacts/artifacts-manager.ts b/v-next/hardhat/src/internal/builtin-plugins/artifacts/artifacts-manager.ts index 6a279eb34a..b58ae8c1ed 100644 --- a/v-next/hardhat/src/internal/builtin-plugins/artifacts/artifacts-manager.ts +++ b/v-next/hardhat/src/internal/builtin-plugins/artifacts/artifacts-manager.ts @@ -1,9 +1,7 @@ import type { - Artifact, ArtifactsManager, + GetAtifactByName, BuildInfo, - CompilerInput, - CompilerOutput, } from "../../../types/artifacts.js"; import path from "node:path"; @@ -38,9 +36,9 @@ export class ArtifactsManagerImplementation implements ArtifactsManager { this.#artifactsPath = artifactsPath; } - public async readArtifact( - contractNameOrFullyQualifiedName: string, - ): Promise { + public async readArtifact( + contractNameOrFullyQualifiedName: ContractNameT, + ): Promise> { const artifactPath = await this.#getArtifactPath( contractNameOrFullyQualifiedName, ); @@ -240,16 +238,20 @@ export class ArtifactsManagerImplementation implements ArtifactsManager { public async getBuildInfo( fullyQualifiedName: string, ): Promise { - const artifactPath = - this.#formArtifactPathFromFullyQualifiedName(fullyQualifiedName); + const artifact = await this.readArtifact(fullyQualifiedName); - const debugFilePath = this.#getDebugFilePath(artifactPath); - const buildInfoPath = await this.#getBuildInfoFromDebugFile(debugFilePath); + const buildInfoId = artifact.buildInfoId; - if (buildInfoPath === undefined) { + if (buildInfoId === undefined) { return undefined; } + const buildInfoPath = path.join( + this.#artifactsPath, + `build-info`, + `${buildInfoId}.json`, + ); + return readJsonFile(buildInfoPath); } @@ -308,23 +310,6 @@ export class ArtifactsManagerImplementation implements ArtifactsManager { }); } - public saveArtifact(_artifact: Artifact): Promise { - throw new HardhatError(HardhatError.ERRORS.INTERNAL.NOT_IMPLEMENTED_ERROR, { - message: "Not implemented in fake artifacts manager", - }); - } - - public saveBuildInfo( - _solcVersion: string, - _solcLongVersion: string, - _input: CompilerInput, - _output: CompilerOutput, - ): Promise { - throw new HardhatError(HardhatError.ERRORS.INTERNAL.NOT_IMPLEMENTED_ERROR, { - message: "Not implemented in fake artifacts manager", - }); - } - public getArtifactPath(_fullyQualifiedName: string): Promise { throw new HardhatError(HardhatError.ERRORS.INTERNAL.NOT_IMPLEMENTED_ERROR, { message: "Not implemented in fake artifacts manager", diff --git a/v-next/hardhat/src/internal/builtin-plugins/compile/index.ts b/v-next/hardhat/src/internal/builtin-plugins/compile/index.ts deleted file mode 100644 index 1885d507e7..0000000000 --- a/v-next/hardhat/src/internal/builtin-plugins/compile/index.ts +++ /dev/null @@ -1,18 +0,0 @@ -import type { HardhatPlugin } from "../../../types/plugins.js"; - -import { task } from "../../core/config.js"; - -const hardhatPlugin: HardhatPlugin = { - id: "compile", - tasks: [ - task("compile", "Compiles the entire project, building all artifacts") - .addFlag({ - name: "quiet", - description: "Makes the compilation process less verbose", - }) - .setAction(import.meta.resolve("./task-action.js")) - .build(), - ], -}; - -export default hardhatPlugin; diff --git a/v-next/hardhat/src/internal/builtin-plugins/compile/task-action.ts 
b/v-next/hardhat/src/internal/builtin-plugins/compile/task-action.ts deleted file mode 100644 index 300e4d1811..0000000000 --- a/v-next/hardhat/src/internal/builtin-plugins/compile/task-action.ts +++ /dev/null @@ -1,65 +0,0 @@ -import type { HardhatRuntimeEnvironment } from "../../../types/hre.js"; -import type { NewTaskActionFunction } from "../../../types/tasks.js"; - -import path from "node:path"; - -import { BuildSystem } from "@ignored/hardhat-vnext-build-system"; -import { getCacheDir } from "@ignored/hardhat-vnext-utils/global-dir"; - -interface CompileActionArguments { - quiet: boolean; -} - -const compileWithHardhat: NewTaskActionFunction< - CompileActionArguments -> = async ({ quiet }, hre) => { - const config = await _resolveCompilationConfigFrom(hre); - - const buildSystem = new BuildSystem(config); - - await buildSystem.build({ - quiet, - }); -}; - -async function _resolveCompilationConfigFrom(hre: HardhatRuntimeEnvironment) { - const root = hre.config.paths.root; - const cache: string = - hre.config.paths.cache !== "" - ? hre.config.paths.cache - : // TODO(#5599): Replace with hre.config.paths.cache once it is available - await getCacheDir(); - - const artifacts = path.join(root, "artifacts"); - const sources = path.join(root, "contracts"); - - const compilationConfig = { - paths: { - root, - cache, - artifacts, - sources, - }, - solidity: { - compilers: [ - { - // WARNING: The version of the compiler has been hardcoded here, - // attempts to test with different versions will not work, - // and will have to await the new build system. - version: "0.8.25", - settings: { - optimizer: { - enabled: true, - runs: 200, - }, - }, - }, - ], - overrides: {}, - }, - }; - - return compilationConfig; -} - -export default compileWithHardhat; diff --git a/v-next/hardhat/src/internal/builtin-plugins/index.ts b/v-next/hardhat/src/internal/builtin-plugins/index.ts index e388c4cc3b..b2fe0fbb04 100644 --- a/v-next/hardhat/src/internal/builtin-plugins/index.ts +++ b/v-next/hardhat/src/internal/builtin-plugins/index.ts @@ -2,7 +2,6 @@ import type { HardhatPlugin } from "../../types/plugins.js"; import artifacts from "./artifacts/index.js"; import clean from "./clean/index.js"; -import compile from "./compile/index.js"; import console from "./console/index.js"; import networkManager from "./network-manager/index.js"; import run from "./run/index.js"; @@ -16,7 +15,6 @@ export type * from "./solidity/index.js"; export type * from "./solidity-test/index.js"; export type * from "./network-manager/index.js"; export type * from "./clean/index.js"; -export type * from "./compile/index.js"; export type * from "./console/index.js"; export type * from "./run/index.js"; @@ -26,7 +24,6 @@ export const builtinPlugins: HardhatPlugin[] = [ solidityTest, networkManager, clean, - compile, console, run, ]; diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-profiles.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-profiles.ts new file mode 100644 index 0000000000..c6d908252c --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-profiles.ts @@ -0,0 +1,12 @@ +export const DEFAULT_BUILD_PROFILE = "default"; + +export const DEFAULT_BUILD_PROFILES = [ + "default", + "production", + "solidity-tests", + "javascript-tests", +] as const; + +export function shouldMergeCompilationJobs(buildProfile: string): boolean { + return buildProfile !== "production"; +} diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/artifacts.ts 
b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/artifacts.ts new file mode 100644 index 0000000000..1a2e6914cd --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/artifacts.ts @@ -0,0 +1,139 @@ +import type { Artifact, BuildInfo } from "../../../../types/artifacts.js"; +import type { CompilationJob } from "../../../../types/solidity/compilation-job.js"; +import type { + CompilerOutput, + CompilerOutputContract, +} from "../../../../types/solidity/compiler-io.js"; +import type { + SolidityBuildInfo, + SolidityBuildInfoOutput, +} from "../../../../types/solidity/solidity-artifacts.js"; + +export function getContractArtifact( + buildInfoId: string, + publicSourceName: string, + inputSourceName: string, + contractName: string, + contract: CompilerOutputContract, +): Artifact { + const evmBytecode = contract.evm?.bytecode; + let bytecode: string = evmBytecode?.object ?? ""; + + if (bytecode.slice(0, 2).toLowerCase() !== "0x") { + bytecode = `0x${bytecode}`; + } + + const evmDeployedBytecode = contract.evm?.deployedBytecode; + let deployedBytecode: string = evmDeployedBytecode?.object ?? ""; + + if (deployedBytecode.slice(0, 2).toLowerCase() !== "0x") { + deployedBytecode = `0x${deployedBytecode}`; + } + + const linkReferences = evmBytecode?.linkReferences ?? {}; + const deployedLinkReferences = evmDeployedBytecode?.linkReferences ?? {}; + + const immutableReferences = evmDeployedBytecode?.immutableReferences ?? {}; + + const artifact: Required = { + _format: "hh3-artifact-1", + contractName, + sourceName: publicSourceName, + abi: contract.abi, + bytecode, + deployedBytecode, + linkReferences, + deployedLinkReferences, + immutableReferences, + inputSourceName, + buildInfoId, + }; + + return artifact; +} + +export function getArtifactsDeclarationFile(artifacts: Artifact[]): string { + if (artifacts.length === 0) { + return ""; + } + + const artifactTypes = artifacts.map( + (artifact) => + `export interface ${artifact.contractName}$Type { + ${Object.entries(artifact) + .map(([name, value]) => `readonly ${name}: ${JSON.stringify(value)};`) + .join("\n ")} +};`, + ); + + return `// This file was autogenerated by Hardhat-viem, do not edit it. +// prettier-ignore +// tslint:disable +// eslint-disable +// biome-ignore format: see above + +${artifactTypes.join("\n\n")} + +import "@ignored/hardhat-vnext/types/artifacts"; +declare module "@ignored/hardhat-vnext/types/artifacts" { + interface ArtifactMap { + ${artifacts.map((artifact) => `["${artifact.contractName}"]: ${artifact.contractName}$Type`).join("\n ")}; + ${artifacts.map((artifact) => `["${artifact.sourceName}:${artifact.contractName}"]: ${artifact.contractName}$Type`).join("\n ")}; + } +}`; +} + +export function getDuplicatedContractNamesDeclarationFile( + duplicatedContractNames: string[], +): string { + if (duplicatedContractNames.length === 0) { + return ""; + } + + return `// This file was autogenerated by Hardhat-viem, do not edit it. 
+// prettier-ignore +// tslint:disable +// eslint-disable +// biome-ignore format: see above + +import "@ignored/hardhat-vnext/types/artifacts"; +declare module "@ignored/hardhat-vnext/types/artifacts" { + interface ArtifactMap { + ${duplicatedContractNames.map((name) => `["${name}"]: never`).join("\n ")}; + } +}`; +} + +export function getBuildInfo( + compilationJob: CompilationJob, +): SolidityBuildInfo { + const publicSourceNameMap = Object.fromEntries( + [...compilationJob.dependencyGraph.getRoots().entries()].map( + ([publicSourceName, root]) => [publicSourceName, root.sourceName], + ), + ); + + const buildInfo: Required = { + _format: "hh3-sol-build-info-1", + id: compilationJob.getBuildId(), + solcVersion: compilationJob.solcConfig.version, + solcLongVersion: compilationJob.solcLongVersion, + publicSourceNameMap, + input: compilationJob.getSolcInput(), + }; + + return buildInfo; +} + +export function getBuildInfoOutput( + compilationJob: CompilationJob, + compilerOutput: CompilerOutput, +): SolidityBuildInfoOutput { + const buildInfoOutput: Required = { + _format: "hh3-sol-build-info-output-1", + id: compilationJob.getBuildId(), + output: compilerOutput, + }; + + return buildInfoOutput; +} diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/build-system.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/build-system.ts new file mode 100644 index 0000000000..61d59aee00 --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/build-system.ts @@ -0,0 +1,717 @@ +import type { DependencyGraphImplementation } from "./dependency-graph.js"; +import type { Artifact } from "../../../../types/artifacts.js"; +import type { SolcConfig, SolidityConfig } from "../../../../types/config.js"; +import type { + SolidityBuildSystem, + BuildOptions, + CompilationJobCreationError, + FileBuildResult, + GetCompilationJobsOptions, + CompileBuildInfoOptions, + RunCompilationJobOptions, +} from "../../../../types/solidity/build-system.js"; +import type { CompilationJob } from "../../../../types/solidity/compilation-job.js"; +import type { + CompilerOutput, + CompilerOutputError, +} from "../../../../types/solidity/compiler-io.js"; +import type { SolidityBuildInfo } from "../../../../types/solidity.js"; + +import os from "node:os"; +import path from "node:path"; + +import { assertHardhatInvariant } from "@ignored/hardhat-vnext-errors"; +import { + getAllDirectoriesMatching, + getAllFilesMatching, + readJsonFile, + remove, + writeUtf8File, +} from "@ignored/hardhat-vnext-utils/fs"; +import { shortenPath } from "@ignored/hardhat-vnext-utils/path"; +import { pluralize } from "@ignored/hardhat-vnext-utils/string"; +import chalk from "chalk"; +import debug from "debug"; +import pMap from "p-map"; + +import { FileBuildResultType } from "../../../../types/solidity/build-system.js"; +import { DEFAULT_BUILD_PROFILE } from "../build-profiles.js"; + +import { + getArtifactsDeclarationFile, + getBuildInfo, + getBuildInfoOutput, + getContractArtifact, + getDuplicatedContractNamesDeclarationFile, +} from "./artifacts.js"; +import { CompilationJobImplementation } from "./compilation-job.js"; +import { downloadConfiguredCompilers, getCompiler } from "./compiler/index.js"; +import { buildDependencyGraph } from "./dependency-graph-building.js"; +import { + formatRootPath, + isNpmParsedRootPath, + npmModuleToNpmRootPath, + parseRootPath, +} from "./root-paths-utils.js"; +import { SolcConfigSelector } from "./solc-config-selection.js"; + +const log = 
debug("hardhat:core:solidity:build-system"); + +export interface SolidityBuildSystemOptions { + readonly solidityConfig: SolidityConfig; + readonly projectRoot: string; + readonly soliditySourcesPaths: string[]; + readonly artifactsPath: string; + readonly cachePath: string; +} + +export class SolidityBuildSystemImplementation implements SolidityBuildSystem { + readonly #options: SolidityBuildSystemOptions; + readonly #defaultConcurrenty = Math.max(os.cpus().length - 1, 1); + #downloadedCompilers = false; + + constructor(options: SolidityBuildSystemOptions) { + this.#options = options; + } + + public async getRootFilePaths(): Promise { + const localFilesToCompile = ( + await Promise.all( + this.#options.soliditySourcesPaths.map((dir) => + getAllFilesMatching(dir, (f) => f.endsWith(".sol")), + ), + ) + ).flat(1); + + const dependenciesToCompile = + this.#options.solidityConfig.dependenciesToCompile.map( + npmModuleToNpmRootPath, + ); + + return [...localFilesToCompile, ...dependenciesToCompile]; + } + + public async build( + rootFilePaths: string[], + options?: BuildOptions, + ): Promise> { + await this.#downloadConfiguredCompilers(options?.quiet); + + const compilationJobsPerFile = await this.getCompilationJobs( + rootFilePaths, + options, + ); + + if (!(compilationJobsPerFile instanceof Map)) { + return compilationJobsPerFile; + } + + const compilationJobs = [...new Set(compilationJobsPerFile.values())]; + + // TODO: Filter the compilation jobs based on the cache + + const results: CompilerOutput[] = await pMap( + compilationJobs, + (compilationJob) => this.runCompilationJob(compilationJob), + { + concurrency: options?.concurrency ?? this.#defaultConcurrenty, + // An error when running the compiler is not a compilation failure, but + // a fatal failure trying to run it, so we just throw on the first error + stopOnError: true, + }, + ); + + const isSuccessfulBuild = results.every( + (result) => !this.#hasCompilationErrors(result), + ); + + const contractArtifactsGeneratedByCompilationJob: Map< + CompilationJob, + ReadonlyMap + > = new Map(); + + if (isSuccessfulBuild) { + log("Emitting artifacts of successful build"); + await Promise.all( + compilationJobs.map(async (compilationJob, i) => { + const artifactsPerFile = await this.emitArtifacts( + compilationJob, + results[i], + ); + + contractArtifactsGeneratedByCompilationJob.set( + compilationJob, + artifactsPerFile, + ); + }), + ); + } + + const resultsMap: Map = new Map(); + + for (let i = 0; i < results.length; i++) { + const compilationJob = compilationJobs[i]; + const result = results[i]; + + const contractArtifactsGenerated = isSuccessfulBuild + ? contractArtifactsGeneratedByCompilationJob.get(compilationJob) + : new Map(); + + assertHardhatInvariant( + contractArtifactsGenerated !== undefined, + "We emitted contract artifacts for all the jobs if the build was successful", + ); + + const buildId = compilationJob.getBuildId(); + + const errors = await Promise.all( + (result.errors ?? 
[]).map((error) => + this.remapCompilerError(compilationJob, error, true), + ), + ); + + if (options?.quiet !== true) { + this.#printSolcErrorsAndWarnings(errors); + } + + const successfulResult = !this.#hasCompilationErrors(result); + + for (const [publicSourceName, root] of compilationJob.dependencyGraph + .getRoots() + .entries()) { + if (!successfulResult) { + resultsMap.set(formatRootPath(publicSourceName, root), { + type: FileBuildResultType.BUILD_FAILURE, + buildId, + errors, + }); + + continue; + } + + resultsMap.set(formatRootPath(publicSourceName, root), { + type: FileBuildResultType.BUILD_SUCCESS, + buildId, + contractArtifactsGenerated: + contractArtifactsGenerated.get(publicSourceName) ?? [], + warnings: errors, + }); + } + } + + if (options?.quiet !== true) { + if (isSuccessfulBuild) { + this.#printCompilationResult(compilationJobs); + } + } + + return resultsMap; + } + + public async getCompilationJobs( + rootFilePaths: string[], + options?: GetCompilationJobsOptions, + ): Promise> { + await this.#downloadConfiguredCompilers(options?.quiet); + + const { dependencyGraph, resolver } = await buildDependencyGraph( + rootFilePaths.toSorted(), // We sort them to have a deterministic order + this.#options.projectRoot, + this.#options.solidityConfig.remappings, + ); + + const buildProfileName = options?.buildProfile ?? DEFAULT_BUILD_PROFILE; + + log(`Using build profile ${buildProfileName}`); + + const solcConfigSelector = new SolcConfigSelector( + buildProfileName, + this.#options.solidityConfig.profiles[buildProfileName], + dependencyGraph, + ); + + let subgraphsWithConfig: Array< + [SolcConfig, DependencyGraphImplementation] + > = []; + for (const [rootFile, resolvedFile] of dependencyGraph.getRoots()) { + log( + `Building compilation job for root file ${rootFile} with source name ${resolvedFile.sourceName}`, + ); + + const subgraph = dependencyGraph.getSubgraph(rootFile); + + const configOrError = + solcConfigSelector.selectBestSolcConfigForSingleRootGraph(subgraph); + + if ("reason" in configOrError) { + return configOrError; + } + + subgraphsWithConfig.push([configOrError, subgraph]); + } + + if (options?.mergeCompilationJobs === true) { + log(`Merging compilation jobs`); + + const mergedSubgraphsByConfig: Map< + SolcConfig, + DependencyGraphImplementation + > = new Map(); + + // Note: This is merging solc config object identity, not a deep + // comparison. This is Hardhat v2's behavior. + for (const [solcConfig, subgraph] of subgraphsWithConfig) { + const mergedSubgraph = mergedSubgraphsByConfig.get(solcConfig); + + if (mergedSubgraph === undefined) { + mergedSubgraphsByConfig.set(solcConfig, subgraph); + } else { + mergedSubgraphsByConfig.set( + solcConfig, + mergedSubgraph.merge(subgraph), + ); + } + } + + subgraphsWithConfig = [...mergedSubgraphsByConfig.entries()]; + } + + const solcVersionToLongVersion = new Map(); + + const compilationJobsPerFile = new Map(); + for (const [solcConfig, subgraph] of subgraphsWithConfig) { + let solcLongVersion = solcVersionToLongVersion.get(solcConfig.version); + + if (solcLongVersion === undefined) { + const compiler = await getCompiler(solcConfig.version); + solcLongVersion = compiler.longVersion; + solcVersionToLongVersion.set(solcConfig.version, solcLongVersion); + } + + const compilationJob = new CompilationJobImplementation( + subgraph, + solcConfig, + solcLongVersion, + resolver.getRemappings(), // TODO: Only get the ones relevant to the subgraph? 
+ ); + + for (const [publicSourceName, root] of subgraph.getRoots().entries()) { + compilationJobsPerFile.set( + formatRootPath(publicSourceName, root), + compilationJob, + ); + } + } + + return compilationJobsPerFile; + } + + public async runCompilationJob( + compilationJob: CompilationJob, + options?: RunCompilationJobOptions, + ): Promise { + await this.#downloadConfiguredCompilers(options?.quiet); + + let numberOfFiles = 0; + for (const _ of compilationJob.dependencyGraph.getAllFiles()) { + numberOfFiles++; + } + + const numberOfRootFiles = compilationJob.dependencyGraph.getRoots().size; + + const compiler = await getCompiler(compilationJob.solcConfig.version); + + log( + `Compiling ${numberOfRootFiles} root files and ${numberOfFiles - numberOfRootFiles} dependency files with solc ${compilationJob.solcConfig.version} using ${compiler.compilerPath}`, + ); + + assertHardhatInvariant( + compilationJob.solcLongVersion === compiler.longVersion, + "The long version of the compiler should match the long version of the compilation job", + ); + + return compiler.compile(compilationJob.getSolcInput()); + } + + public async remapCompilerError( + compilationJob: CompilationJob, + error: CompilerOutputError, + shouldShortenPaths: boolean = false, + ): Promise { + return { + type: error.type, + component: error.component, + message: error.message, + severity: error.severity, + errorCode: error.errorCode, + formattedMessage: error.formattedMessage?.replace( + /(-->\s+)([^\s:\n]+)/g, + (_match, prefix, sourceName) => { + const file = + compilationJob.dependencyGraph.getFileBySourceName(sourceName); + + if (file === undefined) { + return `${prefix}${sourceName}`; + } + + const replacement = shouldShortenPaths + ? shortenPath(file.fsPath) + : file.fsPath; + + return `${prefix}${replacement}`; + }, + ), + }; + } + + public async emitArtifacts( + compilationJob: CompilationJob, + compilerOutput: CompilerOutput, + ): Promise> { + const result = new Map(); + + // We emit the artifacts for each root file, first emitting one artifact + // for each contract, and then one declaration file for the entire file, + // which defines their types and augments the ArtifactMap type. + for (const [publicSourceName, root] of compilationJob.dependencyGraph + .getRoots() + .entries()) { + const fileFolder = path.join( + this.#options.artifactsPath, + publicSourceName, + ); + + // If the folder exists, we remove it first, as we don't want to leave + // any old artifacts there. 
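+      // For illustration, a successful build of a root file with public source
+      // name "contracts/Counter.sol" (hypothetical name) is expected to leave:
+      //
+      //   <artifactsPath>/contracts/Counter.sol/Counter.json
+      //   <artifactsPath>/contracts/Counter.sol/artifacts.d.ts
+      //   <artifactsPath>/build-info/<buildId>.json
+      //   <artifactsPath>/build-info/<buildId>.output.json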
+ await remove(fileFolder); + + const contracts = compilerOutput.contracts?.[root.sourceName]; + const paths: string[] = []; + const artifacts: Artifact[] = []; + + // This can be undefined if no contract is present in the source file + if (contracts !== undefined) { + for (const [contractName, contract] of Object.entries(contracts)) { + const contractArtifactPath = path.join( + fileFolder, + `${contractName}.json`, + ); + + const artifact = getContractArtifact( + compilationJob.getBuildId(), + publicSourceName, + root.sourceName, + contractName, + contract, + ); + + await writeUtf8File( + contractArtifactPath, + JSON.stringify(artifact, undefined, 2), + ); + + paths.push(contractArtifactPath); + artifacts.push(artifact); + } + } + + result.set(publicSourceName, paths); + + const artifactsDeclarationFilePath = path.join( + fileFolder, + "artifacts.d.ts", + ); + + const artifactsDeclarationFile = getArtifactsDeclarationFile(artifacts); + + await writeUtf8File( + artifactsDeclarationFilePath, + artifactsDeclarationFile, + ); + } + + // Once we have emitted all the contract artifacts and its declaration + // file, we emit the build info file and its output file. + const buildInfoId = compilationJob.getBuildId(); + + const buildInfoPath = path.join( + this.#options.artifactsPath, + `build-info`, + `${buildInfoId}.json`, + ); + + const buildInfoOutputPath = path.join( + this.#options.artifactsPath, + `build-info`, + `${buildInfoId}.output.json`, + ); + + // BuildInfo and BuildInfoOutput files are large, so we write them + // concurrently, and keep their lifetimes sperated and small. + await Promise.all([ + (async () => { + const buildInfo = getBuildInfo(compilationJob); + + await writeUtf8File( + buildInfoPath, + // TODO: Maybe formatting the build info is slow, but it's mostly + // strings, so it probably shouldn't be a problem. + JSON.stringify(buildInfo, undefined, 2), + ); + })(), + (async () => { + const buildInfoOutput = getBuildInfoOutput( + compilationJob, + compilerOutput, + ); + + await writeUtf8File( + buildInfoOutputPath, + JSON.stringify(buildInfoOutput), + ); + })(), + ]); + + return result; + } + + public async cleanupArtifacts(): Promise { + log(`Cleaning up artifacts`); + + const rootFilePaths = await this.getRootFilePaths(); + + const publicSourceNames = rootFilePaths.map((rootFilePath) => { + const parsed = parseRootPath(rootFilePath); + return isNpmParsedRootPath(parsed) + ? 
parsed.npmPath + : path.relative(this.#options.projectRoot, parsed.fsPath); + }); + + const publicSourceNamesSet = new Set(publicSourceNames); + + for (const file of await getAllDirectoriesMatching( + this.#options.artifactsPath, + (d) => d.endsWith(".sol"), + )) { + const relativePath = path.relative(this.#options.artifactsPath, file); + + if (!publicSourceNamesSet.has(relativePath)) { + await remove(file); + } + } + + const artifactPaths = await getAllFilesMatching( + this.#options.artifactsPath, + (f) => + !f.startsWith(path.join(this.#options.artifactsPath, "build-info")) && + f.endsWith(".json"), + ); + + const reachableBuildInfoIds = await Promise.all( + artifactPaths.map(async (artifactPath) => { + const artifact: Artifact = await readJsonFile(artifactPath); + return artifact.buildInfoId; + }), + ); + + const reachableBuildInfoIdsSet = new Set( + reachableBuildInfoIds.filter((id) => id !== undefined), + ); + + // Get all the reachable build info files + const buildInfoFiles = await getAllFilesMatching( + this.#options.artifactsPath, + (f) => f.startsWith(path.join(this.#options.artifactsPath, "build-info")), + ); + + for (const buildInfoFile of buildInfoFiles) { + const basename = path.basename(buildInfoFile); + + const id = basename.substring(0, basename.indexOf(".")); + + if (!reachableBuildInfoIdsSet.has(id)) { + await remove(buildInfoFile); + } + } + + // Get duplicated contract names + const artifactNameCounts = new Map(); + for (const artifactPath of artifactPaths) { + const basename = path.basename(artifactPath); + const name = basename.substring(0, basename.indexOf(".")); + + let count = artifactNameCounts.get(name); + if (count === undefined) { + count = 0; + } + + artifactNameCounts.set(name, count + 1); + } + + const duplicatedNames = [...artifactNameCounts.entries()] + .filter(([_, count]) => count > 1) + .map(([name, _]) => name); + + const duplicatedContractNamesDeclarationFilePath = path.join( + this.#options.artifactsPath, + "artifacts.d.ts", + ); + + await writeUtf8File( + duplicatedContractNamesDeclarationFilePath, + getDuplicatedContractNamesDeclarationFile(duplicatedNames), + ); + } + + public async compileBuildInfo( + _buildInfo: SolidityBuildInfo, + _options?: CompileBuildInfoOptions, + ): Promise { + // TODO: Download the buildinfo compiler version + assertHardhatInvariant(false, "Method not implemented."); + } + + async #downloadConfiguredCompilers(quiet = false): Promise { + if (this.#downloadedCompilers) { + return; + } + + await downloadConfiguredCompilers(this.#getAllCompilerVersions(), quiet); + this.#downloadedCompilers = true; + } + + #getAllCompilerVersions(): Set { + return new Set( + Object.values(this.#options.solidityConfig.profiles) + .map((profile) => [ + ...profile.compilers.map((compiler) => compiler.version), + ...Object.values(profile.overrides).map( + (override) => override.version, + ), + ]) + .flat(1), + ); + } + + #isConsoleLogError(error: CompilerOutputError): boolean { + const message = error.message; + + return ( + error.type === "TypeError" && + typeof message === "string" && + message.includes("log") && + message.includes("type(library console)") + ); + } + + #hasCompilationErrors(output: CompilerOutput): boolean { + return output.errors?.some((x: any) => x.severity === "error") ?? false; + } + + /** + * This function returns a properly formatted Internal Compiler Error message. + * + * This is present due to a bug in Solidity. 
See: https://github.com/ethereum/solidity/issues/9926 + * + * If the error is not an ICE, or if it's properly formatted, this function returns undefined. + */ + #getFormattedInternalCompilerErrorMessage( + error: CompilerOutputError, + ): string | undefined { + if (error.formattedMessage?.trim() !== "InternalCompilerError:") { + return; + } + + // We trim any final `:`, as we found some at the end of the error messages, + // and then trim just in case a blank space was left + return `${error.type}: ${error.message}`.replace(/[:\s]*$/g, "").trim(); + } + + #printSolcErrorsAndWarnings(errors?: CompilerOutputError[]): void { + if (errors === undefined) { + return; + } + + for (const error of errors) { + if (error.severity === "error") { + const errorMessage: string = + this.#getFormattedInternalCompilerErrorMessage(error) ?? + error.formattedMessage ?? + error.message; + + console.error(errorMessage.replace(/^\w+:/, (t) => chalk.red.bold(t))); + } else { + console.warn( + (error.formattedMessage ?? error.message).replace(/^\w+:/, (t) => + chalk.yellow.bold(t), + ), + ); + } + } + + const hasConsoleErrors: boolean = errors.some((e) => + this.#isConsoleLogError(e), + ); + + if (hasConsoleErrors) { + console.error( + chalk.red( + `The console.log call you made isn't supported. See https://hardhat.org/console-log for the list of supported methods.`, + ), + ); + console.log(); + } + } + + #printCompilationResult(compilationJobs: CompilationJob[]) { + const jobsPerVersionAndEvmVersion = new Map< + string, + Map + >(); + + for (const job of compilationJobs) { + const solcVersion = job.solcConfig.version; + const evmVersion = + job.getSolcInput().settings.evmVersion ?? + `Check solc ${solcVersion}'s doc for its default evm version`; + + let jobsPerVersion = jobsPerVersionAndEvmVersion.get(solcVersion); + if (jobsPerVersion === undefined) { + jobsPerVersion = new Map(); + jobsPerVersionAndEvmVersion.set(solcVersion, jobsPerVersion); + } + + let jobsPerEvmVersion = jobsPerVersion.get(evmVersion); + if (jobsPerEvmVersion === undefined) { + jobsPerEvmVersion = []; + jobsPerVersion.set(evmVersion, jobsPerEvmVersion); + } + + jobsPerEvmVersion.push(job); + } + + for (const solcVersion of [...jobsPerVersionAndEvmVersion.keys()].sort()) { + /* eslint-disable-next-line @typescript-eslint/no-non-null-assertion -- + This is a valid key, just sorted */ + const jobsPerEvmVersion = jobsPerVersionAndEvmVersion.get(solcVersion)!; + + for (const evmVersion of [...jobsPerEvmVersion.keys()].sort()) { + /* eslint-disable-next-line @typescript-eslint/no-non-null-assertion -- + This is a valid key, just sorted */ + const jobs = jobsPerEvmVersion.get(evmVersion)!; + + const rootFiles = jobs.reduce( + (count, job) => count + job.dependencyGraph.getRoots().size, + 0, + ); + + console.log( + `Compiled ${rootFiles} Solidity ${pluralize( + "file", + rootFiles, + )} with solc ${solcVersion} (evm target: ${evmVersion})`, + ); + } + } + } +} diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compilation-job.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compilation-job.ts new file mode 100644 index 0000000000..268bf15fa4 --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compilation-job.ts @@ -0,0 +1,127 @@ +import type { DependencyGraphImplementation } from "./dependency-graph.js"; +import type { Remapping } from "./resolver/types.js"; +import type { BuildInfo } from "../../../../types/artifacts.js"; +import type { SolcConfig } from 
"../../../../types/config.js"; +import type { CompilationJob } from "../../../../types/solidity/compilation-job.js"; +import type { CompilerInput } from "../../../../types/solidity/compiler-io.js"; +import type { DependencyGraph } from "../../../../types/solidity/dependency-graph.js"; + +import { createNonCryptographicHashId } from "@ignored/hardhat-vnext-utils/crypto"; + +import { formatRemapping } from "./resolver/remappings.js"; +import { getEvmVersionFromSolcVersion } from "./solc-info.js"; + +export class CompilationJobImplementation implements CompilationJob { + public readonly dependencyGraph: DependencyGraph; + public readonly solcConfig: SolcConfig; + public readonly solcLongVersion: string; + + readonly #remappings: Remapping[]; + + #buildId: string | undefined; + #solcInput: CompilerInput | undefined; + + constructor( + dependencyGraph: DependencyGraphImplementation, + solcConfig: SolcConfig, + solcLongVersion: string, + remappings: Remapping[], + ) { + this.dependencyGraph = dependencyGraph; + this.solcConfig = solcConfig; + this.solcLongVersion = solcLongVersion; + this.#remappings = remappings; + } + + public getSolcInput(): CompilerInput { + if (this.#solcInput === undefined) { + this.#solcInput = this.#buildSolcInput(); + } + + return this.#solcInput; + } + + public getBuildId(): string { + if (this.#buildId === undefined) { + this.#buildId = this.#computeBuildId(); + } + + return this.#buildId; + } + + #buildSolcInput(): CompilerInput { + const sources: { [sourceName: string]: { content: string } } = {}; + + // we sort the files so that we always get the same compilation input + const resolvedFiles = [...this.dependencyGraph.getAllFiles()].sort((a, b) => + a.sourceName.localeCompare(b.sourceName), + ); + + for (const file of resolvedFiles) { + sources[file.sourceName] = { + content: file.content.text, + }; + } + + const settings = this.solcConfig.settings; + + const rootsOutputSelection: CompilerInput["settings"]["outputSelection"] = + Object.fromEntries( + [...this.dependencyGraph.getRoots().values()] + .sort((a, b) => a.sourceName.localeCompare(b.sourceName)) + .map((root) => [ + root.sourceName, + { + "*": [ + "abi", + "evm.bytecode", + "evm.deployedBytecode", + "evm.methodIdentifiers", + "metadata", + ], + }, + ]), + ); + + const defaultOutputSelection: CompilerInput["settings"]["outputSelection"] = + { + "*": { + "": ["ast"], + }, + ...rootsOutputSelection, + }; + + // TODO: Deep merge the user output selection with the default one + const outputSelection = defaultOutputSelection; + + return { + language: "Solidity", + sources, + settings: { + ...settings, + evmVersion: + settings.evmVersion ?? + getEvmVersionFromSolcVersion(this.solcConfig.version), + outputSelection, + remappings: this.#remappings.map(formatRemapping), + }, + }; + } + + #computeBuildId(): string { + // NOTE: We type it this way so that this stop compiling if we ever change + // the format of the BuildInfo type. + const format: BuildInfo["_format"] = "hh3-sol-build-info-1"; + + // The preimage should include all the information that makes this + // compilation job unique, and as this is used to identify the build info + // file, it also includes its format string. 
+ const preimage = + format + + this.solcLongVersion + + JSON.stringify(this.getSolcInput()) + + JSON.stringify(this.solcConfig); + + return createNonCryptographicHashId(preimage); + } +} diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/compiler.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/compiler.ts new file mode 100644 index 0000000000..bae37b9406 --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/compiler.ts @@ -0,0 +1,144 @@ +import type { + CompilerInput, + CompilerOutput, +} from "../../../../../types/solidity/compiler-io.js"; + +import { execFile } from "node:child_process"; +import os from "node:os"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; + +import { + HardhatError, + assertHardhatInvariant, +} from "@ignored/hardhat-vnext-errors"; +import { ensureError } from "@ignored/hardhat-vnext-utils/error"; +import { mkdir } from "@ignored/hardhat-vnext-utils/fs"; +import * as semver from "semver"; + +export interface Compiler { + readonly version: string; + readonly longVersion: string; + readonly compilerPath: string; + readonly isSolcJs: boolean; + + compile(input: CompilerInput): Promise; +} + +const COMPILATION_SUBPROCESS_IO_BUFFER_SIZE = 1024 * 1024 * 500; + +export class SolcJsCompiler implements Compiler { + public readonly isSolcJs = true; + + constructor( + public readonly version: string, + public readonly longVersion: string, + public readonly compilerPath: string, + ) {} + + public async compile(input: CompilerInput): Promise { + const scriptPath = fileURLToPath(import.meta.resolve("./solcjs-runner.js")); + + // If the script is a TypeScript file, we need to pass the --import tsx/esm + // which is available, as we are running the tests + const nodeOptions = scriptPath.endsWith(".ts") + ? ["--import", "tsx/esm"] + : []; + + const output: string = await new Promise((resolve, reject) => { + try { + const subprocess = execFile( + process.execPath, + [...nodeOptions, scriptPath, this.compilerPath], + { + maxBuffer: COMPILATION_SUBPROCESS_IO_BUFFER_SIZE, + }, + (err, stdout) => { + if (err !== null) { + return reject(err); + } + resolve(stdout); + }, + ); + + assertHardhatInvariant( + subprocess.stdin !== null, + "process.stdin should be defined", + ); + + subprocess.stdin.write(JSON.stringify(input)); + subprocess.stdin.end(); + } catch (e) { + ensureError(e); + + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.CANT_RUN_SOLCJS_COMPILER, + e, + ); + } + }); + + return JSON.parse(output); + } +} + +export class NativeCompiler implements Compiler { + public readonly isSolcJs = false; + + constructor( + public readonly version: string, + public readonly longVersion: string, + public readonly compilerPath: string, + ) {} + + public async compile(input: CompilerInput): Promise { + const args = ["--standard-json"]; + + // Logic to make sure that solc default import callback is not being used. + // If solcVersion is not defined or <= 0.6.8, do not add extra args. 
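+    // Summarized, the flags passed to solc end up being (a summary of the
+    // branches below; <tmp> stands for os.tmpdir()/hardhat-solc):
+    //   solc >= 0.8.22          -> --standard-json --no-import-callback
+    //   0.6.9 <= solc < 0.8.22  -> --standard-json --base-path <tmp>
+    //   solc < 0.6.9            -> --standard-json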
+ if (this.version !== undefined) { + if (semver.gte(this.version, "0.8.22")) { + // version >= 0.8.22 + args.push("--no-import-callback"); + } else if (semver.gte(this.version, "0.6.9")) { + // version >= 0.6.9 + const tmpFolder = path.join(os.tmpdir(), "hardhat-solc"); + await mkdir(tmpFolder); + args.push(`--base-path`); + args.push(tmpFolder); + } + } + + const output: string = await new Promise((resolve, reject) => { + try { + const process = execFile( + this.compilerPath, + args, + { + maxBuffer: COMPILATION_SUBPROCESS_IO_BUFFER_SIZE, + }, + (err, stdout) => { + if (err !== null) { + return reject(err); + } + resolve(stdout); + }, + ); + + assertHardhatInvariant(process.stdin !== null, "process.stdin is null"); + + process.stdin.write(JSON.stringify(input)); + process.stdin.end(); + } catch (e) { + ensureError(e); + + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.CANT_RUN_NATIVE_COMPILER, + e, + ); + } + }); + + return JSON.parse(output); + } +} diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/downloader.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/downloader.ts new file mode 100644 index 0000000000..cfaa15b1d4 --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/downloader.ts @@ -0,0 +1,404 @@ +import type { Compiler } from "./compiler.js"; + +import { execFile } from "node:child_process"; +import os from "node:os"; +import path from "node:path"; +import { promisify } from "node:util"; + +import { + HardhatError, + assertHardhatInvariant, +} from "@ignored/hardhat-vnext-errors"; +import { bytesToHexString } from "@ignored/hardhat-vnext-utils/bytes"; +import { keccak256 } from "@ignored/hardhat-vnext-utils/crypto"; +import { ensureError } from "@ignored/hardhat-vnext-utils/error"; +import { + chmod, + createFile, + ensureDir, + exists, + readBinaryFile, + readJsonFile, + remove, +} from "@ignored/hardhat-vnext-utils/fs"; +import { download } from "@ignored/hardhat-vnext-utils/request"; +import debug from "debug"; + +import { NativeCompiler, SolcJsCompiler } from "./compiler.js"; + +const log = debug("hardhat:solidity:downloader"); + +const COMPILER_REPOSITORY_URL = "https://binaries.soliditylang.org"; + +export enum CompilerPlatform { + LINUX = "linux-amd64", + WINDOWS = "windows-amd64", + MACOS = "macosx-amd64", + WASM = "wasm", +} + +interface CompilerBuild { + path: string; + version: string; + build: string; + longVersion: string; + keccak256: string; + urls: string[]; + platform: CompilerPlatform; +} + +interface CompilerList { + builds: CompilerBuild[]; + releases: { [version: string]: string }; + latestRelease: string; +} + +/** + * A compiler downloader which must be specialized per-platform. It can't and + * shouldn't support multiple platforms at the same time. + * + * This is expected to be used like this: + * 1. First, the downloader is created for the given platform. + * 2. Then, call `downloader.updateCompilerListIfNeeded(versionsToUse)` to + * update the compiler list if one of the versions is not found. + * 3. Then, call `downloader.isCompilerDownloaded()` to check if the + * compiler is already downloaded. + * 4. If the compiler is not downloaded, call + * `downloader.downloadCompiler()` to download it. + * 5. Finally, call `downloader.getCompiler()` to get the compiler. + * + * Important things to note: + * 1. If a compiler version is not found, this downloader may fail. 
+ * 1.1.1 If a user tries to download a new compiler before X amount of time + * has passed since its release, they may need to clean the cache, as + * indicated in the error messages. + */ +export interface CompilerDownloader { + /** + * Updates the compiler list if any of the versions is not found in the + * currently downloaded list, or if none has been downlaoded yet. + */ + updateCompilerListIfNeeded(versions: Set): Promise; + + /** + * Returns true if the compiler has been downloaded. + * + * This function access the filesystem, but doesn't modify it. + */ + isCompilerDownloaded(version: string): Promise; + + /** + * Downloads the compiler for a given version, which can later be obtained + * with getCompiler. + * + * @returns `true` if the compiler was downloaded and verified correctly, + * including validating the checksum and if the native compiler can be run. + */ + downloadCompiler(version: string): Promise; + + /** + * Returns the compiler, which MUST be downloaded before calling this function. + * + * Returns undefined if the compiler has been downloaded but can't be run. + * + * This function access the filesystem, but doesn't modify it. + */ + getCompiler(version: string): Promise; +} + +/** + * Default implementation of CompilerDownloader. + */ +export class CompilerDownloaderImplementation implements CompilerDownloader { + public static getCompilerPlatform(): CompilerPlatform { + // TODO: This check is seriously wrong. It doesn't take into account + // the architecture nor the toolchain. This should check the triplet of + // system instead (see: https://wiki.osdev.org/Target_Triplet). + // + // The only reason this downloader works is that it validates if the + // binaries actually run. + // + // On top of that, AppleSillicon with Rosetta2 makes things even more + // complicated, as it allows x86 binaries to run on ARM, not on MacOS but + // on Linux Docker containers too! 
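+    // Usage sketch of the protocol documented on the CompilerDownloader
+    // interface above (assumed wiring; the real call sites live in
+    // compiler/index.ts):
+    //
+    //   const downloader = new CompilerDownloaderImplementation(
+    //     CompilerDownloaderImplementation.getCompilerPlatform(),
+    //     compilersDir,
+    //   );
+    //   await downloader.updateCompilerListIfNeeded(new Set(["0.8.27"]));
+    //   if (!(await downloader.isCompilerDownloaded("0.8.27"))) {
+    //     await downloader.downloadCompiler("0.8.27");
+    //   }
+    //   const compiler = await downloader.getCompiler("0.8.27");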
+ switch (os.platform()) { + case "win32": + return CompilerPlatform.WINDOWS; + case "linux": + return CompilerPlatform.LINUX; + case "darwin": + return CompilerPlatform.MACOS; + default: + return CompilerPlatform.WASM; + } + } + + readonly #platform: CompilerPlatform; + readonly #compilersDir: string; + readonly #downloadFunction: typeof download; + + /** + * Use CompilerDownloader.getConcurrencySafeDownloader instead + */ + constructor( + platform: CompilerPlatform, + compilersDir: string, + downloadFunction: typeof download = download, + ) { + this.#platform = platform; + this.#compilersDir = compilersDir; + this.#downloadFunction = downloadFunction; + } + + public async updateCompilerListIfNeeded( + versions: Set, + ): Promise { + if (await this.#shouldDownloadCompilerList(versions)) { + try { + log( + `Downloading the list of solc builds for platform ${this.#platform}`, + ); + await this.#downloadCompilerList(); + } catch (e) { + ensureError(e); + + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.VERSION_LIST_DOWNLOAD_FAILED, + e, + ); + } + } + } + + public async isCompilerDownloaded(version: string): Promise { + const build = await this.#getCompilerBuild(version); + + const downloadPath = this.#getCompilerBinaryPathFromBuild(build); + + return exists(downloadPath); + } + + public async downloadCompiler(version: string): Promise { + const isCompilerDownloaded = await this.isCompilerDownloaded(version); + + if (isCompilerDownloaded === true) { + return true; + } + + const build = await this.#getCompilerBuild(version); + + let downloadPath: string; + try { + downloadPath = await this.#downloadCompiler(build); + } catch (e) { + ensureError(e); + + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.DOWNLOAD_FAILED, + { + remoteVersion: build.longVersion, + }, + e, + ); + } + + const verified = await this.#verifyCompilerDownload(build, downloadPath); + if (!verified) { + throw new HardhatError(HardhatError.ERRORS.SOLIDITY.INVALID_DOWNLOAD, { + remoteVersion: build.longVersion, + }); + } + + return this.#postProcessCompilerDownload(build, downloadPath); + } + + public async getCompiler(version: string): Promise { + const build = await this.#getCompilerBuild(version); + + assertHardhatInvariant( + build !== undefined, + `Trying to get a compiler ${version} before it was downloaded`, + ); + + const compilerPath = this.#getCompilerBinaryPathFromBuild(build); + + assertHardhatInvariant( + await exists(compilerPath), + `Trying to get a compiler ${version} before it was downloaded`, + ); + + if (await exists(this.#getCompilerDoesntWorkFile(build))) { + return undefined; + } + + if (this.#platform === CompilerPlatform.WASM) { + return new SolcJsCompiler(version, build.longVersion, compilerPath); + } + + return new NativeCompiler(version, build.longVersion, compilerPath); + } + + async #getCompilerBuild(version: string): Promise { + const listPath = this.#getCompilerListPath(); + assertHardhatInvariant( + await exists(listPath), + `Trying to get the compiler list for ${this.#platform} before it was downloaded`, + ); + + const list = await this.#readCompilerList(listPath); + + const build = list.builds.find((b) => b.version === version); + + if (build === undefined) { + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.INVALID_SOLC_VERSION, + { + version, + }, + ); + } + + return build; + } + + #getCompilerListPath(): string { + return path.join(this.#compilersDir, this.#platform, "list.json"); + } + + async #readCompilerList(listPath: string): Promise { + return 
readJsonFile(listPath); + } + + #getCompilerDownloadPathFromBuild(build: CompilerBuild): string { + return path.join(this.#compilersDir, this.#platform, build.path); + } + + #getCompilerBinaryPathFromBuild(build: CompilerBuild): string { + const downloadPath = this.#getCompilerDownloadPathFromBuild(build); + + if ( + this.#platform !== CompilerPlatform.WINDOWS || + !downloadPath.endsWith(".zip") + ) { + return downloadPath; + } + + return path.join(this.#compilersDir, build.version, "solc.exe"); + } + + #getCompilerDoesntWorkFile(build: CompilerBuild): string { + return `${this.#getCompilerBinaryPathFromBuild(build)}.does.not.work`; + } + + async #shouldDownloadCompilerList(versions: Set): Promise { + const listPath = this.#getCompilerListPath(); + + log( + `Checking if the compiler list for ${this.#platform} should be downloaded at ${listPath}`, + ); + + if (!(await exists(listPath))) { + return true; + } + + const list = await this.#readCompilerList(listPath); + + const listVersions = new Set(list.builds.map((b) => b.version)); + + for (const version of versions) { + if (!listVersions.has(version)) { + // TODO: We should also check if it wasn't downloaded soon ago + return true; + } + } + + return false; + } + + async #downloadCompilerList(): Promise { + log(`Downloading compiler list for platform ${this.#platform}`); + const url = `${COMPILER_REPOSITORY_URL}/${this.#platform}/list.json`; + const downloadPath = this.#getCompilerListPath(); + + await this.#downloadFunction(url, downloadPath); + } + + async #downloadCompiler(build: CompilerBuild): Promise { + log(`Downloading compiler ${build.longVersion}`); + const url = `${COMPILER_REPOSITORY_URL}/${this.#platform}/${build.path}`; + const downloadPath = this.#getCompilerDownloadPathFromBuild(build); + + await this.#downloadFunction(url, downloadPath); + + return downloadPath; + } + + async #verifyCompilerDownload( + build: CompilerBuild, + downloadPath: string, + ): Promise { + const expectedKeccak256 = build.keccak256; + const compiler = await readBinaryFile(downloadPath); + + const compilerKeccak256 = bytesToHexString(await keccak256(compiler)); + + if (expectedKeccak256 !== compilerKeccak256) { + await remove(downloadPath); + return false; + } + + return true; + } + + async #postProcessCompilerDownload( + build: CompilerBuild, + downloadPath: string, + ): Promise { + if (this.#platform === CompilerPlatform.WASM) { + return true; + } + + if ( + this.#platform === CompilerPlatform.LINUX || + this.#platform === CompilerPlatform.MACOS + ) { + await chmod(downloadPath, 0o755); + } else if ( + this.#platform === CompilerPlatform.WINDOWS && + downloadPath.endsWith(".zip") + ) { + // some window builds are zipped, some are not + const { default: AdmZip } = await import("adm-zip"); + + const solcFolder = path.join(this.#compilersDir, build.version); + await ensureDir(solcFolder); + + const zip = new AdmZip(downloadPath); + zip.extractAllTo(solcFolder); + } + + log("Checking native solc binary"); + const nativeSolcWorks = await this.#checkNativeSolc(build); + + if (nativeSolcWorks) { + return true; + } + + await createFile(this.#getCompilerDoesntWorkFile(build)); + + return false; + } + + async #checkNativeSolc(build: CompilerBuild): Promise { + const solcPath = this.#getCompilerBinaryPathFromBuild(build); + const execFileP = promisify(execFile); + + try { + await execFileP(solcPath, ["--version"]); + return true; + } catch { + log(`solc binary at ${solcPath} is not working`); + return false; + } + } +} diff --git 
a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/index.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/index.ts new file mode 100644 index 0000000000..44da160b77 --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/index.ts @@ -0,0 +1,107 @@ +import type { Compiler } from "./compiler.js"; + +import path from "node:path"; + +import { + assertHardhatInvariant, + HardhatError, +} from "@ignored/hardhat-vnext-errors"; +import { getCacheDir } from "@ignored/hardhat-vnext-utils/global-dir"; + +import { + CompilerDownloaderImplementation, + CompilerPlatform, +} from "./downloader.js"; + +async function getGlobalCompilersCacheDir(): Promise { + const globalCompilersCacheDir = await getCacheDir(); + + return path.join(globalCompilersCacheDir, "compilers-v3"); +} + +export async function downloadConfiguredCompilers( + versions: Set, + quiet: boolean, +): Promise { + const platform = CompilerDownloaderImplementation.getCompilerPlatform(); + + const mainCompilerDownloader = new CompilerDownloaderImplementation( + platform, + await getGlobalCompilersCacheDir(), + ); + + await mainCompilerDownloader.updateCompilerListIfNeeded(versions); + + const wasmCompilersToGet = new Set(); + + for (const version of versions) { + if (!(await mainCompilerDownloader.isCompilerDownloaded(version))) { + if (!quiet) { + console.log(`Downloading solc ${version}`); + } + + const success = await mainCompilerDownloader.downloadCompiler(version); + + if (!success) { + wasmCompilersToGet.add(version); + + if (!quiet) { + console.log(`Download failed for solc ${version}`); + } + } + } + } + + if (platform === CompilerPlatform.WASM || wasmCompilersToGet.size === 0) { + return; + } + + const wasmCompilerDownloader = new CompilerDownloaderImplementation( + CompilerPlatform.WASM, + await getGlobalCompilersCacheDir(), + ); + + await wasmCompilerDownloader.updateCompilerListIfNeeded(wasmCompilersToGet); + + for (const version of wasmCompilersToGet) { + if (!quiet) { + console.log(`Downloading solc ${version} (WASM build)`); + } + + const success = await wasmCompilerDownloader.downloadCompiler(version); + + if (!success) { + throw new HardhatError(HardhatError.ERRORS.SOLIDITY.DOWNLOAD_FAILED, { + remoteVersion: version, + }); + } + } +} + +export async function getCompiler(version: string): Promise { + const platform = CompilerDownloaderImplementation.getCompilerPlatform(); + const compilerDownloader = new CompilerDownloaderImplementation( + platform, + await getGlobalCompilersCacheDir(), + ); + + const compiler = await compilerDownloader.getCompiler(version); + + if (compiler !== undefined) { + return compiler; + } + + const wasmCompilerDownloader = new CompilerDownloaderImplementation( + CompilerPlatform.WASM, + await getGlobalCompilersCacheDir(), + ); + + const wasmCompiler = await wasmCompilerDownloader.getCompiler(version); + + assertHardhatInvariant( + wasmCompiler !== undefined, + `WASM build of solc ${version} isn't working`, + ); + + return wasmCompiler; +} diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/solcjs-runner.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/solcjs-runner.ts new file mode 100644 index 0000000000..51e215e106 --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/compiler/solcjs-runner.ts @@ -0,0 +1,39 @@ +async function readStream( + stream: NodeJS.ReadStream, + encoding: BufferEncoding = "utf8", +) { 
+ stream.setEncoding(encoding); + + return new Promise((resolve, reject) => { + let data = ""; + + stream.on("data", (chunk) => (data += chunk.toString(encoding))); + stream.on("end", () => resolve(data)); + stream.on("error", (error) => reject(error)); + }); +} + +async function getSolcJs(solcJsPath: string) { + /* eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- + We cast to string because it doesn't have types, and otherwise TS complains */ + const { default: solcWrapper } = await import("solc/wrapper.js" as string); + const { default: solc } = await import(solcJsPath); + + return solcWrapper(solc); +} + +async function main() { + const input = await readStream(process.stdin); + + const solcjsPath = process.argv[2]; + const solc = await getSolcJs(solcjsPath); + + const output = solc.compile(input); + + console.log(output); +} + +main().catch((error: unknown) => { + console.error(error); + process.exitCode = 1; +}); diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/debug-utils.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/debug-utils.ts new file mode 100644 index 0000000000..537859c13d --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/debug-utils.ts @@ -0,0 +1,80 @@ +import type { Resolver } from "./resolver/types.js"; +import type { DependencyGraph } from "../../../../types/solidity/dependency-graph.js"; + +import { ResolvedFileType } from "../../../../types/solidity/resolved-file.js"; + +import { formatRemapping } from "./resolver/remappings.js"; + +export function printDependencyGraphAndRemappingsSummary( + dependencyGraph: DependencyGraph, + resolver?: Resolver, +): void { + const roots = dependencyGraph.getRoots(); + const rootFiles = new Set(roots.values()); + const allFiles = dependencyGraph.getAllFiles(); + + const rootRepresentations: string[] = []; + + for (const [rootFile, resolvedFile] of roots.entries()) { + if (resolvedFile.type === ResolvedFileType.NPM_PACKGE_FILE) { + rootRepresentations.push(`- ${rootFile} -> ${resolvedFile.sourceName} + ${resolvedFile.fsPath}`); + } else { + rootRepresentations.push(`- ${resolvedFile.sourceName} + ${resolvedFile.fsPath}`); + } + } + + console.log(`Printing dependency graph and remappings summary`); + + console.log(` +Roots: + ${rootRepresentations.join("\n ")} +`); + + const otherFiles = [...allFiles].filter((file) => !rootFiles.has(file)); + + if (otherFiles.length > 0) { + console.log(` +Other files: + ${otherFiles + .map((file) => `- ` + file.sourceName + `\n ` + file.fsPath) + .join("\n ")} +`); + } + + const files = [...[...rootFiles].toSorted(), ...[...otherFiles].toSorted()]; + const dependencies: string[] = []; + + for (const file of files) { + const dependenciesForFile = [...dependencyGraph.getDependencies(file)] + .map((d) => d.sourceName) + .sort(); + + for (const dependency of dependenciesForFile) { + dependencies.push(`- ${file.sourceName} -> ${dependency}`); + } + } + + if (dependencies.length > 0) { + console.log(` +Dependencies: + ${dependencies.join("\n ")} +`); + } + + if (resolver === undefined) { + return; + } + + const remappings = resolver.getRemappings(); + + if (remappings.length > 0) { + console.log(` +Remappings: + ${remappings.map((r) => `- ${formatRemapping(r)}`).join("\n ")} +`); + + console.log("\n\n"); + } +} diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/dependency-graph-building.ts 
b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/dependency-graph-building.ts
new file mode 100644
index 0000000000..113109741c
--- /dev/null
+++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/dependency-graph-building.ts
@@ -0,0 +1,57 @@
+import type { Resolver } from "./resolver/types.js";
+import type { ResolvedFile } from "../../../../types/solidity/resolved-file.js";
+
+import { DependencyGraphImplementation } from "./dependency-graph.js";
+import { ResolverImplementation } from "./resolver/dependency-resolver.js";
+import { isNpmParsedRootPath, parseRootPath } from "./root-paths-utils.js";
+
+export async function buildDependencyGraph(
+  rootFiles: string[],
+  projectRoot: string,
+  userRemappings: string[],
+): Promise<{
+  dependencyGraph: DependencyGraphImplementation;
+  resolver: Resolver;
+}> {
+  const resolver = await ResolverImplementation.create(
+    projectRoot,
+    userRemappings,
+  );
+
+  const dependencyGraph = new DependencyGraphImplementation();
+
+  const filesToProcess: ResolvedFile[] = [];
+
+  for (const file of rootFiles) {
+    let resolvedFile;
+
+    const rootPath = parseRootPath(file);
+    if (isNpmParsedRootPath(rootPath)) {
+      resolvedFile = await resolver.resolveNpmDependencyFile(rootPath.npmPath);
+      dependencyGraph.addRootFile(rootPath.npmPath, resolvedFile);
+    } else {
+      resolvedFile = await resolver.resolveProjectFile(rootPath.fsPath);
+      dependencyGraph.addRootFile(resolvedFile.sourceName, resolvedFile);
+    }
+
+    filesToProcess.push(resolvedFile);
+  }
+
+  let fileToProcess;
+  while ((fileToProcess = filesToProcess.pop()) !== undefined) {
+    for (const importPath of fileToProcess.content.importPaths) {
+      const importedFile = await resolver.resolveImport(
+        fileToProcess,
+        importPath,
+      );
+
+      if (!dependencyGraph.hasFile(importedFile)) {
+        filesToProcess.push(importedFile);
+      }
+
+      dependencyGraph.addDependency(fileToProcess, importedFile);
+    }
+  }
+
+  return { dependencyGraph, resolver };
+}
diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/dependency-graph.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/dependency-graph.ts
new file mode 100644
index 0000000000..e6ddec6a8c
--- /dev/null
+++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/dependency-graph.ts
@@ -0,0 +1,146 @@
+import type { DependencyGraph } from "../../../../types/solidity/dependency-graph.js";
+import type { ResolvedFile } from "../../../../types/solidity/resolved-file.js";
+
+import { assertHardhatInvariant } from "@ignored/hardhat-vnext-errors";
+
+export class DependencyGraphImplementation implements DependencyGraph {
+  readonly #fileBySourceName = new Map<string, ResolvedFile>();
+  readonly #rootByPublicSourceName = new Map<string, ResolvedFile>();
+  readonly #dependencies = new Map<ResolvedFile, Set<ResolvedFile>>();
+
+  /**
+   * Adds a root file to the graph. All the roots of the dependency graph must
+   * be added before any dependency.
+   *
+   * @param publicSourceName The source name used to identify the file, as it
+   * appears in the artifacts and is used by the user. This is not always the
+   * same as the source name used by solc, as it differs when an npm file is
+   * acting as a root.
+   * @param root The root file.
+   */
+  public addRootFile(publicSourceName: string, root: ResolvedFile): void {
+    this.#addFile(root);
+    this.#rootByPublicSourceName.set(publicSourceName, root);
+  }
+
+  /**
+   * Adds a dependency from a file to another one.
+   *
+   * @param from The file that depends on another one, which must be already
+   * present in the graph.
+   * @param to The dependency, which will be added to the list of dependencies
+   * of the file, and added to the graph if needed.
+   */
+  public addDependency(from: ResolvedFile, to: ResolvedFile): void {
+    const dependencies = this.#dependencies.get(from);
+    assertHardhatInvariant(
+      dependencies !== undefined,
+      "File `from` not present in the graph",
+    );
+
+    if (!this.hasFile(to)) {
+      this.#addFile(to);
+    }
+
+    dependencies.add(to);
+  }
+
+  /**
+   * Returns a map of public source names to root files.
+   */
+  public getRoots(): ReadonlyMap<string, ResolvedFile> {
+    return this.#rootByPublicSourceName;
+  }
+
+  /**
+   * Returns all the files in the graph.
+   */
+  public getAllFiles(): Iterable<ResolvedFile> {
+    return this.#dependencies.keys();
+  }
+
+  public hasFile(file: ResolvedFile): boolean {
+    return this.#dependencies.has(file);
+  }
+
+  public getDependencies(file: ResolvedFile): ReadonlySet<ResolvedFile> {
+    return this.#dependencies.get(file) ?? new Set();
+  }
+
+  public getFileBySourceName(sourceName: string): ResolvedFile | undefined {
+    return this.#fileBySourceName.get(sourceName);
+  }
+
+  public getSubgraph(
+    ...rootPublicSourceNames: string[]
+  ): DependencyGraphImplementation {
+    const subgraph = new DependencyGraphImplementation();
+
+    const filesToTraverse: ResolvedFile[] = [];
+
+    for (const rootPublicSourceName of rootPublicSourceNames) {
+      const root = this.#rootByPublicSourceName.get(rootPublicSourceName);
+
+      assertHardhatInvariant(
+        root !== undefined,
+        "We should have a root for every root public source name",
+      );
+
+      subgraph.addRootFile(rootPublicSourceName, root);
+      filesToTraverse.push(root);
+    }
+
+    let fileToAnalyze;
+    while ((fileToAnalyze = filesToTraverse.pop()) !== undefined) {
+      for (const dependency of this.getDependencies(fileToAnalyze)) {
+        if (!subgraph.hasFile(dependency)) {
+          filesToTraverse.push(dependency);
+        }
+
+        subgraph.addDependency(fileToAnalyze, dependency);
+      }
+    }
+
+    return subgraph;
+  }
+
+  public merge(
+    other: DependencyGraphImplementation,
+  ): DependencyGraphImplementation {
+    const merged = new DependencyGraphImplementation();
+
+    for (const [publicSourceName, root] of this.#rootByPublicSourceName) {
+      merged.addRootFile(publicSourceName, root);
+    }
+
+    for (const [publicSourceName, root] of other.#rootByPublicSourceName) {
+      merged.addRootFile(publicSourceName, root);
+    }
+
+    for (const [from, toes] of this.#dependencies) {
+      for (const to of toes) {
+        merged.addDependency(from, to);
+      }
+    }
+
+    for (const [from, toes] of other.#dependencies) {
+      for (const to of toes) {
+        merged.addDependency(from, to);
+      }
+    }
+
+    return merged;
+  }
+
+  #addFile(file: ResolvedFile): void {
+    assertHardhatInvariant(!this.hasFile(file), "File already present");
+
+    assertHardhatInvariant(
+      this.#fileBySourceName.get(file.sourceName) === undefined,
+      "File already present",
+    );
+
+    this.#fileBySourceName.set(file.sourceName, file);
+    this.#dependencies.set(file, new Set());
+  }
+}
diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/resolver/dependency-resolver.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/resolver/dependency-resolver.ts
new file mode 100644
index 0000000000..b8434b28c4
--- /dev/null
+++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/resolver/dependency-resolver.ts
@@ -0,0 +1,1520 @@
+import type { Remapping, Resolver } from "./types.js";
+import type {
+  ResolvedNpmPackage,
+  ResolvedFile,
+  ProjectResolvedFile,
+  NpmPackageResolvedFile,
+  FileContent,
+} from "../../../../../types/solidity/resolved-file.js";
+
+import path from "node:path";
+
+import {
+  HardhatError,
+  assertHardhatInvariant,
+} from "@ignored/hardhat-vnext-errors";
+import { ensureError } from "@ignored/hardhat-vnext-utils/error";
+import {
+  FileNotFoundError,
+  exists,
+  getFileTrueCase,
+  readJsonFile,
+  readUtf8File,
+} from "@ignored/hardhat-vnext-utils/fs";
+import { shortenPath } from "@ignored/hardhat-vnext-utils/path";
+import { ResolutionError, resolve } from "@ignored/hardhat-vnext-utils/resolve";
+import { analyze } from "@nomicfoundation/solidity-analyzer";
+
+import { ResolvedFileType } from "../../../../../types/solidity/resolved-file.js";
+import { AsyncMutex } from "../../../../core/async-mutex.js";
+
+import {
+  applyValidRemapping,
+  parseRemappingString,
+  selectBestRemapping,
+} from "./remappings.js";
+
+// Things to note:
+// - This resolver assumes that the root of the project is the folder with the
+//   closest package.json to the config file.
+// - Each file system file is resolved to a single ResolvedFile, with a unique
+//   source name.
+// - Files within the project have their relative path as their source name.
+// - Files within npm packages have source names that start with `npm/` and
+//   contain the package name and version. e.g. `npm/package@1.2.3/path.sol`.
+// - Files within npm packages that are part of a monorepo are resolved like
+//   npm packages, but with the version `local`.
+// - This resolver does not support `package.json#exports`.
+// - This resolver fails if an import has a casing different from that of the
+//   file system.
+// - We do not allow users to remap the imports present in files within npm
+//   packages, by forbidding user remappings whose context starts with `npm/`.
+// - We do allow users to remap imports of non-npm files, including remapping
+//   them into files within npm packages.
+// - Every import in an npm file is either relative or remapped by a remapping
+//   generated by the resolver.
+// - A direct import (i.e. not relative) is considered to be local within the
+//   project/package if it is a bare file name (not in a directory), or if
+//   the first directory exists in the project/package.
+// - The direct import "hardhat/console.sol" is a special case and it is never
+//   considered to be local. And we only remap `hardhat/console.sol`.
+// - Local imports within the project may be remapped by user remappings, but
+//   not by the resolver.
+// - Imports into npm packages are always remapped, if not by the user, by the
+//   resolver.
+// - Direct local imports within npm packages are always remapped by the
+//   resolver.
+
+/**
+ * A user remapping, parsed, and with its npm package resolved, if any.
+ */
+interface ResolvedUserRemapping {
+  rawFormat: string;
+  context: string;
+  prefix: string;
+  target: string;
+  targetNpmPackage?: ResolvedNpmPackage;
+}
+
+/**
+ * A sentinel value that represents the root of the Hardhat project.
+ */
+const PROJECT_ROOT_SENTINEL: unique symbol = Symbol();
+
+export class ResolverImplementation implements Resolver {
+  readonly #projectRoot: string;
+  readonly #userRemappings: ResolvedUserRemapping[];
+
+  /**
+   * IMPORTANT: This mutex must be acquired before writing to any of the mutable
+   * fields of this class. We do this by using the mutex in the public methods,
+   * which don't call each other.
+   */
+  readonly #mutex = new AsyncMutex();
+
+  /**
+   * A map of all the npm dependencies used in the project, and their
+   * dependencies.
+   *
+   * This is more complex than you may expect, as we have to handle:
+   *
+   * - The Hardhat project itself, which is treated differently than npm
+   *   packages, and is represented by the value `PROJECT_ROOT_SENTINEL`.
+   * - The case where a package is installed with a different name than the
+   *   package's name in its `package.json`. We refer to the name with which
+   *   the package is installed as the "installation name".
+   * - Imports from monorepo packages into the Hardhat project.
+   */
+  readonly #dependencyMaps: Map<
+    ResolvedNpmPackage | typeof PROJECT_ROOT_SENTINEL,
+    Map<
+      string, // The installation-name of the package that is being imported
+      ResolvedNpmPackage | typeof PROJECT_ROOT_SENTINEL // The package imported with that name
+    >
+  > = new Map();
+
+  /**
+   * A map of all the prefixes that an npm package needs to set as remappings
+   * to avoid being affected by a user remapping.
+   *
+   * For example, if a package `foo` has an import `import "dep/File.sol";`,
+   * and the user remaps `dep/=nope/`, it could break `foo`'s import.
+   *
+   * To avoid this situation we set remappings for all the prefixes that `foo`
+   * needs to remain unaffected, with a higher precedence than the user
+   * remappings.
+   */
+  readonly #localPrefixesByPackage: Map<ResolvedNpmPackage, Set<string>> =
+    new Map();
+
+  /**
+   * We use this map to ensure that we only resolve each file once.
+   **/
+  readonly #resolvedFileBySourceName: Map<string, ResolvedFile> = new Map();
+
+  /**
+   * Creates a new Resolver.
+   *
+   * @param projectRoot The absolute path to the Hardhat project root.
+   * @param userRemappingStrings The remappings provided by the user.
+   */
+  public static async create(
+    projectRoot: string,
+    userRemappingStrings: string[],
+  ): Promise<ResolverImplementation> {
+    const userRemappings = await Promise.all(
+      userRemappingStrings.map((remappingString) =>
+        validateAndResolveUserRemapping(projectRoot, remappingString),
+      ),
+    );
+
+    return new ResolverImplementation(projectRoot, userRemappings);
+  }
+
+  private constructor(
+    projectRoot: string,
+    userRemappings: ResolvedUserRemapping[],
+  ) {
+    this.#projectRoot = projectRoot;
+    this.#userRemappings = userRemappings;
+    this.#dependencyMaps.set(PROJECT_ROOT_SENTINEL, new Map());
+  }
+
+  public async resolveProjectFile(
+    absoluteFilePath: string,
+  ): Promise<ProjectResolvedFile> {
+    return this.#mutex.exclusiveRun(async () => {
+      if (!absoluteFilePath.startsWith(this.#projectRoot)) {
+        throw new HardhatError(
+          HardhatError.ERRORS.SOLIDITY.RESOLVING_INCORRECT_FILE_AS_PROJECT_FILE,
+          {
+            file: shortenPath(absoluteFilePath),
+          },
+        );
+      }
+
+      const relativeFilePath = path.relative(
+        this.#projectRoot,
+        absoluteFilePath,
+      );
+
+      // We first check if the file has already been resolved.
+      //
+      // Note that it may have received the right path, but with the wrong
+      // casing. We don't care at this point, as it would just mean a cache
+      // miss, and we proceed to get the right casing in that case.
+      //
+      // However, as most of the time these absolute paths are read from the file
+      // system, they'd have the right casing in general.
+      //
+      // If we need to fetch the right casing, we'd have to recheck the cache,
+      // to avoid re-resolving the file.
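+      //
+      // Illustrative example (hypothetical file name, not from the codebase):
+      // if this is called with ".../contracts/token.sol" but the file on disk
+      // is "contracts/Token.sol", the first cache lookup misses, we fetch the
+      // true casing below, and the file ends up cached under
+      // "contracts/Token.sol".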
+ let sourceName = fsPathToSourceNamePath(relativeFilePath); + const existing = this.#resolvedFileBySourceName.get(sourceName); + + if (existing !== undefined) { + /* eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- + The cache is type-unsafe, but we are sure this is a ProjectResolvedFile */ + return existing as ProjectResolvedFile; + } + + let trueCaseFsPath: string; + try { + trueCaseFsPath = await getFileTrueCase( + this.#projectRoot, + relativeFilePath, + ); + } catch (error) { + ensureError(error, FileNotFoundError); + + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.RESOLVING_NONEXISTENT_PROJECT_FILE, + { file: shortenPath(absoluteFilePath) }, + error, + ); + } + + // Now that we have the correct casing, we "fix" the source name. + sourceName = fsPathToSourceNamePath(trueCaseFsPath); + + // Maybe it was already resolved, so we need to check with the right + // casing + const resolvedWithTheRightCasing = + this.#resolvedFileBySourceName.get(sourceName); + if (resolvedWithTheRightCasing !== undefined) { + /* eslint-disable-next-line @typescript-eslint/consistent-type-assertions + -- If it was, it's a ProjectResolvedFile */ + return resolvedWithTheRightCasing as ProjectResolvedFile; + } + + const fsPathWithTheRightCasing = path.join( + this.#projectRoot, + trueCaseFsPath, + ); + + const resolvedFile: ProjectResolvedFile = { + type: ResolvedFileType.PROJECT_FILE, + sourceName, + fsPath: fsPathWithTheRightCasing, + content: await readFileContent(fsPathWithTheRightCasing), + }; + + this.#resolvedFileBySourceName.set(sourceName, resolvedFile); + + return resolvedFile; + }); + } + + public async resolveNpmDependencyFile( + npmModule: string, + ): Promise { + return this.#mutex.exclusiveRun(async () => { + const parsedNpmModule = this.#parseNpmDirectImport(npmModule); + + if (parsedNpmModule === undefined) { + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.RESOLVE_NPM_FILE_WITH_INVALID_FORMAT, + { module: npmModule }, + ); + } + + if (await this.#isDirectImportLocal(this.#projectRoot, npmModule)) { + const directory = + this.#getDirectImportLocalDesambiguationPrefix(npmModule); + + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.RESOLVE_NPM_FILE_CLASHES_WITH_LOCAL_FILES, + { + module: npmModule, + directory, + }, + ); + } + + const modulePackageName = parsedNpmModule.package; + + const npmPackage = await this.#resolveNpmPackage({ + from: PROJECT_ROOT_SENTINEL, + packageName: modulePackageName, + }); + + assertHardhatInvariant( + npmPackage !== PROJECT_ROOT_SENTINEL, + "Resolving a local file as if it were an npm module", + ); + + let trueCaseFsPath: string; + try { + trueCaseFsPath = await getFileTrueCase( + npmPackage.rootFsPath, + parsedNpmModule.path, + ); + } catch (error) { + ensureError(error, FileNotFoundError); + + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.RESOLVE_NON_EXISTENT_NPM_FILE, + { module: npmModule }, + error, + ); + } + + // Just like with the project files, we are more forgiving with the casing + // here, as this is not used for imports. 
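+      //
+      // Illustrative example (hypothetical names): resolving the npm module
+      // "@scope/dep/contracts/File.sol", with "@scope/dep" installed at
+      // version 1.2.3, yields the source name
+      // "npm/@scope/dep@1.2.3/contracts/File.sol"; for a monorepo package the
+      // version segment is "local" instead.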
+ + const sourceName = sourceNamePathJoin( + npmPackageToRootSourceName(npmPackage.name, npmPackage.version), + fsPathToSourceNamePath(trueCaseFsPath), + ); + + const resolvedWithTheRightCasing = + this.#resolvedFileBySourceName.get(sourceName); + + if (resolvedWithTheRightCasing !== undefined) { + /* eslint-disable-next-line @typescript-eslint/consistent-type-assertions + -- If it was, it's a ProjectResolvedFile */ + return resolvedWithTheRightCasing as NpmPackageResolvedFile; + } + + const fsPath = path.join(npmPackage.rootFsPath, trueCaseFsPath); + + const resolvedFile: NpmPackageResolvedFile = { + type: ResolvedFileType.NPM_PACKGE_FILE, + sourceName, + fsPath, + content: await readFileContent(fsPath), + package: npmPackage, + }; + + this.#resolvedFileBySourceName.set(sourceName, resolvedFile); + + return resolvedFile; + }); + } + + public async resolveImport( + from: ResolvedFile, + importPath: string, + ): Promise { + return this.#mutex.exclusiveRun(async () => { + let directImport = importPath; + + if (path.sep !== "/" && importPath.includes(path.sep)) { + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.IMPORT_PATH_WITH_WINDOWS_SEPARATOR, + { + importPath, + from: shortenPath(from.fsPath), + }, + ); + } + + if (importPath.startsWith("./") || importPath.startsWith("../")) { + directImport = sourceNamePathJoin( + path.dirname(from.sourceName), + importPath, + ); + + if (from.type === ResolvedFileType.NPM_PACKGE_FILE) { + if (!directImport.startsWith(from.package.rootSourceName)) { + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.ILLEGAL_PACKAGE_IMPORT, + { + importPath, + from: shortenPath(from.fsPath), + }, + ); + } + } else { + if (directImport.startsWith("../")) { + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.ILEGALL_PROJECT_IMPORT, + { + importPath, + from: shortenPath(from.fsPath), + }, + ); + } + } + } + + switch (from.type) { + case ResolvedFileType.PROJECT_FILE: + return this.#resolveImportFromProjectFile({ + from, + importPath, + directImport, + }); + + case ResolvedFileType.NPM_PACKGE_FILE: + return this.#resolveImportFromNpmPackageFile({ + from, + importPath, + directImport, + }); + } + }); + } + + public getRemappings(): Remapping[] { + const userRemappings = this.#userRemappings.map((remapping) => ({ + context: remapping.context, + prefix: remapping.prefix, + target: remapping.target, + })); + + const remappings: Remapping[] = []; + + for (const [ + thePackage, + dependenciesMap, + ] of this.#dependencyMaps.entries()) { + let context: string; + + if (thePackage === PROJECT_ROOT_SENTINEL) { + context = ""; + } else { + context = thePackage.rootSourceName; + } + + for (const [importedPackage, dependency] of dependenciesMap.entries()) { + // As `hardhat/console.sol` is resolved through npm, even if the + // `hardhat/` folder exists in the root of the package/project, we + // only remap that file. + // We should revisit this if we exported more solidity files in the + // hardhat package in the future. + if ( + dependency !== PROJECT_ROOT_SENTINEL && + dependency.name === "hardhat" + ) { + const prefix = importedPackage + "/console.sol"; + const target = dependency.rootSourceName + "console.sol"; + + remappings.push({ context, prefix, target }); + } else { + const prefix = importedPackage + "/"; + + const target = + dependency === PROJECT_ROOT_SENTINEL + ? 
"" + : dependency.rootSourceName; + + remappings.push({ context, prefix, target }); + } + } + } + + // TODO: Always order this in a consistent way + for (const [packageSourceName, prefixes] of this.#localPrefixesByPackage) { + for (const prefix of prefixes) { + remappings.push({ + context: packageSourceName.rootSourceName, + prefix, + target: packageSourceName.rootSourceName + prefix, + }); + } + } + + // We sort the remappings acording to the remappings selection rules, plus + // the targets, which shouldn't be needed. + remappings + .sort((a, b) => a.target.localeCompare(b.target)) + .sort((a, b) => a.target.length - b.target.length) + .sort((a, b) => a.prefix.localeCompare(b.prefix)) + .sort((a, b) => a.prefix.length - b.prefix.length) + .sort((a, b) => a.context.localeCompare(b.context)) + .sort((a, b) => a.context.length - b.context.length); + + return [...userRemappings, ...remappings]; + } + + // >>>>>>>>>> BEGIN SECTION: Import resolution selection + // + // The private methods in this section are in charge of selecting which import + // resolution technique to use, but they don't create any ResolvedFile. + // + // These techniques are: + // 1. Resolving an import to a project file + // 2. Resolving an import remapped by the user into an npm package + // 3. Resolving an import from an npm package to one of its own files with a + // relative import + // 4. Resolving an import from an npm package to one of its own files with a + // direct import — This case is different from 3, as without especial care + // it could be affected by one of the user remappings. + // 5. Resolving an import to a different npm package using our own remmapings + + /** + * Resolves an import from a project file. + * + * This method applies the user remappings, if necessary, and uses the + * appropriate resolution technique. + * + * @param from The file from which the import is being resolved. + * @param importPath The import path, as written in the source code. + * @param directImport The direct import path, after resolving relative paths, + * but before applying any remapping. + */ + async #resolveImportFromProjectFile({ + from, + importPath, + directImport, + }: { + from: ProjectResolvedFile; + directImport: string; + importPath: string; + }): Promise { + const bestUserRemapping = selectBestRemapping( + from.sourceName, + directImport, + this.#userRemappings, + ); + + if (bestUserRemapping !== undefined) { + return this.#resolveUserRemappedImportFromProjectFile({ + from, + importPath, + directImport, + remapping: bestUserRemapping, + }); + } + + if (await this.#isDirectImportLocal(this.#projectRoot, directImport)) { + return this.#resolveImportToProjectFile({ + from, + importPath, + fsPathWithinTheProject: sourceNamePathToFsPath(directImport), + }); + } + + return this.#resolveImportThroughNpm({ + from, + importPath, + directImport, + }); + } + + /** + * Resolves an import from a project file that is affected by a user + * remapping. + * + * Note that this method must only be called with the best user remapping + * applicable to the import, as defined by solc. Otherwise, it's behavior will + * be misleading. + * + * @param from The file from which the import is being resolved. + * @param importPath The import path, as written in the source code. + * @param directImport The direct import path, after resolving relative paths, + * but before applying any remapping. + * @param remapping The user remapping that will be applied. 
+ */ + async #resolveUserRemappedImportFromProjectFile({ + from, + importPath, + directImport, + remapping, + }: { + from: ProjectResolvedFile; + importPath: string; + directImport: string; + remapping: ResolvedUserRemapping; + }): Promise { + const remappedDirectImport = applyValidRemapping(directImport, remapping); + + // Special case, where a user remapping's target is an npm pacakge + if (remapping.targetNpmPackage !== undefined) { + return this.#resolveImportToNpmPackageRemappedByUser({ + from, + importPath, + directImport: remappedDirectImport, + remapping: { + // This weird syntax is because TS doesn't realize that + // bestUserRemapping is Required here + ...remapping, + targetNpmPackage: remapping.targetNpmPackage, + }, + }); + } + + if ( + !(await this.#isDirectImportLocal( + this.#projectRoot, + remappedDirectImport, + )) + ) { + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.ILLEGAL_PROJECT_IMPORT_AFTER_REMAPPING, + { + importPath, + from: shortenPath(from.fsPath), + remapping: remapping.rawFormat, + remappedDirectImport, + }, + ); + } + + return this.#resolveImportToProjectFile({ + from, + importPath, + fsPathWithinTheProject: sourceNamePathToFsPath(remappedDirectImport), + }); + } + + /** + * Resolves an import from an npm file. + * + * This method does not apply any remapping that may be present in the npm + * package. + * + * @param from The file from which the import is being resolved. + * @param importPath The import path, as written in the source code. + * @param directImport The direct import path, after resolving relative paths, + * but without applying any remapping. + */ + async #resolveImportFromNpmPackageFile({ + from, + importPath, + directImport, + }: { + from: NpmPackageResolvedFile; + directImport: string; + importPath: string; + }): Promise { + // If we wanted to apply its own remappings, this would be the place. + // Initially we won't support it. + if (directImport.startsWith(from.package.rootSourceName)) { + return this.#resolveRelativeImportFromNpmPackage({ + from, + importPath, + directImport, + }); + } + + // This was already a direct import, and may be to the same package. + // As we allow this imports in the local project files, we should also allow + // them on npm packages. If we don't projects won't be easily distributable + // through npm, even if they don't use remappings. + if ( + await this.#isDirectImportLocal(from.package.rootFsPath, directImport) + ) { + const resolvedFile = await this.#resolveLocalImportFromNpmPackage({ + from, + importPath, + directImport, + }); + + let prefixesNeededByPackage = this.#localPrefixesByPackage.get( + from.package, + ); + + if (prefixesNeededByPackage === undefined) { + prefixesNeededByPackage = new Set(); + this.#localPrefixesByPackage.set(from.package, prefixesNeededByPackage); + } + + prefixesNeededByPackage.add( + this.#getDirectImportLocalDesambiguationPrefix(directImport), + ); + + return resolvedFile; + } + + return this.#resolveImportThroughNpm({ + from, + importPath, + directImport, + }); + } + + /** + * This method resolves an import that has to go through the npm resolution + * process and selects the appropriate technique to resolve it. + * + * This method does not apply nor define any remapping, but it populates the + * `#dependencyMaps` with dependencies that each package uses, so that we can + * create all the necessary remappings at the end of the resolution process. + * + * @param from The file from which the import is being resolved. 
+ * @param importPath The import path, as written in the source code. + * @param directImport The direct import path, after resolving relative paths, + * but without applying any remapping. + */ + async #resolveImportThroughNpm({ + from, + importPath, + directImport, + }: { + from: ResolvedFile; + importPath: string; + directImport: string; + }): Promise { + const parsedDirectImport = this.#parseNpmDirectImport(directImport); + + if (parsedDirectImport === undefined) { + throw new HardhatError(HardhatError.ERRORS.SOLIDITY.INVALID_NPM_IMPORT, { + importPath, + from: shortenPath(from.fsPath), + }); + } + + const dependency = await this.#resolveNpmPackageForImport({ + from, + importPath, + importPackageName: parsedDirectImport.package, + }); + + if (dependency === PROJECT_ROOT_SENTINEL) { + return this.#resolveImportToProjectFile({ + from, + importPath, + // If we import a file through npm and end up in the Hardhat project, + // we are going to remap the importPackageName to "", so that the path + // section of the parsed direct import should be the relative path. + fsPathWithinTheProject: sourceNamePathToFsPath(parsedDirectImport.path), + }); + } + + return this.#resolveImportToNpmPackage({ + from, + importPath, + importedPackage: dependency, + fsPathWithinThePackage: sourceNamePathToFsPath(parsedDirectImport.path), + }); + } + + // >>>>>>>>>> END SECTION: Import resolution selection + + // >>>>>>>>>> BEGIN SECTION: Import resolution techniques + // + // The private methods in this section implement the different import + // import resolution techniques, which have been explained in the previous + // section. + + /** + * This method implements the import resolution technique number 1: Importing + * a file that is within the project. Note that this method applies both to + * imports from project files as well as imports from npm packages that may + * have the project as a dependency. + * + * @param from The file from which the import is being resolved. + * @param importPath The import path, as written in the source code. + * @param pathWithinTheProject The path within the project to import, after + * normalizing relative paths, applying user remappings and/or stripping the + * npm package name. + */ + async #resolveImportToProjectFile({ + from, + importPath, + fsPathWithinTheProject, + }: { + from: ResolvedFile; + importPath: string; + fsPathWithinTheProject: string; + }): Promise { + const sourceName = fsPathToSourceNamePath(fsPathWithinTheProject); + const existing = this.#resolvedFileBySourceName.get(sourceName); + if (existing !== undefined) { + /* eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- + The cache is type-unsafe, but we are sure this is a ProjectResolvedFile */ + return existing as ProjectResolvedFile; + } + + // This is a project file, so if it was imported from a local file, this + // is the direct import, without any remapping or necessary consideration. + // If this was imported from an npm package, we are remapping the package + // name of the import to "", so that the direct import is also the same as + // the relative path. 
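+    //
+    // Illustrative example (hypothetical names): a monorepo package that
+    // depends on this Hardhat project may import
+    // "my-project/contracts/Token.sol"; the package name is remapped to "",
+    // so here we validate and read "contracts/Token.sol" relative to the
+    // project root.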
+ await this.#validateExistanceAndCasingOfImport({ + from, + importPath, + relativeFsPathToValidate: fsPathWithinTheProject, + absoluteFsPathToValidateFrom: this.#projectRoot, + }); + + const fsPath = path.join(this.#projectRoot, fsPathWithinTheProject); + + const resolvedFile: ProjectResolvedFile = { + type: ResolvedFileType.PROJECT_FILE, + sourceName, + fsPath, + content: await readFileContent(fsPath), + }; + + this.#resolvedFileBySourceName.set(sourceName, resolvedFile); + + return resolvedFile; + } + + /** + * This method implements the import resolution technique number 2: A project + * file has an import that should be resolved to a file in an npm package due + * to a user remapping. + * + * @param from The file from which the import is being resolved. + * @param importPath The import path, as written in the source code. + * @param directImport The direct import path, after resolving relative paths, + * and applying the user remapping. + * @param remapping The remapping that was applied. + */ + async #resolveImportToNpmPackageRemappedByUser({ + from, + importPath, + directImport, + remapping, + }: { + from: ProjectResolvedFile; + importPath: string; + directImport: string; + remapping: Required; + }): Promise { + const sourceName = directImport; + const existing = this.#resolvedFileBySourceName.get(sourceName); + if (existing !== undefined) { + /* eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- + The cache is type-unsafe, but we are sure this is a NpmPackageResolvedFile */ + return existing as NpmPackageResolvedFile; + } + + const relativeFileFsPath = sourceNamePathToFsPath( + path.relative(remapping.targetNpmPackage.rootSourceName, directImport), + ); + + // We don't add the dependency to `this.#dependencyMaps` because we + // don't need a new remapping for this package, as it's already + // remapped by the user. + + await this.#validateExistanceAndCasingOfImport({ + from, + importPath, + relativeFsPathToValidate: relativeFileFsPath, + absoluteFsPathToValidateFrom: remapping.targetNpmPackage.rootFsPath, + }); + + const fsPath = path.join( + remapping.targetNpmPackage.rootFsPath, + relativeFileFsPath, + ); + + const resolvedFile: NpmPackageResolvedFile = { + type: ResolvedFileType.NPM_PACKGE_FILE, + sourceName, + fsPath, + content: await readFileContent(fsPath), + package: remapping.targetNpmPackage, + }; + + this.#resolvedFileBySourceName.set(sourceName, resolvedFile); + + return resolvedFile; + } + + /** + * This method implements the import resolution technique number 3: A file + * from an npm package is importing another file from the same package with a + * relative import. + * + * @param from The file from which the import is being resolved. + * @param importPath The import path, as written in the source code. + * @param directImport The direct import path, after resolving relative paths. + * It must start with the package's root source name. 
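+   *
+   * For example (hypothetical names): `npm/foo@1.2.3/contracts/A.sol`
+   * importing `./B.sol` produces the direct import
+   * `npm/foo@1.2.3/contracts/B.sol`, which is then read from the package's
+   * root directory on disk.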
+ */ + async #resolveRelativeImportFromNpmPackage({ + from, + importPath, + directImport, + }: { + from: NpmPackageResolvedFile; + directImport: string; + importPath: string; + }): Promise { + const sourceName = directImport; + const existing = this.#resolvedFileBySourceName.get(sourceName); + if (existing !== undefined) { + /* eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- + The cache is type-unsafe, but we are sure this is a NpmPackageResolvedFile */ + return existing as NpmPackageResolvedFile; + } + + const relativePath = sourceNamePathToFsPath( + path.relative(from.package.rootSourceName, directImport), + ); + + await this.#validateExistanceAndCasingOfImport({ + from, + importPath, + relativeFsPathToValidate: relativePath, + absoluteFsPathToValidateFrom: from.package.rootFsPath, + }); + + const filePath = path.join(from.package.rootFsPath, relativePath); + + const resolvedFile: NpmPackageResolvedFile = { + type: ResolvedFileType.NPM_PACKGE_FILE, + sourceName, + fsPath: filePath, + content: await readFileContent(filePath), + package: from.package, + }; + + this.#resolvedFileBySourceName.set(sourceName, resolvedFile); + + return resolvedFile; + } + + /** + * This method implements the import resolution technique number 4: A file + * from an npm package is importing another file from the same package with a + * direct import. + * + * For example, a file `node_modules/foo/File.sol` imports + * `node_modules/foo/bar/File2.sol` with the direct import `bar/File2.sol`. + * + * @param from The file from which the import is being resolved. + * @param importPath The import path, as written in the source code. + * @param directImport The direct import path, after resolving relative paths. + * The direct import must be considered local within the package, according to + * the rules of the `#isDirectImportLocal` method. + */ + async #resolveLocalImportFromNpmPackage({ + from, + importPath, + directImport, + }: { + from: NpmPackageResolvedFile; + directImport: string; + importPath: string; + }): Promise { + const sourceName = from.package.rootSourceName + directImport; + const existing = this.#resolvedFileBySourceName.get(sourceName); + if (existing !== undefined) { + /* eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- + The cache is type-unsafe, but we are sure this is a NpmPackageResolvedFile */ + return existing as NpmPackageResolvedFile; + } + + const relativeFsPath = sourceNamePathToFsPath(directImport); + + await this.#validateExistanceAndCasingOfImport({ + from, + importPath, + relativeFsPathToValidate: relativeFsPath, + absoluteFsPathToValidateFrom: from.package.rootFsPath, + }); + + const fsPath = path.join(from.package.rootFsPath, relativeFsPath); + + const resolvedFile: NpmPackageResolvedFile = { + type: ResolvedFileType.NPM_PACKGE_FILE, + sourceName, + fsPath, + content: await readFileContent(fsPath), + package: from.package, + }; + + this.#resolvedFileBySourceName.set(sourceName, resolvedFile); + + return resolvedFile; + } + + /** + * This method implements the import resolution technique number 5: A file, + * within the project or from an npm pacakge, is importing a file from a + * different npm package. + * + * Note: This is not meant to support imports into the Hardhat project. + * + * @param from The file from which the import is being resolved. + * @param importPath The import path, as written in the source code. + * @param importedPackage The NpmPackage that is being imported. 
+ * @param pathWithinThePackage The path to the file to import, within the + * package. That means, after parsing the direct import, and stripping the + * package part. + */ + async #resolveImportToNpmPackage({ + from, + importPath, + importedPackage, + fsPathWithinThePackage, + }: { + from: ResolvedFile; + importPath: string; + importedPackage: ResolvedNpmPackage; + fsPathWithinThePackage: string; + }): Promise { + const sourceName = + importedPackage.rootSourceName + + fsPathToSourceNamePath(fsPathWithinThePackage); + const existing = this.#resolvedFileBySourceName.get(sourceName); + if (existing !== undefined) { + /* eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- + The cache is type-unsafe, but we are sure this is a NpmPackageResolvedFile */ + return existing as NpmPackageResolvedFile; + } + + await this.#validateExistanceAndCasingOfImport({ + from, + importPath, + relativeFsPathToValidate: fsPathWithinThePackage, + absoluteFsPathToValidateFrom: importedPackage.rootFsPath, + }); + + const fsPath = path.join( + importedPackage.rootFsPath, + fsPathWithinThePackage, + ); + + const resolvedFile: NpmPackageResolvedFile = { + type: ResolvedFileType.NPM_PACKGE_FILE, + sourceName, + fsPath, + content: await readFileContent(fsPath), + package: importedPackage, + }; + + this.#resolvedFileBySourceName.set(sourceName, resolvedFile); + + return resolvedFile; + } + + // >>>>>>>>>> END SECTION: Import resolution techniques + + /** + * Resolves an npm package as a dependency of another one. + * + * @param from The package to resolve the dependency from. + * @param packageName The name of the package that should be resolved as a + * dependency. + * @returns A ResolvedNpmPackage or PROJECT_ROOT_SENTINEL. + */ + async #resolveNpmPackage({ + from, + packageName, + }: { + from: ResolvedNpmPackage | typeof PROJECT_ROOT_SENTINEL; + packageName: string; + }): Promise { + let dependenciesMap = this.#dependencyMaps.get(from); + if (dependenciesMap === undefined) { + dependenciesMap = new Map(); + this.#dependencyMaps.set(from, dependenciesMap); + } + + const dependency = dependenciesMap.get(packageName); + if (dependency !== undefined) { + return dependency; + } + + const baseResolutionDirectory = + from === PROJECT_ROOT_SENTINEL ? this.#projectRoot : from.rootFsPath; + + const packageJsonResolution = resolve( + packageName + "/package.json", + baseResolutionDirectory, + ); + + if (packageJsonResolution.success === false) { + if (packageJsonResolution.error === ResolutionError.MODULE_NOT_FOUND) { + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.NPM_DEPEDNDENCY_NOT_INSTALLED, + { + from: + from === PROJECT_ROOT_SENTINEL + ? "your project" + : `"${shortenPath(from.rootFsPath)}"`, + packageName, + }, + ); + } + + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.NPM_DEPEDNDENCY_USES_EXPORTS, + { + from: + from === PROJECT_ROOT_SENTINEL + ? "your project" + : `"${shortenPath(from.rootFsPath)}"`, + packageName, + }, + ); + } + + const packageJsonPath = packageJsonResolution.absolutePath; + + if (isPackageJsonFromProject(packageJsonPath, this.#projectRoot)) { + dependenciesMap.set(packageName, PROJECT_ROOT_SENTINEL); + return PROJECT_ROOT_SENTINEL; + } + + const packageJson = await readJsonFile<{ name: string; version: string }>( + packageJsonPath, + ); + + const name = packageJson.name; + const version = isPackageJsonFromMonorepo( + packageJsonPath, + this.#projectRoot, + ) + ? 
"local" + : packageJson.version; + + const npmPackage: ResolvedNpmPackage = { + name, + version, + rootFsPath: path.dirname(packageJsonPath), + rootSourceName: npmPackageToRootSourceName(name, version), + }; + + dependenciesMap.set(packageName, npmPackage); + return npmPackage; + } + + /** + * Resolves the npm package imported by the importPath in the from file. + * + * @param from The file from which the import is being resolved. + * @param importPath The import path, as written in the source code. + * @param importPackageName The name of the package to import, as present in + * the importPath, and not necessarily the name of the package in the + * package.json. + * @returns A ResolvedNpmPackage or PROJECT_ROOT_SENTINEL. + */ + async #resolveNpmPackageForImport({ + from, + importPath, + importPackageName, + }: { + from: ResolvedFile; + importPath: string; + importPackageName: string; + }): Promise { + try { + return await this.#resolveNpmPackage({ + from: + from.type === ResolvedFileType.PROJECT_FILE + ? PROJECT_ROOT_SENTINEL + : from.package, + packageName: importPackageName, + }); + } catch (error) { + ensureError(error); + + if ( + HardhatError.isHardhatError( + error, + HardhatError.ERRORS.SOLIDITY.NPM_DEPEDNDENCY_NOT_INSTALLED, + ) + ) { + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.IMPORTED_NPM_DEPENDENCY_NOT_INSTALLED, + { + from: shortenPath(from.fsPath), + importPath, + }, + error, + ); + } + + if ( + HardhatError.isHardhatError( + error, + HardhatError.ERRORS.SOLIDITY.NPM_DEPEDNDENCY_USES_EXPORTS, + ) + ) { + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.IMPORTED_NPM_DEPENDENCY_THAT_USES_EXPORTS, + { from: shortenPath(from.fsPath), importPath }, + error, + ); + } + + throw error; + } + } + + /** + * This method returns true if a direct import should be considered an import + * to a local file when evaluated in the context of the Hardhat project. + */ + async #isDirectImportLocal( + projectOrPackageRootFsPath: string, + directImport: string, + ): Promise { + if (directImport === "hardhat/console.sol") { + return false; + } + + const slash = directImport.indexOf("/"); + + // If it's a file in the root directory + if (slash === -1) { + return true; + } + + const firstDirectory = directImport.substring(0, slash); + + // TODO: Cache this? + return exists(path.join(projectOrPackageRootFsPath, firstDirectory)); + } + + /** + * Returns the prefix used to desambiguate a directImport by + * #isDirectImportLocal to determine if its local. + * + * For example, the prefix for `foo/bar/File.sol`, this returns `foo/`. + * + * NOTE: This method does not support `hardhat/console.sol`, as that's a + * special case, which is never considered local. + */ + #getDirectImportLocalDesambiguationPrefix(directImport: string): string { + const slash = directImport.indexOf("/"); + + // If it's a file in the root directory + if (slash === -1) { + return directImport; + } + + const firstDirectory = directImport.substring(0, slash + 1); + + return firstDirectory; + } + + /** + * This is an utility method that validates the existance and casing of an + * imported file as part of the different resolution techniques. + * + * `from` and `importPath` are used to provide a user-friendly error message, + * but the actual validation is done using `relativePathToValidate` and + * `absolutePathToValidateFrom`. + * + * @param from The file with the import. + * @param importPath The import path, as written in the source code. 
+   * @param relativeFsPathToValidate The relative path whose existence is
+   * validated.
+   * @param absoluteFsPathToValidateFrom The absolute path that the relative
+   * path is resolved against.
+   */
+  async #validateExistanceAndCasingOfImport({
+    from,
+    importPath,
+    relativeFsPathToValidate,
+    absoluteFsPathToValidateFrom,
+  }: {
+    from: ResolvedFile;
+    importPath: string;
+    relativeFsPathToValidate: string;
+    absoluteFsPathToValidateFrom: string;
+  }) {
+    let trueCaseFsPath: string;
+    try {
+      trueCaseFsPath = await getFileTrueCase(
+        absoluteFsPathToValidateFrom,
+        relativeFsPathToValidate,
+      );
+    } catch (error) {
+      ensureError(error, FileNotFoundError);
+
+      throw new HardhatError(
+        HardhatError.ERRORS.SOLIDITY.IMPORTED_FILE_DOESNT_EXIST,
+        { importPath, from: shortenPath(from.fsPath) },
+        error,
+      );
+    }
+
+    if (relativeFsPathToValidate !== trueCaseFsPath) {
+      throw new HardhatError(
+        HardhatError.ERRORS.SOLIDITY.IMPORTED_FILE_WITH_ICORRECT_CASING,
+        {
+          importPath,
+          from: shortenPath(from.fsPath),
+          correctCasing: fsPathToSourceNamePath(trueCaseFsPath),
+        },
+      );
+    }
+  }
+
+  /**
+   * Parses a direct import as if it were an npm import, returning `undefined`
+   * if the format is invalid.
+   */
+  #parseNpmDirectImport(directImport: string):
+    | {
+        package: string;
+        path: string;
+      }
+    | undefined {
+    const directImportPattern =
+      /^(?<package>(?:@[a-z0-9-~._]+\/)?[a-z0-9-~][a-z0-9-~._]*)\/(?<path>.*)$/;
+
+    const match = directImportPattern.exec(directImport);
+
+    if (match === null) {
+      return undefined;
+    }
+
+    assertHardhatInvariant(
+      match.groups !== undefined,
+      "Groups should be defined because they are part of the pattern",
+    );
+
+    return { package: match.groups.package, path: match.groups.path };
+  }
+}
+
+async function validateAndResolveUserRemapping(
+  projectRoot: string,
+  remappingString: string,
+): Promise<ResolvedUserRemapping> {
+  const remapping = parseRemappingString(remappingString);
+
+  if (remapping === undefined) {
+    throw new HardhatError(
+      HardhatError.ERRORS.SOLIDITY.REMAPPING_WITH_INVALID_SYNTAX,
+      {
+        remapping: remappingString,
+      },
+    );
+  }
+
+  if (remapping.context.startsWith("npm/")) {
+    throw new HardhatError(
+      HardhatError.ERRORS.SOLIDITY.USER_REMAPPING_WITH_NPM_CONTEXT,
+      { remapping: remappingString },
+    );
+  }
+
+  if (!remapping.target.startsWith("npm/")) {
+    return { ...remapping, rawFormat: remappingString };
+  }
+
+  const parsed = parseNpmRemappingTarget(remapping.target);
+
+  if (parsed === undefined) {
+    throw new HardhatError(
+      HardhatError.ERRORS.SOLIDITY.REMAPPING_WITH_INVALID_SYNTAX,
+      { remapping: remappingString },
+    );
+  }
+
+  const { packageName, packageVersion } = parsed;
+
+  const dependencyPackageJsonResolution = resolve(
+    `${packageName}/package.json`,
+    projectRoot,
+  );
+
+  if (dependencyPackageJsonResolution.success === false) {
+    if (
+      dependencyPackageJsonResolution.error === ResolutionError.MODULE_NOT_FOUND
+    ) {
+      throw new HardhatError(
+        HardhatError.ERRORS.SOLIDITY.REMAPPING_TO_UNINSTALLED_PACKAGE,
+        { remapping: remappingString, package: packageName },
+      );
+    }
+
+    throw new HardhatError(
+      HardhatError.ERRORS.SOLIDITY.REMAPPING_TO_PACKAGE_USING_EXPORTS,
+      { remapping: remappingString, package: packageName },
+    );
+  }
+
+  const dependencyPackageJsonPath =
+    dependencyPackageJsonResolution.absolutePath;
+
+  if (isPackageJsonFromMonorepo(dependencyPackageJsonPath, projectRoot)) {
+    if (packageVersion !== "local") {
+      throw new HardhatError(
+        HardhatError.ERRORS.SOLIDITY.REMAPPING_NPM_PACKAGE_AS_MONOREPO,
+        {
+          remapping: remappingString,
+          pacakge: packageName,
          version: packageVersion,
+        },
+      );
+    }
+  }
+
+  if (isPackageJsonFromProject(dependencyPackageJsonPath, projectRoot)) {
+    throw new HardhatError(
+      HardhatError.ERRORS.SOLIDITY.REMAPPING_HARDHAT_PROJECT_AS_MONOREPO_PACKAGE,
+      { remapping: remappingString, package: packageName },
+    );
+  }
+
+  if (isPackageJsonFromNpmPackage(dependencyPackageJsonPath)) {
+    const dependencyPackageJson = await readJsonFile<{ version: string }>(
+      dependencyPackageJsonPath,
+    );
+
+    if (dependencyPackageJson.version !== packageVersion) {
+      throw new HardhatError(
+        HardhatError.ERRORS.SOLIDITY.REMAPPING_INCORRECT_VERSION,
+        {
+          remapping: remappingString,
+          package: packageName,
+          expectedVersion: packageVersion,
+          actualVersion: dependencyPackageJson.version,
+        },
+      );
+    }
+  }
+
+  const npmPackage: ResolvedNpmPackage = {
+    name: packageName,
+    version: packageVersion,
+    rootFsPath: path.dirname(dependencyPackageJsonPath),
+    rootSourceName: npmPackageToRootSourceName(packageName, packageVersion),
+  };
+
+  return {
+    ...remapping,
+    targetNpmPackage: npmPackage,
+    rawFormat: remappingString,
+  };
+}
+
+function parseNpmRemappingTarget(remappingTarget: string):
+  | {
+      packageName: string;
+      packageVersion: string;
+    }
+  | undefined {
+  const npmTargetPattern =
+    /^npm\/(?<package>(?:@[a-z0-9-~._]+\/)?[a-z0-9-~][a-z0-9-~._]*)@(?<version>local|\d+\.\d+\.\d+)\//;
+
+  const match = npmTargetPattern.exec(remappingTarget);
+
+  if (match === null) {
+    return undefined;
+  }
+
+  assertHardhatInvariant(
+    match.groups !== undefined,
+    "Groups should be defined because they are part of the pattern",
+  );
+
+  return {
+    packageName: match.groups.package,
+    packageVersion: match.groups.version,
+  };
+}
+
+function npmPackageToRootSourceName(name: string, version: string): string {
+  return `npm/${name}@${version}/`;
+}
+
+function isPackageJsonFromMonorepo(
+  packageJsonFsPath: string,
+  projectRoot: string,
+): boolean {
+  return (
+    !packageJsonFsPath.includes("node_modules") &&
+    !packageJsonFsPath.startsWith(projectRoot)
+  );
+}
+
+function isPackageJsonFromProject(
+  packageJsonFsPath: string,
+  projectRoot: string,
+): boolean {
+  return (
+    !packageJsonFsPath.includes("node_modules") &&
+    packageJsonFsPath.startsWith(projectRoot)
+  );
+}
+
+function isPackageJsonFromNpmPackage(packageJsonFsPath: string): boolean {
+  return packageJsonFsPath.includes("node_modules");
+}
+
+/**
+ * Transforms an fs path into a sourceName or import path, by normalizing its
+ * path separators to /.
+ *
+ * Note: This function is exported for testing purposes, but it's not meant to
+ * be used outside of the resolver.
+ */
+export function fsPathToSourceNamePath(fsPath: string): string {
+  if (path.sep === "/") {
+    return fsPath;
+  }
+
+  return fsPath.replace(/\\/g, "/");
+}
+
+/**
+ * Transforms a sourceName or import path into an fs path, by normalizing its
+ * path separators to the platform's separator.
+ *
+ * Note: This function is exported for testing purposes, but it's not meant to
+ * be used outside of the resolver.
+ */
+export function sourceNamePathToFsPath(sourceNamePath: string): string {
+  if (path.sep === "/") {
+    return sourceNamePath;
+  }
+
+  return sourceNamePath.replace(/\//g, "\\");
+}
+
+/**
+ * The equivalent of path.join but for sourceName or import paths, not fs paths.
+ */
+function sourceNamePathJoin(...parts: string[]): string {
+  return fsPathToSourceNamePath(path.join(...parts));
+}
+
+/**
+ * Reads and analyzes the file at the given absolute path.
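+ *
+ * As a rough sketch of the expected shape (not taken from a real run): a file
+ * containing `pragma solidity ^0.8.0;` and `import "./A.sol";` is analyzed
+ * into `importPaths: ["./A.sol"]` and `versionPragmas: ["^0.8.0"]`.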
+ */ +async function readFileContent(absolutePath: string): Promise { + const text = await readUtf8File(absolutePath); + const { imports, versionPragmas } = analyze(text); + + return { + text, + importPaths: imports, + versionPragmas, + }; +} diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/resolver/remappings.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/resolver/remappings.ts new file mode 100644 index 0000000000..ea908a1042 --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/resolver/remappings.ts @@ -0,0 +1,91 @@ +import type { Remapping } from "./types.js"; + +/** + * Tries to parse a remapping string, returning undefined if it's invalid. + */ +export function parseRemappingString(remapping: string): Remapping | undefined { + let rest = remapping; + const colon = rest.indexOf(":"); + + let context: string; + + if (colon !== -1) { + context = rest.substring(0, colon); + rest = rest.substring(colon + 1); + } else { + context = ""; + } + + const equal = rest.indexOf("="); + if (equal === -1) { + return undefined; + } + + const prefix = rest.substring(0, equal); + + if (prefix === "") { + return undefined; + } + + const target = rest.substring(equal + 1); + + return { context, prefix, target }; +} + +export function selectBestRemapping( + fromSouceName: string, + directImport: string, + remappings: RemappingT[], +): RemappingT | undefined { + let bestRemapping: RemappingT | undefined; + + let longestContext = 0; + let longestPrefix = 0; + + for (const remapping of remappings) { + const contextLength = remapping.context.length; + + if (contextLength < longestContext) { + continue; + } + + if (!fromSouceName.startsWith(remapping.context)) { + continue; + } + + if ( + remapping.prefix.length < longestPrefix && + contextLength === longestContext + ) { + continue; + } + + if (!directImport.startsWith(remapping.prefix)) { + continue; + } + + longestContext = contextLength; + longestPrefix = remapping.prefix.length; + bestRemapping = remapping; + } + + return bestRemapping; +} + +/** + * Applies a remapping assuming that it's valid for this importPath. + */ +export function applyValidRemapping( + importPath: string, + remapping: Remapping, +): string { + return remapping.target + importPath.substring(remapping.prefix.length); +} + +export function formatRemapping(remapping: Remapping): string { + if (remapping.context === "") { + return `${remapping.prefix}=${remapping.target}`; + } + + return `${remapping.context}:${remapping.prefix}=${remapping.target}`; +} diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/resolver/types.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/resolver/types.ts new file mode 100644 index 0000000000..20b573b1ae --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/resolver/types.ts @@ -0,0 +1,91 @@ +import type { + NpmPackageResolvedFile, + ProjectResolvedFile, + ResolvedFile, +} from "../../../../../types/solidity/resolved-file.js"; + +/** + * A solc remapping. + */ +export interface Remapping { + context: string; + prefix: string; + target: string; +} + +/** + * A Resolver is a stateful object that can be used to to construct a dependency + * graph, by resolving both the local project and npm files, and their imports. + * + * As part of the resolution process, it generates the list of remappings that + * are needed to build the project. 
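+ *
+ * For example (hypothetical names), resolving an import of `foo/Bar.sol` from
+ * a project file may produce the remapping `foo/=npm/foo@1.2.3/`, so that solc
+ * looks the file up under its npm source name.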
+ * + * This resolver uses `sourceName`s to identify the resolved files, which are + * not necessarily related to the file path. + * + * The `sourceName` of a Hardhat project file is its relative path from the + * project root. For example, if the project root is `/home/user/project`, and + * there are files `/home/user/project/contracts/File.sol` and + * `/home/user/project/File2.sol`, their source names are `contracts/File.sol` + * and `File2.sol`. + * + * The `sourceName` of an npm file is `npm/<package-name>@<version>/<file-path>`. + * This is constructed by using the Node.js resolution algorithm, to resolve + * an npm file or import, and using the package's `package.json` file to + * determine the source name. For example, if we import `foo/bar.sol`, its + * source name could be `npm/foo@1.2.3/bar.sol`. + * + * If the Node.js resolution algorithm resolves a file into a package that's + * part of the monorepo where the Hardhat project is (i.e. it's not part of a + * `node_modules` directory), the source name is going to be + * `npm/package@local/path/to/file`. + * + * Note that in the Node.js ecosystem, a package manager may install multiple + * instances of the same package and version (i.e. fail to deduplicate them). + * In those cases the Resolver will use the first instance it finds, and will + * always resolve to that one. + * + * Finally, the current version of the resolver doesn't support npm packages + * that use `package.json#exports`. + */ +export interface Resolver { + /** + * Resolves a Hardhat project file. + * + * @param absoluteFilePath The absolute path to the file. + * @returns The resolved file. + */ + resolveProjectFile(absoluteFilePath: string): Promise<ProjectResolvedFile>; + + /** + * Resolves an npm package file, which must be a dependency available in the + * Hardhat project. + * + * This method is only meant to be used when an npm file needs to be rebuilt + * to emit its artifacts, because the user requested it through their config. + * + * @param npmModule The npm module to resolve, in the form of + * `<package-name>/<file-path>`. + * @returns The resolved file. + */ + resolveNpmDependencyFile(npmModule: string): Promise<NpmPackageResolvedFile>; + + /** + * Resolves an import. + * + * @param from The file where the import statement is located. + * @param importPath The import path, as written in the source code. For + * example, if the import statement is `import "./foo.sol";`, the import + * path is `./foo.sol`. + * @returns The imported file. + */ + resolveImport(from: ResolvedFile, importPath: string): Promise<ResolvedFile>; + + /** + * Returns the list of remappings needed to build the project. + * + * TODO: Does this include all the user remappings? Only the necessary ones? + * What if we are only compiling part of the dependency graph? + */ + getRemappings(): Remapping[]; +} diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/root-paths-utils.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/root-paths-utils.ts new file mode 100644 index 0000000000..2c63a8dd21 --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/root-paths-utils.ts @@ -0,0 +1,81 @@ +/** + * @file This file contains utilities to work with the path of root files. + * + * The SolidityBuildSystem has a different way of referring to root files when + * they come from npm packages, which is the `npm:<package-name>/<file-path>` string. This + * file contains utilities to work with these paths.
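+ *
+ * Illustrative example (the package name is an assumption, not part of the
+ * original patch): `npm:@openzeppelin/contracts/token/ERC20/ERC20.sol` would
+ * be the root path of a file that comes from an npm dependency, while a root
+ * file of the project itself is referred to by its absolute fs path.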
+ * + * The main reason for this `npm:` prefix is to make the SolidityBuildSystem + * APIs ergonomic, instead of using a tagged union type everywhere, but it adds + * some complexity to the implementation. + */ + +import type { ResolvedFile } from "../../../../types/solidity/resolved-file.js"; + +import { ResolvedFileType } from "../../../../types/solidity/resolved-file.js"; + +/** + * The result of parsing a root path. + * @see parseRootPath + */ +export type ParsedRootPath = { npmPath: string } | { fsPath: string }; + +/** + * Parses the path of a root file, as received by the SolidityBuildSystem APIs. + * + * @param rootPath The root path. + * @returns The parsed root path. + */ +export function parseRootPath( + rootPath: string, +): { npmPath: string } | { fsPath: string } { + if (rootPath.startsWith("npm:")) { + return { npmPath: rootPath.substring(4) }; + } + + return { fsPath: rootPath }; +} + +/** + * Returns true if the given root path is for a npm file. + */ +export function isNpmRootPath(rootPath: string): boolean { + return rootPath.startsWith("npm:"); +} + +/** + * Returns an npm root path for the given module. + * @param mod A module name, i.e. `/`. + * @returns The npm root path. + */ +export function npmModuleToNpmRootPath(mod: string): string { + return `npm:${mod}`; +} + +/** + * Returns true if the given parsed root path is for a npm file. + */ +export function isNpmParsedRootPath( + parsedRootPath: ParsedRootPath, +): parsedRootPath is { npmPath: string } { + return "npmPath" in parsedRootPath; +} + +/** + * Formats the path of a root file, making it compatible with the + * SolidityBuildSystem APIs. + * + * @param publicSourceName The public source name of the root file. + * @param rootFile The root file. + * @returns The formatted path. + */ +export function formatRootPath( + publicSourceName: string, + rootFile: ResolvedFile, +): string { + if (rootFile.type !== ResolvedFileType.NPM_PACKGE_FILE) { + return publicSourceName; + } + + return `npm:${publicSourceName}`; +} diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/solc-config-selection.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/solc-config-selection.ts new file mode 100644 index 0000000000..4812f9c505 --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/solc-config-selection.ts @@ -0,0 +1,202 @@ +import type { DependencyGraphImplementation } from "./dependency-graph.js"; +import type { + SolcConfig, + SolidityBuildProfileConfig, +} from "../../../../types/config.js"; +import type { CompilationJobCreationError } from "../../../../types/solidity/build-system.js"; +import type { ResolvedFile } from "../../../../types/solidity/resolved-file.js"; + +import { assertHardhatInvariant } from "@ignored/hardhat-vnext-errors"; +import { shortenPath } from "@ignored/hardhat-vnext-utils/path"; +import { intersects, maxSatisfying, satisfies } from "semver"; + +import { CompilationJobCreationErrorReason } from "../../../../types/solidity/build-system.js"; + +export class SolcConfigSelector { + readonly #buildProfileName: string; + readonly #buildProfile: SolidityBuildProfileConfig; + + /** + * Creates a new SolcConfigSelector that can be used to select the best solc + * configuration for subragraphs of the given dependency graph. + * + * All the queries are done in the context of the given dependency graph, and + * using the same build profile. + * + * @param buildProfileName The name of the build profile to use. 
+ * @param buildProfile The build profile config. + * @param _dependencyGraph The entire dependency graph of the project. + */ + constructor( + buildProfileName: string, + buildProfile: SolidityBuildProfileConfig, + _dependencyGraph: DependencyGraphImplementation, + ) { + this.#buildProfileName = buildProfileName; + this.#buildProfile = buildProfile; + } + + /** + * Selects the best solc configuration for a subgraph of the dependency graph + * with which this selector was created. + * + * @param subgraph A single-root subgraph of the dependency graph. + * @returns The best solc configuration for the subgraph, or a + * CompilationJobCreationError if no compatible solc version could be found. + */ + public selectBestSolcConfigForSingleRootGraph( + subgraph: DependencyGraphImplementation, + ): SolcConfig | CompilationJobCreationError { + const roots = subgraph.getRoots(); + + assertHardhatInvariant( + roots.size === 1, + "This method only works for single root graphs", + ); + + const [publicSourceName, root] = [...roots.entries()][0]; + + const allVersionPragamas = [...subgraph.getAllFiles()] + .map(({ content }) => content.versionPragmas) + .flat(1); + + const versionRange = Array.from(new Set(allVersionPragamas)).join(" "); + + const overriddenCompiler = this.#buildProfile.overrides[publicSourceName]; + + // if there's an override, we only check that + if (overriddenCompiler !== undefined) { + if (!satisfies(overriddenCompiler.version, versionRange)) { + return this.#getCompilationJobCreationError( + root, + subgraph, + [overriddenCompiler.version], + true, + ); + } + + return overriddenCompiler; + } + + // if there's no override, we find a compiler that matches the version range + const compilerVersions = this.#buildProfile.compilers.map((x) => x.version); + const matchingVersion = maxSatisfying(compilerVersions, versionRange); + + if (matchingVersion === null) { + return this.#getCompilationJobCreationError( + root, + subgraph, + compilerVersions, + false, + ); + } + + const matchingConfig = this.#buildProfile.compilers.find( + (x) => x.version === matchingVersion, + ); + + assertHardhatInvariant( + matchingConfig !== undefined, + `Matching config not found for version '${matchingVersion.toString()}'`, + ); + + return matchingConfig; + } + + #getCompilationJobCreationError( + root: ResolvedFile, + dependencyGraph: DependencyGraphImplementation, + compilerVersions: string[], + overriden: boolean, + ): CompilationJobCreationError { + const rootVersionRange = root.content.versionPragmas.join(" "); + if (maxSatisfying(compilerVersions, rootVersionRange) === null) { + let reason: CompilationJobCreationErrorReason; + let formattedReason: string; + if (overriden) { + reason = + CompilationJobCreationErrorReason.INCOMPATIBLE_OVERRIDEN_SOLC_VERSION; + formattedReason = `An override with incompatible solc version was found for this file.`; + } else { + reason = + CompilationJobCreationErrorReason.NO_COMPATIBLE_SOLC_VERSION_WITH_ROOT; + formattedReason = `No solc version enabled in this profile is compatible with this file.`; + } + + return { + reason, + rootFilePath: root.fsPath, + buildProfile: this.#buildProfileName, + formattedReason, + }; + } + + for (const transitiveDependency of this.#getTransitiveDependencies( + root, + dependencyGraph, + )) { + const transitiveDependencyVersionRange = + transitiveDependency.versionPragmasPath + .map((pragmas) => pragmas.join(" ")) + .join(" "); + + if (!intersects(rootVersionRange, transitiveDependencyVersionRange)) { + return { + reason: 
CompilationJobCreationErrorReason.IMPORT_OF_INCOMPATIBLE_FILE, + rootFilePath: root.fsPath, + buildProfile: this.#buildProfileName, + incompatibleImportPath: transitiveDependency.fsPath, + formattedReason: `Following these imports leads to an incompatible solc version pragma that no version can satisfy: + * ${shortenPath(root.fsPath)} + * ${transitiveDependency.fsPath.map((s) => shortenPath(s)).join("\n * ")} +`, + }; + } + } + + return { + reason: + CompilationJobCreationErrorReason.NO_COMPATIBLE_SOLC_VERSION_FOUND, + rootFilePath: root.fsPath, + buildProfile: this.#buildProfileName, + formattedReason: `No solc version enabled in this profile is compatible with this file and all of its dependencies.`, + }; + } + + *#getTransitiveDependencies( + root: ResolvedFile, + dependencyGraph: DependencyGraphImplementation, + visited = new Set([root]), + ): Generator<{ + fsPath: string[]; + versionPragmasPath: string[][]; + dependency: ResolvedFile; + }> { + for (const dependency of dependencyGraph.getDependencies(root)) { + if (visited.has(dependency)) { + continue; + } + + yield { + fsPath: [dependency.fsPath], + versionPragmasPath: [dependency.content.versionPragmas], + dependency, + }; + + for (const transitive of this.#getTransitiveDependencies( + dependency, + dependencyGraph, + visited, + )) { + yield { + fsPath: [dependency.fsPath, ...transitive.fsPath], + versionPragmasPath: [ + dependency.content.versionPragmas, + ...transitive.versionPragmasPath, + ], + dependency: transitive.dependency, + }; + } + } + } +} diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/solc-info.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/solc-info.ts new file mode 100644 index 0000000000..a417c38fd9 --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/build-system/solc-info.ts @@ -0,0 +1,74 @@ +const defaultEvmTargets: { [key: string]: string } = { + "0.5.1": "byzantium", + "0.5.2": "byzantium", + "0.5.3": "byzantium", + "0.5.4": "byzantium", + "0.5.5": "petersburg", + "0.5.6": "petersburg", + "0.5.7": "petersburg", + "0.5.8": "petersburg", + "0.5.9": "petersburg", + "0.5.10": "petersburg", + "0.5.11": "petersburg", + "0.5.12": "petersburg", + "0.5.13": "petersburg", + "0.5.14": "istanbul", + "0.5.15": "istanbul", + "0.5.16": "istanbul", + "0.5.17": "istanbul", + "0.6.0": "istanbul", + "0.6.1": "istanbul", + "0.6.2": "istanbul", + "0.6.3": "istanbul", + "0.6.4": "istanbul", + "0.6.5": "istanbul", + "0.6.6": "istanbul", + "0.6.7": "istanbul", + "0.6.8": "istanbul", + "0.6.9": "istanbul", + "0.6.10": "istanbul", + "0.6.11": "istanbul", + "0.6.12": "istanbul", + "0.7.0": "istanbul", + "0.7.1": "istanbul", + "0.7.2": "istanbul", + "0.7.3": "istanbul", + "0.7.4": "istanbul", + "0.7.5": "istanbul", + "0.7.6": "istanbul", + "0.8.0": "istanbul", + "0.8.1": "istanbul", + "0.8.2": "istanbul", + "0.8.3": "istanbul", + "0.8.4": "istanbul", + "0.8.5": "berlin", + "0.8.6": "berlin", + "0.8.7": "london", + "0.8.8": "london", + "0.8.9": "london", + "0.8.10": "london", + "0.8.11": "london", + "0.8.12": "london", + "0.8.13": "london", + "0.8.14": "london", + "0.8.15": "london", + "0.8.16": "london", + "0.8.17": "london", + "0.8.18": "paris", + "0.8.19": "paris", + "0.8.20": "shanghai", + "0.8.21": "shanghai", + "0.8.22": "shanghai", + "0.8.23": "shanghai", + "0.8.24": "shanghai", + "0.8.25": "cancun", + "0.8.26": "cancun", + "0.8.27": "cancun", + "0.8.28": "cancun", +}; + +export function getEvmVersionFromSolcVersion( + solcVersion: string, +): string | 
undefined { + return defaultEvmTargets[solcVersion]; +} diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/config.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/config.ts new file mode 100644 index 0000000000..e95ba2814d --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/config.ts @@ -0,0 +1,303 @@ +import type { HardhatUserConfig } from "../../../config.js"; +import type { + HardhatConfig, + SolidityBuildProfileConfig, + SolidityConfig, + SolidityUserConfig, +} from "../../../types/config.js"; +import type { HardhatUserConfigValidationError } from "../../../types/hooks.js"; + +import { isObject } from "@ignored/hardhat-vnext-utils/lang"; +import { resolveFromRoot } from "@ignored/hardhat-vnext-utils/path"; +import { + conditionalUnionType, + incompatibleFieldType, + validateUserConfigZodType, +} from "@ignored/hardhat-vnext-zod-utils"; +import { z } from "zod"; + +import { DEFAULT_BUILD_PROFILES } from "./build-profiles.js"; + +const sourcePathsType = conditionalUnionType( + [ + [(data) => typeof data === "string", z.string()], + [(data) => Array.isArray(data), z.array(z.string()).nonempty()], + ], + "Expected a string or an array of strings", +); + +const solcUserConfigType = z.object({ + version: z.string(), + settings: z.any().optional(), + compilers: incompatibleFieldType("This field is incompatible with `version`"), + overrides: incompatibleFieldType("This field is incompatible with `version`"), + profiles: incompatibleFieldType("This field is incompatible with `version`"), +}); + +const multiVersionSolcUserConfigType = z.object({ + compilers: z.array(solcUserConfigType).nonempty(), + overrides: z.record(z.string(), solcUserConfigType).optional(), + version: incompatibleFieldType("This field is incompatible with `compilers`"), + settings: incompatibleFieldType( + "This field is incompatible with `compilers`", + ), +}); + +const singleVersionSolidityUserConfigType = solcUserConfigType.extend({ + dependenciesToCompile: z.array(z.string()).optional(), + remappings: z.array(z.string()).optional(), + compilers: incompatibleFieldType("This field is incompatible with `version`"), + overrides: incompatibleFieldType("This field is incompatible with `version`"), + profiles: incompatibleFieldType("This field is incompatible with `version`"), +}); + +const multiVersionSolidityUserConfigType = + multiVersionSolcUserConfigType.extend({ + dependenciesToCompile: z.array(z.string()).optional(), + remappings: z.array(z.string()).optional(), + version: incompatibleFieldType( + "This field is incompatible with `compilers`", + ), + profiles: incompatibleFieldType( + "This field is incompatible with `compilers`", + ), + }); + +const buildProfilesSolidityUserConfigType = z.object({ + profiles: z.record( + z.string(), + conditionalUnionType( + [ + [(data) => isObject(data) && "version" in data, solcUserConfigType], + [ + (data) => isObject(data) && "compilers" in data, + multiVersionSolcUserConfigType, + ], + ], + "Expected an object configuring one or more versions of Solidity", + ), + ), + dependenciesToCompile: z.array(z.string()).optional(), + remappings: z.array(z.string()).optional(), + version: incompatibleFieldType("This field is incompatible with `profiles`"), + compilers: incompatibleFieldType( + "This field is incompatible with `profiles`", + ), + overrides: incompatibleFieldType( + "This field is incompatible with `profiles`", + ), +}); + +const soldityUserConfigType = conditionalUnionType( + [ + [(data) => typeof data === "string", z.string()], + 
[(data) => Array.isArray(data), z.array(z.string()).nonempty()], + [ + (data) => isObject(data) && "version" in data, + singleVersionSolidityUserConfigType, + ], + [ + (data) => isObject(data) && "compilers" in data, + multiVersionSolidityUserConfigType, + ], + [ + (data) => isObject(data) && "profiles" in data, + buildProfilesSolidityUserConfigType, + ], + ], + "Expected a version string, an array of version strings, or an object cofiguring one or more versions of Solidity or multiple build profiles", +); + +const userConfigType = z.object({ + paths: z + .object({ + sources: conditionalUnionType( + [ + [isObject, z.object({ solidity: sourcePathsType.optional() })], + [ + (data) => typeof data === "string" || Array.isArray(data), + sourcePathsType, + ], + ], + "Expected a string, an array of strings, or an object with an optional 'solidity' property", + ).optional(), + }) + .optional(), + solidity: soldityUserConfigType.optional(), +}); + +export function validateSolidityUserConfig( + userConfig: unknown, +): HardhatUserConfigValidationError[] { + const result = validateUserConfigZodType(userConfig, userConfigType); + + if ( + isObject(userConfig) && + isObject(userConfig.solidity) && + isObject(userConfig.solidity.profiles) && + !("default" in userConfig.solidity.profiles) + ) { + result.push({ + message: + "The 'default' profile is required when using Solidity build profiles", + path: ["solidity", "profiles"], + }); + } + + return result; +} + +export async function resolveSolidityUserConfig( + userConfig: HardhatUserConfig, + resolvedConfig: HardhatConfig, +): Promise { + let sourcesPaths = userConfig.paths?.sources; + + // TODO: use isObject when the type narrowing issue is fixed + sourcesPaths = + typeof sourcesPaths === "object" && !Array.isArray(sourcesPaths) + ? sourcesPaths.solidity + : sourcesPaths; + + sourcesPaths ??= "contracts"; + + sourcesPaths = Array.isArray(sourcesPaths) ? sourcesPaths : [sourcesPaths]; + + const resolvedPaths = sourcesPaths.map((p) => + resolveFromRoot(resolvedConfig.paths.root, p), + ); + + return { + ...resolvedConfig, + paths: { + ...resolvedConfig.paths, + sources: { + ...resolvedConfig.paths.sources, + solidity: resolvedPaths, + }, + }, + solidity: resolveSolidityConfig(userConfig.solidity ?? "0.8.0"), + }; +} + +function resolveSolidityConfig( + solidityConfig: SolidityUserConfig, +): SolidityConfig { + if (typeof solidityConfig === "string") { + solidityConfig = [solidityConfig]; + } + + if (Array.isArray(solidityConfig)) { + return { + profiles: { + default: { + compilers: solidityConfig.map((version) => ({ + version, + settings: {}, + })), + overrides: {}, + }, + }, + dependenciesToCompile: [], + remappings: [], + }; + } + + if ("version" in solidityConfig) { + return { + profiles: { + default: { + compilers: [ + { + version: solidityConfig.version, + settings: solidityConfig.settings ?? {}, + }, + ], + overrides: {}, + }, + }, + dependenciesToCompile: solidityConfig.dependenciesToCompile ?? [], + remappings: solidityConfig.remappings ?? [], + }; + } + + if ("compilers" in solidityConfig) { + return { + profiles: { + default: { + compilers: solidityConfig.compilers.map((compiler) => ({ + version: compiler.version, + settings: compiler.settings ?? {}, + })), + overrides: Object.fromEntries( + Object.entries(solidityConfig.overrides ?? {}).map( + ([sourceName, override]) => { + return [ + sourceName, + { + version: override.version, + settings: override.settings ?? 
{}, + }, + ]; + }, + ), + ), + }, + }, + dependenciesToCompile: solidityConfig.dependenciesToCompile ?? [], + remappings: solidityConfig.remappings ?? [], + }; + } + + const profiles: Record = {}; + + // TODO: Merge the profiles + for (const [profileName, profile] of Object.entries( + solidityConfig.profiles, + )) { + if ("version" in profile) { + profiles[profileName] = { + compilers: [ + { + version: profile.version, + settings: profile.settings ?? {}, + }, + ], + overrides: {}, + }; + continue; + } + + profiles[profileName] = { + compilers: profile.compilers.map((compiler) => ({ + version: compiler.version, + settings: compiler.settings ?? {}, + })), + overrides: Object.fromEntries( + Object.entries(profile.overrides ?? {}).map( + ([sourceName, override]) => { + return [ + sourceName, + { + version: override.version, + settings: override.settings ?? {}, + }, + ]; + }, + ), + ), + }; + } + + for (const profile of DEFAULT_BUILD_PROFILES) { + if (!(profile in profiles)) { + profiles[profile] = profiles.default; + } + } + + return { + profiles, + dependenciesToCompile: solidityConfig.dependenciesToCompile ?? [], + remappings: solidityConfig.remappings ?? [], + }; +} diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/hook-handlers/config.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/hook-handlers/config.ts index 43080454a9..2e7556e61f 100644 --- a/v-next/hardhat/src/internal/builtin-plugins/solidity/hook-handlers/config.ts +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/hook-handlers/config.ts @@ -1,29 +1,14 @@ import type { ConfigHooks } from "../../../../types/hooks.js"; -import { resolveFromRoot } from "@ignored/hardhat-vnext-utils/path"; import { - unionType, - validateUserConfigZodType, -} from "@ignored/hardhat-vnext-zod-utils"; -import { z } from "zod"; - -const sourcePathsType = unionType( - [z.string(), z.array(z.string())], - "Expected a string or an array of strings", -); - -const userConfigType = z.object({ - sources: unionType( - [sourcePathsType, z.object({ solidity: sourcePathsType.optional() })], - "Expected a string, an array of strings, or an object with an optional 'solidity' property", - ).optional(), -}); + resolveSolidityUserConfig, + validateSolidityUserConfig, +} from "../config.js"; export default async (): Promise> => { const handlers: Partial = { - validateUserConfig: async (userConfig) => { - return validateUserConfigZodType(userConfig, userConfigType); - }, + validateUserConfig: async (userConfig) => + validateSolidityUserConfig(userConfig), resolveUserConfig: async ( userConfig, resolveConfigurationVariable, @@ -34,34 +19,7 @@ export default async (): Promise> => { resolveConfigurationVariable, ); - let sourcesPaths = userConfig.paths?.sources; - - // TODO: use isObject when the type narrowing issue is fixed - sourcesPaths = - typeof sourcesPaths === "object" && !Array.isArray(sourcesPaths) - ? sourcesPaths.solidity - : sourcesPaths; - - sourcesPaths ??= "contracts"; - - sourcesPaths = Array.isArray(sourcesPaths) - ? 
sourcesPaths - : [sourcesPaths]; - - const resolvedPaths = sourcesPaths.map((p) => - resolveFromRoot(resolvedConfig.paths.root, p), - ); - - return { - ...resolvedConfig, - paths: { - ...resolvedConfig.paths, - sources: { - ...resolvedConfig.paths.sources, - solidity: resolvedPaths, - }, - }, - }; + return resolveSolidityUserConfig(userConfig, resolvedConfig); }, }; diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/hook-handlers/hre.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/hook-handlers/hre.ts new file mode 100644 index 0000000000..a52c1b42e9 --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/hook-handlers/hre.ts @@ -0,0 +1,115 @@ +import type { HardhatRuntimeEnvironmentHooks } from "../../../../types/hooks.js"; +import type { + BuildOptions, + CompilationJobCreationError, + CompileBuildInfoOptions, + FileBuildResult, + GetCompilationJobsOptions, + RunCompilationJobOptions, + SolidityBuildSystem, +} from "../../../../types/solidity/build-system.js"; +import type { CompilationJob } from "../../../../types/solidity/compilation-job.js"; +import type { + CompilerOutput, + CompilerOutputError, +} from "../../../../types/solidity/compiler-io.js"; +import type { SolidityBuildInfo } from "../../../../types/solidity.js"; +import type { SolidityBuildSystemOptions } from "../build-system/build-system.js"; + +class LazySolidityBuildSystem implements SolidityBuildSystem { + readonly #options: SolidityBuildSystemOptions; + + #buildSystem: SolidityBuildSystem | undefined; + + constructor(options: SolidityBuildSystemOptions) { + this.#options = options; + } + + public async getRootFilePaths(): Promise { + const buildSystem = await this.#getBuildSystem(); + return buildSystem.getRootFilePaths(); + } + + public async build( + rootFiles: string[], + options?: BuildOptions, + ): Promise> { + const buildSystem = await this.#getBuildSystem(); + return buildSystem.build(rootFiles, options); + } + + public async getCompilationJobs( + rootFiles: string[], + options?: GetCompilationJobsOptions, + ): Promise> { + const buildSystem = await this.#getBuildSystem(); + return buildSystem.getCompilationJobs(rootFiles, options); + } + + public async runCompilationJob( + compilationJob: CompilationJob, + options?: RunCompilationJobOptions, + ): Promise { + const buildSystem = await this.#getBuildSystem(); + return buildSystem.runCompilationJob(compilationJob, options); + } + + public async remapCompilerError( + compilationJob: CompilationJob, + error: CompilerOutputError, + shouldShortenPaths?: boolean, + ): Promise { + const buildSystem = await this.#getBuildSystem(); + return buildSystem.remapCompilerError( + compilationJob, + error, + shouldShortenPaths, + ); + } + + public async emitArtifacts( + compilationJob: CompilationJob, + compilerOutput: CompilerOutput, + ): Promise> { + const buildSystem = await this.#getBuildSystem(); + return buildSystem.emitArtifacts(compilationJob, compilerOutput); + } + + public async cleanupArtifacts(rootFilePaths: string[]): Promise { + const buildSystem = await this.#getBuildSystem(); + return buildSystem.cleanupArtifacts(rootFilePaths); + } + + public async compileBuildInfo( + buildInfo: SolidityBuildInfo, + options?: CompileBuildInfoOptions, + ): Promise { + const buildSystem = await this.#getBuildSystem(); + return buildSystem.compileBuildInfo(buildInfo, options); + } + + async #getBuildSystem(): Promise { + if (this.#buildSystem === undefined) { + const { SolidityBuildSystemImplementation } = await import( + 
"../build-system/build-system.js" + ); + this.#buildSystem = new SolidityBuildSystemImplementation(this.#options); + } + + return this.#buildSystem; + } +} + +export default async (): Promise> => { + return { + created: async (_context, hre) => { + hre.solidity = new LazySolidityBuildSystem({ + solidityConfig: hre.config.solidity, + projectRoot: hre.config.paths.root, + soliditySourcesPaths: hre.config.paths.sources.solidity, + artifactsPath: hre.config.paths.artifacts, + cachePath: hre.config.paths.cache, + }); + }, + }; +}; diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/index.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/index.ts index 1c4abc9861..6d02f26e78 100644 --- a/v-next/hardhat/src/internal/builtin-plugins/solidity/index.ts +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/index.ts @@ -1,12 +1,48 @@ import type { HardhatPlugin } from "../../../types/plugins.js"; +import { globalOption, task } from "../../core/config.js"; + import "./type-extensions.js"; const hardhatPlugin: HardhatPlugin = { id: "builtin:solidity", + dependencies: [ + async () => { + const { default: artifactsPlugin } = await import( + "../artifacts/index.js" + ); + return artifactsPlugin; + }, + ], hookHandlers: { config: import.meta.resolve("./hook-handlers/config.js"), + hre: import.meta.resolve("./hook-handlers/hre.js"), }, + tasks: [ + task("compile", "Compiles your project") + .addFlag({ + name: "force", + description: "Force compilation even if no files have changed", + }) + .addFlag({ + name: "quiet", + description: "Makes the compilation process less verbose", + }) + .addVariadicArgument({ + name: "files", + description: "An optional list of files to compile", + defaultValue: [], + }) + .setAction(import.meta.resolve("./tasks/compile.js")) + .build(), + ], + globalOptions: [ + globalOption({ + name: "buildProfile", + description: "The build profile to use", + defaultValue: "default", + }), + ], }; export default hardhatPlugin; diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/tasks/compile.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/tasks/compile.ts new file mode 100644 index 0000000000..0bb657db05 --- /dev/null +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/tasks/compile.ts @@ -0,0 +1,67 @@ +import type { NewTaskActionFunction } from "../../../../types/tasks.js"; + +import { HardhatError } from "@ignored/hardhat-vnext-errors"; +import { resolveFromRoot } from "@ignored/hardhat-vnext-utils/path"; + +import { FileBuildResultType } from "../../../../types/solidity.js"; +import { shouldMergeCompilationJobs } from "../build-profiles.js"; +import { isNpmRootPath } from "../build-system/root-paths-utils.js"; + +interface CompileActionArguments { + force: boolean; + files: string[]; + quiet: boolean; +} + +const compileAction: NewTaskActionFunction = async ( + { force, files, quiet }, + { solidity, globalOptions }, +) => { + const rootPaths = + files.length === 0 + ? 
await solidity.getRootFilePaths() + : files.map((file) => { + if (isNpmRootPath(file)) { + return file; + } + + return resolveFromRoot(process.cwd(), file); + }); + + const results = await solidity.build(rootPaths, { + force, + buildProfile: globalOptions.buildProfile, + mergeCompilationJobs: shouldMergeCompilationJobs( + globalOptions.buildProfile, + ), + quiet, + }); + + if ("reason" in results) { + throw new HardhatError( + HardhatError.ERRORS.SOLIDITY.COMPILATION_JOB_CREATION_ERROR, + { + reason: results.formattedReason, + rootFilePath: results.rootFilePath, + buildProfile: results.buildProfile, + }, + ); + } + + const sucessful = [...results.values()].every( + ({ type }) => + type === FileBuildResultType.CACHE_HIT || + type === FileBuildResultType.BUILD_SUCCESS, + ); + + if (!sucessful) { + throw new HardhatError(HardhatError.ERRORS.SOLIDITY.BUILD_FAILED); + } + + // If we recompiled the entire project we cleanup the artifacts + if (files.length === 0) { + await solidity.cleanupArtifacts(rootPaths); + } +}; + +export default compileAction; diff --git a/v-next/hardhat/src/internal/builtin-plugins/solidity/type-extensions.ts b/v-next/hardhat/src/internal/builtin-plugins/solidity/type-extensions.ts index 63acf4786b..7e64585791 100644 --- a/v-next/hardhat/src/internal/builtin-plugins/solidity/type-extensions.ts +++ b/v-next/hardhat/src/internal/builtin-plugins/solidity/type-extensions.ts @@ -1,11 +1,85 @@ import "../../../types/config.js"; -declare module "@ignored/hardhat-vnext/types/config" { +declare module "../../../types/config.js" { + export type SolidityUserConfig = + | string + | string[] + | SingleVersionSolidityUserConfig + | MultiVersionSolidityUserConfig + | BuildProfilesSolidityUserConfig; + + export interface SolcUserConfig { + version: string; + settings?: any; + } + + export interface MultiVersionSolcUserConfig { + compilers: SolcUserConfig[]; + overrides?: Record; + } + + export interface SingleVersionSolidityUserConfig extends SolcUserConfig { + dependenciesToCompile?: string[]; + remappings?: string[]; + } + + export interface MultiVersionSolidityUserConfig + extends MultiVersionSolcUserConfig { + dependenciesToCompile?: string[]; + remappings?: string[]; + } + + export interface BuildProfilesSolidityUserConfig { + profiles: Record; + dependenciesToCompile?: string[]; + remappings?: string[]; + } + + export interface HardhatUserConfig { + solidity?: SolidityUserConfig; + } + + export interface SolcConfig { + version: string; + settings: any; + } + + export interface SolidityBuildProfileConfig { + compilers: SolcConfig[]; + overrides: Record; + } + + export interface SolidityConfig { + profiles: Record; + dependenciesToCompile: string[]; + remappings: string[]; + } + + export interface HardhatConfig { + solidity: SolidityConfig; + } + export interface SourcePathsUserConfig { solidity?: string | string[]; } export interface SourcePathsConfig { - solidity: string | string[]; + solidity: string[]; + } +} + +import "../../../types/hre.js"; +import type { SolidityBuildSystem } from "../../../types/solidity/build-system.js"; + +declare module "../../../types/hre.js" { + export interface HardhatRuntimeEnvironment { + solidity: SolidityBuildSystem; + } +} + +import "../../../types/global-options.js"; +declare module "../../../types/global-options.js" { + export interface GlobalOptions { + buildProfile: string; } } diff --git a/v-next/hardhat/src/internal/constants.ts b/v-next/hardhat/src/internal/constants.ts index e3ec5ff58e..f105f7aaea 100644 --- 
a/v-next/hardhat/src/internal/constants.ts +++ b/v-next/hardhat/src/internal/constants.ts @@ -1,3 +1,7 @@ export const HARDHAT_PACKAGE_NAME = "hardhat"; export const HARDHAT_NAME = "Hardhat"; export const HARDHAT_WEBSITE_URL = "https://hardhat.org/"; + +// This constant is used to choose the default EVM version for solc versions +// that haven't been defined in ./builtin-plugins/solidity/build-system/solc-info.ts +export const DEFAULT_SOLC_EVM_VERSION = "cancun"; diff --git a/v-next/hardhat/src/internal/core/hre.ts b/v-next/hardhat/src/internal/core/hre.ts index abadcefa51..b692f6e357 100644 --- a/v-next/hardhat/src/internal/core/hre.ts +++ b/v-next/hardhat/src/internal/core/hre.ts @@ -16,6 +16,7 @@ import type { HookContext, HookManager } from "../../types/hooks.js"; import type { HardhatRuntimeEnvironment } from "../../types/hre.js"; import type { NetworkManager } from "../../types/network.js"; import type { HardhatPlugin } from "../../types/plugins.js"; +import type { SolidityBuildSystem } from "../../types/solidity/build-system.js"; import type { TaskManager } from "../../types/tasks.js"; import type { UserInterruptionManager } from "../../types/user-interruptions.js"; @@ -37,11 +38,12 @@ import { UserInterruptionManagerImplementation } from "./user-interruptions.js"; export class HardhatRuntimeEnvironmentImplementation implements HardhatRuntimeEnvironment { - // NOTE: This is a small architectural violation, as this shouldn't be needed - // here, because it's added by a plugin. But as that plugin is builtin, its - // type extensions also affect this module. + // NOTE: This is a small architectural violation, as these shouldn't be needed + // here, because they are added by plugins. But as those plugins are builtin, + // their type extensions also affect this module. public network!: NetworkManager; public artifacts!: ArtifactsManager; + public solidity!: SolidityBuildSystem; public static async create( inputUserConfig: HardhatUserConfig, diff --git a/v-next/hardhat/src/types/artifacts.ts b/v-next/hardhat/src/types/artifacts.ts index ac75d684ce..f9d0a6b651 100644 --- a/v-next/hardhat/src/types/artifacts.ts +++ b/v-next/hardhat/src/types/artifacts.ts @@ -1,10 +1,23 @@ +import type { SolidityBuildInfo } from "./solidity/solidity-artifacts.js"; + +/** + * A map of bare contract names and fully qualified contract names to their + * artifacts that will be completed by Hardhat's build system using module + * augmentation. + */ +/* eslint-disable-next-line @typescript-eslint/no-empty-interface -- This will +be populated by module augmentation */ +export interface ArtifactMap {} + +/** + * Returns the artifact type for the bare or fully qualified contract name. + */ +export type GetAtifactByName = + ContractNameT extends keyof ArtifactMap + ? ArtifactMap[ContractNameT] + : Artifact; + /** - * WARNING: This is a placholder, while the build system is being implemented. - * This ArtifactsManager will be replaced, but the capabilities are similar - * enough to aid development. - * - * TODO: Replace this with the real ArtifactsManager. - * * The ArtifactsManager is responsible for reading and writing artifacts from * the Hardhat build system. */ @@ -20,7 +33,9 @@ export interface ArtifactsManager { * @throws Throws an error if a non-unique contract name is used, * indicating which fully qualified names can be used instead. 
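+ *
+ * @example
+ * // Illustrative usage (an assumption, not part of the original patch);
+ * // it presumes the project defines a contract named "Counter":
+ * const artifact = await hre.artifacts.readArtifact("Counter");
+ * // `artifact` is narrowed through ArtifactMap when a plugin augments it,
+ * // and falls back to the generic Artifact type otherwise.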
*/ - readArtifact(contractNameOrFullyQualifiedName: string): Promise; + readArtifact( + contractNameOrFullyQualifiedName: ContractNameT, + ): Promise>; /** * Returns true if an artifact exists. @@ -41,8 +56,8 @@ export interface ArtifactsManager { * Returns the BuildInfo associated with the solc run that compiled a * contract. * - * Note that if your contract hasn't been compiled with solc this method - * can return undefined. + * Note that if your contract hasn't been compiled with Hardhat's build system + * this method can return undefined. */ getBuildInfo(fullyQualifiedName: string): Promise; @@ -61,177 +76,129 @@ export interface ArtifactsManager { */ getBuildInfoPaths(): Promise; - /** - * Saves a contract's artifact and debug file. - * - * @param artifact The artifact object. - */ - saveArtifact(artifact: Artifact): Promise; - - /** - * Saves the build info associated to a solc run. - * - * @param solcVersion The semver-compatible version number. - * @param solcLongVersion The full solc version. - * @param input The compiler input. - * @param output The compiler output. - */ - saveBuildInfo( - solcVersion: string, - solcLongVersion: string, - input: CompilerInput, - output: CompilerOutput, - ): Promise; - /** * Returns the absolute path to the given artifact. * - * @param fullyQualifiedName The FQN of the artifact. + * @param contractNameOrFullyQualifiedName The name or fully qualified name + * of the contract. */ getArtifactPath(contractNameOrFullyQualifiedName: string): Promise; } /** - * WARNING: This is a placholder, while the build system is being implemented. + * TODO: This type could be improved to better represent the ABI. */ -export type Abi = readonly any[] | any[]; +export type Abi = readonly any[]; /** - * WARNING: This is a placholder, while the build system is being implemented. - * - * An artifact representing the compilation output of a contract. + * An Artifact represents the compilation output of a single contract. * * This file has just enough information to deploy the contract and interact * with an already deployed instance of it. - * - * For debugging information and other extra information, you should look for - * its companion DebugFile, which should be stored right next to it. - * - * Note that DebugFiles are only generated for Solidity contracts. */ export interface Artifact { - _format: string; - contractName: string; - sourceName: string; - abi: AbiT; - bytecode: string; // "0x"-prefixed hex string - deployedBytecode: string; // "0x"-prefixed hex string - linkReferences: LinkReferences; - deployedLinkReferences: LinkReferences; -} + /** + * The version identifier of this format. + */ + readonly _format: "hh3-artifact-1"; -/** - * WARNING: This is a placholder, while the build system is being implemented. - * - * A BuildInfo is a file that contains all the information of a solc run. It - * includes all the necessary information to recreate that exact same run, and - * all of its output. - */ -export interface BuildInfo { - _format: string; - id: string; - solcVersion: string; - solcLongVersion: string; - input: CompilerInput; - output: CompilerOutput; -} + /** + * The bare name of the contract (i.e. without the source name). + */ + readonly contractName: string; -/** - * WARNING: This is a placholder, while the build system is being implemented. - */ -export interface LinkReferences { - [libraryFileName: string]: { - [libraryName: string]: Array<{ length: number; start: number }>; - }; -} + /** + * The name of the file where the contract is defined. 
+ * + * When Hardhat generates artifacts, it uses the following logic to determine + * the source name: + * - The relative path from the root of the project, if the contract is + * defined in a file in the project. + * - The npm module identifier (i.e. `/`) if the contract + * is defined in a file in a npm package. + * - This may or may not be the same as the source name used by `solc`. + * For that information, see `inputSourceName`. + * + * This source name is used to determine the path to the artifact, and to + * generate its fully qualified name. + */ + readonly sourceName: string; -/** - * WARNING: This is a placholder, while the build system is being implemented. - */ -export interface CompilerInput { - language: string; - sources: { [sourceName: string]: { content: string } }; - settings: { - viaIR?: boolean; - optimizer: { - runs?: number; - enabled?: boolean; - details?: { - yulDetails: { - optimizerSteps: string; - }; - }; - }; - metadata?: { useLiteralContent: boolean }; - outputSelection: { - [sourceName: string]: { - [contractName: string]: string[]; - }; - }; - evmVersion?: string; - libraries?: { - [libraryFileName: string]: { - [libraryName: string]: string; - }; - }; - remappings?: string[]; - }; -} + /** + * The ABI of the contract. + */ + readonly abi: AbiT; -/** - * WARNING: This is a placholder, while the build system is being implemented. - */ -export interface CompilerOutputContract { - abi: any; - evm: { - bytecode: CompilerOutputBytecode; - deployedBytecode: CompilerOutputBytecode; - methodIdentifiers: { - [methodSignature: string]: string; - }; - }; -} + /** + * The bytecode used to deploy the contract. + */ + readonly bytecode: string; // "0x"-prefixed hex string -/** - * WARNING: This is a placholder, while the build system is being implemented. - */ -export interface CompilerOutput { - sources: CompilerOutputSources; - contracts: { - [sourceName: string]: { - [contractName: string]: CompilerOutputContract; - }; - }; + /** + * The link references of the deployment bytecode. + */ + readonly linkReferences: LinkReferences; + + /** + * The deployed or runtime bytecode of the contract. + */ + readonly deployedBytecode: string; // "0x"-prefixed hex string + + /** + * The link references of the deployed bytecode. + */ + readonly deployedLinkReferences: LinkReferences; + + /** + * The references to the immutable variables that get embedded in the deployed + * bytecode. + */ + readonly immutableReferences?: ImmutableReferences; + + /** + * The id of the build info that was used to generate this artifact. + * + * This may not be present if the artifact wasn't generated by Hardhat's build + * system. + */ + readonly buildInfoId?: string; + + /** + * The source name of the file in the build info's source map that has this + * contract's code. + * + * This can be different from the source name of the artifact, when the file + * comes from an npm package. + */ + readonly inputSourceName?: string; } /** - * WARNING: This is a placholder, while the build system is being implemented. + * The link references of a contract, which need to be resolved before using it. */ -export interface CompilerOutputSource { - id: number; - ast: any; +export interface LinkReferences { + [librarySourceName: string]: { + [libraryName: string]: Array<{ length: number; start: number }>; + }; } /** - * WARNING: This is a placholder, while the build system is being implemented. + * The references to the immutable variables that get embedded in the deployed + * bytecode. 
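+ *
+ * Illustrative shape (an assumption, not part of the original patch):
+ * `{ "42": [{ "start": 128, "length": 32 }] }`, where the key is the id of an
+ * immutable variable.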
+ * + * Each immutable variable is represented by an id, which in the case of solc + * is the id of the AST node that represents the variable. */ -export interface CompilerOutputSources { - [sourceName: string]: CompilerOutputSource; +export interface ImmutableReferences { + [immuatableId: string]: Array<{ start: number; length: number }>; } /** - * WARNING: This is a placholder, while the build system is being implemented. + * A BuildInfo is a file containing all the information to reproduce a build. + * + * Note that currently, BuildInfos are only generated for Solidity contracts, + * and this will change once we add support for Vyper, so if you are using this, + * keep in mind that you will need to update your code to support or ignore + * Vyper's artifacts. */ -export interface CompilerOutputBytecode { - object: string; - opcodes: string; - sourceMap: string; - linkReferences: { - [sourceName: string]: { - [libraryName: string]: Array<{ start: number; length: 20 }>; - }; - }; - immutableReferences?: { - [key: string]: Array<{ start: number; length: number }>; - }; -} +export type BuildInfo = SolidityBuildInfo; diff --git a/v-next/hardhat/src/types/hre.ts b/v-next/hardhat/src/types/hre.ts index d2bdfb61c3..599dfb4b4e 100644 --- a/v-next/hardhat/src/types/hre.ts +++ b/v-next/hardhat/src/types/hre.ts @@ -10,7 +10,8 @@ export interface HardhatRuntimeEnvironment { readonly config: HardhatConfig; readonly globalOptions: GlobalOptions; readonly interruptions: UserInterruptionManager; - // These fields are defined using module agumentation in this same package: + // These fields are defined using module agumentation despite being part of + // Hardhat's core: // readonly hooks: HookManager; // readonly tasks: TaskManager; } diff --git a/v-next/hardhat/src/types/solidity.ts b/v-next/hardhat/src/types/solidity.ts new file mode 100644 index 0000000000..d0491244eb --- /dev/null +++ b/v-next/hardhat/src/types/solidity.ts @@ -0,0 +1,6 @@ +export * from "./solidity/build-system.js"; +export * from "./solidity/compilation-job.js"; +export * from "./solidity/compiler-io.js"; +export * from "./solidity/dependency-graph.js"; +export * from "./solidity/resolved-file.js"; +export * from "./solidity/solidity-artifacts.js"; diff --git a/v-next/hardhat/src/types/solidity/build-system.ts b/v-next/hardhat/src/types/solidity/build-system.ts new file mode 100644 index 0000000000..821622e61e --- /dev/null +++ b/v-next/hardhat/src/types/solidity/build-system.ts @@ -0,0 +1,285 @@ +import type { CompilationJob } from "./compilation-job.js"; +import type { CompilerOutput, CompilerOutputError } from "./compiler-io.js"; +import type { SolidityBuildInfo } from "./solidity-artifacts.js"; + +/** + * The options of the `build` method. + */ +export interface BuildOptions { + /** + * If `true`, it forces rebuilding every file, ignoring the compilation cache. + */ + force?: boolean; + + /** + * The build profile to use. + * + * Default: "default". + */ + buildProfile?: string; + + /** + * If `true`, this option allows the build process to merge compilation jobs + * if they have the same compiler version and settings. + * + * This is an useful optimization to be used when compiling a large number of + * files, but keep in mind that it can lead to unrelated files being compiled + * together, block explorer verification processes trickier and/or with + * unexpected results. + */ + mergeCompilationJobs?: boolean; + + /** + * The number of concurrent compilation jobs to run. + * + * Default: The number of CPU cores - 1. 
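+ *
+ * For example (illustrative): on an 8-core machine, up to 7 compilation jobs
+ * would run concurrently unless this option is set.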
+ */ + concurrency?: number; + + /** + * An array of remappings provided by the user. + */ + userProvidedRemappings?: string[]; + + /** + * If `true`, the build process doesn't print any output. + */ + quiet?: boolean; +} + +/** + * The options of the `getCompilationJobs` method. + * + * Note that this option object includes a `quiet` property, as this process + * may require downloading compilers, and potentially printing some output. + */ +export type GetCompilationJobsOptions = Omit< + BuildOptions, + "force" | "removeUnusedArtifacts" +>; + +/** + * The options of the `runCompilationJob` method. + */ +export interface RunCompilationJobOptions { + /** + * If `true`, the compilation process doesn't print any output. + */ + quiet?: boolean; +} + +/** + * The options of the `compileBuildInfo` method. + */ +export interface CompileBuildInfoOptions { + /** + * If `true`, this option forces the build system to recompile the build info, + * even if its output is cached. + */ + force?: boolean; + + /** + * If `true`, the compilation process doesn't print any output. + */ + quiet?: boolean; +} + +export enum CompilationJobCreationErrorReason { + NO_COMPATIBLE_SOLC_VERSION_FOUND = "NO_COMPATIBLE_SOLC_VERSION_FOUND", + NO_COMPATIBLE_SOLC_VERSION_WITH_ROOT = "NO_COMPATIBLE_SOLC_VERSION_WITH_ROOT", + INCOMPATIBLE_OVERRIDEN_SOLC_VERSION = "INCOMPATIBLE_OVERRIDEN_SOLC_VERSION", + IMPORT_OF_INCOMPATIBLE_FILE = "IMPORT_OF_INCOMPATIBLE_FILE", +} + +export interface BaseCompilationJobCreationError { + buildProfile: string; + rootFilePath: string; + formattedReason: string; +} + +export interface CompilationJobCreationErrorNoCompatibleSolcVersionFound + extends BaseCompilationJobCreationError { + reason: CompilationJobCreationErrorReason.NO_COMPATIBLE_SOLC_VERSION_WITH_ROOT; +} + +export interface CompilationJobCreationErrorIncompatibleOverridenSolcVersion + extends BaseCompilationJobCreationError { + reason: CompilationJobCreationErrorReason.INCOMPATIBLE_OVERRIDEN_SOLC_VERSION; +} + +export interface CompilationJobCreationErrorIportOfIncompatibleFile + extends BaseCompilationJobCreationError { + reason: CompilationJobCreationErrorReason.IMPORT_OF_INCOMPATIBLE_FILE; + // The absolute paths of the imported files, starting from the root, that + // lead to the first file with an incompatible version pragma. + incompatibleImportPath: string[]; +} + +export interface NoCompatibleSolcVersionFound + extends BaseCompilationJobCreationError { + reason: CompilationJobCreationErrorReason.NO_COMPATIBLE_SOLC_VERSION_FOUND; +} + +export type CompilationJobCreationError = + | CompilationJobCreationErrorNoCompatibleSolcVersionFound + | CompilationJobCreationErrorIportOfIncompatibleFile + | CompilationJobCreationErrorIncompatibleOverridenSolcVersion + | NoCompatibleSolcVersionFound; + +/** + * The result of building a file. + */ +export enum FileBuildResultType { + CACHE_HIT = "CACHE_HIT", + BUILD_SUCCESS = "BUILD_SUCCESS", + BUILD_FAILURE = "BUILD_FAILURE", +} + +export type FileBuildResult = + | CacheHitFileBuildResult + | SuccessfulFileBuildResult + | FailedFileBuildResult; + +export interface CacheHitFileBuildResult { + type: FileBuildResultType.CACHE_HIT; + // TODO: Should we remove this? It is a buildId of an already existing build + // info.
+ buildId: string; +} + +export interface SuccessfulFileBuildResult { + type: FileBuildResultType.BUILD_SUCCESS; + buildId: string; + contractArtifactsGenerated: string[]; + warnings: CompilerOutputError[]; +} + +export interface FailedFileBuildResult { + type: FileBuildResultType.BUILD_FAILURE; + buildId: string; + errors: CompilerOutputError[]; +} + +/** + * The Solidity build system. + */ +export interface SolidityBuildSystem { + /** + * Returns all the root files of the project. + * + * The root files are either absolute paths or + * `npm:/` URIs. + * + * @returns An array of root file paths. + */ + getRootFilePaths(): Promise; + + /** + * Builds the provided files, generating their compilation artifacts. + * + * @param rootFilePaths The files to build, which can be either absolute paths + * or `npm:/` URIs. + * @param options The options to use when building the files. + * @returns An `Map` of the files to their build results, or an error if + * there was a problem when trying to create the necessary compilation jobs. + */ + build( + rootFilePaths: string[], + options?: BuildOptions, + ): Promise>; + + /** + * Returns the CompilationJobs that would be used to build the provided files. + * + * Note that if `options.mergeCompilationJobs` is true, the same instance of + * can be returned for multiple files, so you should deduplicate the results + * before using them. + * + * @param rootFilePaths The files to analyze, which can be either absolute + * paths or `npm:/` URIs. + * @param options The options to use when analyzing the files. + * @returns A `Map` of the files to their compilation jobs, or an error if + * there was a problem when trying to create them. + */ + getCompilationJobs( + rootFilePaths: string[], + options?: GetCompilationJobsOptions, + ): Promise>; + + /** + * Returns the output of running the given compilation job. + * + * Note that this method returns the compiler output verbatim, as `solc` + * returns it. This means that any error message or location will use + * source names, and not fs paths. To transform the paths to fs paths, use + * the `remapCompilerError` method. + * + * @param compilationJob The compilation job to run. + * @param options The options to use when running the compilation job. + * @returns The output of the compilation, as returned by `solc`. + */ + runCompilationJob( + compilationJob: CompilationJob, + options?: RunCompilationJobOptions, + ): Promise; + + /** + * Remaps the given compiler error paths from source names to fs paths. + * + * @param compilationJob The compilation job where the error occurred. + * @param error The compiler error to remap. + * @param shouldShortenPaths If `true`, the paths will be shortened to their + * relative path from the CWD, if that results in a shorter path. + */ + remapCompilerError( + compilationJob: CompilationJob, + error: CompilerOutputError, + shouldShortenPaths?: boolean, + ): Promise; + + /** + * Emits the artifacts of the given compilation job. + * + * @param compilationJob The compilation job to emit the artifacts of. + * @param compilerOutput The result of running the compilation job. + * @returns A map from public source name to the absolute paths of the + * artifacts that were emitted for it. + */ + emitArtifacts( + compilationJob: CompilationJob, + compilerOutput: CompilerOutput, + ): Promise>; + + /** + * Analyzes the project and cleans up the artifacts by: + * - Removing any existing artifact of non-existent contracts. + * - Removing any unreachable build info and build info output files. 
+ * - Overloading the `ArtifactMap` entries for repeated contract names so + * so that they map to `never`. + * + * This method should only be used after a complete build has succeeded, as + * it relies on the build system to have generated all the necessary artifact + * files. + + * @param rootFilePaths All the root files of the project. + */ + cleanupArtifacts(rootFilePaths: string[]): Promise; + + /** + * Compiles a build info, returning the output of the compilation, verbatim, + * as `solc` returns it. + * + * @param buildInfo The build info to compile. + * @param options The options to use when compiling the build info. + * @returns The output of the compilation. + */ + compileBuildInfo( + buildInfo: SolidityBuildInfo, + options?: CompileBuildInfoOptions, + ): Promise; +} diff --git a/v-next/hardhat/src/types/solidity/compilation-job.ts b/v-next/hardhat/src/types/solidity/compilation-job.ts new file mode 100644 index 0000000000..fab052de6e --- /dev/null +++ b/v-next/hardhat/src/types/solidity/compilation-job.ts @@ -0,0 +1,41 @@ +import type { CompilerInput } from "./compiler-io.js"; +import type { DependencyGraph } from "./dependency-graph.js"; +import type { SolcConfig } from "../config.js"; + +/** + * A compilation job to be run using solc. + */ +export interface CompilationJob { + /** + * The dependency graph of the compilation job, whose root files' artifacts + * will be emitted. + */ + dependencyGraph: DependencyGraph; + + /** + * The solc config to use. + */ + solcConfig: SolcConfig; + + /** + * The long version of the solc compiler to be used. + */ + solcLongVersion: string; + + /** + * Returns the solc input to be used. + */ + getSolcInput(): CompilerInput; + + /** + * Returns the build id of the compilation job. + * + * The id is guaranteed to be deterministicly generated based on the solc + * input that this compilation job would generate, the solc long version, + * and the current solidity build info format that Hardhat uses. + * + * While deterministic, it shouldn't be expected to be stable across different + * versions of Hardhat. 
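+ *
+ * Illustrative consequence (an inference, not stated verbatim in the original
+ * patch): two compilation jobs that produce the same solc input and use the
+ * same solc long version share the same build id, which is how an unchanged
+ * file can be reported as a CACHE_HIT that points to an already existing
+ * build info.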
+ */ + getBuildId(): string; +} diff --git a/v-next/hardhat/src/types/solidity/compiler-io.ts b/v-next/hardhat/src/types/solidity/compiler-io.ts new file mode 100644 index 0000000000..b050795e07 --- /dev/null +++ b/v-next/hardhat/src/types/solidity/compiler-io.ts @@ -0,0 +1,82 @@ +export interface CompilerInput { + language: string; + sources: { [sourceName: string]: { content: string } }; + settings: { + viaIR?: boolean; + optimizer: { + runs?: number; + enabled?: boolean; + details?: { + yulDetails: { + optimizerSteps: string; + }; + }; + }; + metadata?: { useLiteralContent: boolean }; + outputSelection: { + [sourceName: string]: { + [contractName: string]: string[]; + }; + }; + evmVersion?: string; + libraries?: { + [libraryFileName: string]: { + [libraryName: string]: string; + }; + }; + remappings?: string[]; + }; +} + +export interface CompilerOutputSource { + id: number; + ast: any; +} + +export interface CompilerOutputSources { + [sourceName: string]: CompilerOutputSource; +} + +export interface CompilerOutputBytecode { + object: string; + opcodes: string; + sourceMap: string; + linkReferences: { + [sourceName: string]: { + [libraryName: string]: Array<{ start: number; length: 20 }>; + }; + }; + immutableReferences?: { + [key: string]: Array<{ start: number; length: number }>; + }; +} + +export interface CompilerOutputContract { + abi: any; + evm?: { + bytecode?: CompilerOutputBytecode; + deployedBytecode?: CompilerOutputBytecode; + methodIdentifiers: { + [methodSignature: string]: string; + }; + }; +} + +export interface CompilerOutput { + errors?: CompilerOutputError[]; + sources: CompilerOutputSources; + contracts?: { + [sourceName: string]: { + [contractName: string]: CompilerOutputContract; + }; + }; +} + +export interface CompilerOutputError { + type: string; + component: string; + message: string; + severity: "error" | "warning" | "info"; + errorCode?: string; + formattedMessage?: string; +} diff --git a/v-next/hardhat/src/types/solidity/dependency-graph.ts b/v-next/hardhat/src/types/solidity/dependency-graph.ts new file mode 100644 index 0000000000..3a11fe1359 --- /dev/null +++ b/v-next/hardhat/src/types/solidity/dependency-graph.ts @@ -0,0 +1,57 @@ +import type { ResolvedFile } from "./resolved-file.js"; + +/** + * A Solidity dependency graph. + */ +export interface DependencyGraph { + /** + * Gets a map of public source names to root files. + */ + getRoots(): ReadonlyMap<string, ResolvedFile>; + + /** + * Returns an iterable with all the files. + */ + getAllFiles(): Iterable<ResolvedFile>; + + /** + * Returns true if the graph contains the given file. + */ + hasFile(file: ResolvedFile): boolean; + + /** + * Returns the set of dependencies of the given file. + * + * @param file The file to get the dependencies of. It must be present in the + * graph. + */ + getDependencies(file: ResolvedFile): ReadonlySet<ResolvedFile>; + + /** + * Returns a file by its source name, if present. + * + * @param sourceName The source name of the file. + * @returns The file, if present. If found, `file.sourceName` is equal to + * `sourceName`. + */ + getFileBySourceName(sourceName: string): ResolvedFile | undefined; + + /** + * Returns a subgraph of the graph, containing only the given root files and + * their transitive dependencies. + * + * @param rootPublicSourceNames The public source names of the roots of the + * subgraph. They must be present in the graph. + */ + getSubgraph(...rootPublicSourceNames: string[]): DependencyGraph; + + /** + * A method to merge two dependency graphs.
The resulting graph will have all + * the files of both graphs, with all the dependencies of the files in both + * graphs, and the roots of both graphs as its roots. + * + * @param other The other DependencyGraph to merge with, which must have been + * created with the same Resolver. + */ + merge(other: DependencyGraph): DependencyGraph; +} diff --git a/v-next/hardhat/src/types/solidity/resolved-file.ts b/v-next/hardhat/src/types/solidity/resolved-file.ts new file mode 100644 index 0000000000..be8512f956 --- /dev/null +++ b/v-next/hardhat/src/types/solidity/resolved-file.ts @@ -0,0 +1,113 @@ +/** + * The representation of an npm package. + */ +export interface ResolvedNpmPackage { + /** + * The name of the package, potentially scoped. + */ + name: string; + + /** + * The version of the package. + */ + version: string; + + /** + * The path to the package's root directory. + */ + rootFsPath: string; + + /** + * The prefix that represents the source name of the package's files. + * + * For example, package 'foo' with version '1.2.3' would have a root source + * name of 'npm/foo@1.2.3/'. If the package is part of the monorepo, the root + * source name would be 'npm/package@local/'. + * + * Note that this can be derived from the rest of the fields, but it's + * cached here for performance reasons. + */ + rootSourceName: string; +} + +/** + * The possible types of resolved files. + */ +export enum ResolvedFileType { + PROJECT_FILE = "PROJECT_FILE", + NPM_PACKGE_FILE = "NPM_PACKAGE_FILE", +} + +/** + * A file that's part of the Hardhat project (i.e. not installed through npm). + */ +export interface ProjectResolvedFile { + type: ResolvedFileType.PROJECT_FILE; + /** + * The source name of a project file is its relative path from the Hardhat + * project root. + */ + sourceName: string; + + /** + * The absolute path to the file. + */ + fsPath: string; + + /** + * The file contents. + */ + content: FileContent; +} + +/** + * A file that's part of an npm package. + */ +export interface NpmPackageResolvedFile { + type: ResolvedFileType.NPM_PACKGE_FILE; + + /** + * The source name of an npm package file is + * `npm/<package-name>@<version>/<path/from/package/root>`. + */ + sourceName: string; + + /** + * The absolute path to the file. + */ + fsPath: string; + + /** + * The file contents. + */ + content: FileContent; + + /** + * The package this file belongs to. + */ + package: ResolvedNpmPackage; +} + +/** + * The result of resolving a file or import using a Resolver. + */ +export type ResolvedFile = ProjectResolvedFile | NpmPackageResolvedFile; + +/** + * The contents of a Solidity file. + */ +export interface FileContent { + /** + * The raw text of the file. + */ + text: string; + + /** + * The list of importPaths that are used in the file. + */ + importPaths: string[]; + + /** + * The list of version pragmas that are used in the file. + */ + versionPragmas: string[]; +} diff --git a/v-next/hardhat/src/types/solidity/solidity-artifacts.ts b/v-next/hardhat/src/types/solidity/solidity-artifacts.ts new file mode 100644 index 0000000000..eb1919f1d6 --- /dev/null +++ b/v-next/hardhat/src/types/solidity/solidity-artifacts.ts @@ -0,0 +1,62 @@ +import type { CompilerInput, CompilerOutput } from "./compiler-io.js"; + +/** + * A SolidityBuildInfo is a file that contains all the information of a solc + * run. It includes all the necessary information to recreate that exact same + * run, and all of its output. + */ +export interface SolidityBuildInfo { + /** + * The version identifier of this format.
+ */ + readonly _format: "hh3-sol-build-info-1"; + + /** + * The id of the build, which is derived from the rest of the data, + * guaranteeing that it's unique and deterministic. + */ + readonly id: string; + + /** + * The solc version used to compile the build. + */ + readonly solcVersion: string; + + /** + * The long solc version used to compile the build. + */ + readonly solcLongVersion: string; + + /** + * A mapping from public source names to input source names, for the root + * files of the build (i.e. the files whose artifacts were being compiled). + * + * @see import("../artifacts.js").Artifact.inputSourceName + */ + readonly publicSourceNameMap: Record<string, string>; + + /** + * The compiler input, as provided to solc. + */ + readonly input: CompilerInput; +} + +/** + * The output of compiling a Solidity build info. + */ +export interface SolidityBuildInfoOutput { + /** + * The version identifier of this format. + */ + readonly _format: "hh3-sol-build-info-output-1"; + + /** + * The id of the SolidityBuildInfo. + */ + readonly id: string; + + /** + * The `solc` output, verbatim (i.e. as returned by `solc`). + */ + readonly output: CompilerOutput; +} diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/dependency-resolver.ts b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/dependency-resolver.ts new file mode 100644 index 0000000000..c78e20dd5e --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/dependency-resolver.ts @@ -0,0 +1,928 @@ +import type { Resolver } from "../../../../../../src/internal/builtin-plugins/solidity/build-system/resolver/types.js"; +import type { + ResolvedFile, + ProjectResolvedFile, + ResolvedNpmPackage, + NpmPackageResolvedFile, +} from "../../../../../../src/types/solidity/resolved-file.js"; + +import assert from "node:assert/strict"; +import path from "node:path"; +import { after, before, beforeEach, describe, it } from "node:test"; + +import { HardhatError } from "@ignored/hardhat-vnext-errors"; +import { assertRejectsWithHardhatError } from "@nomicfoundation/hardhat-test-utils"; + +import { + fsPathToSourceNamePath, + ResolverImplementation, +} from "../../../../../../src/internal/builtin-plugins/solidity/build-system/resolver/dependency-resolver.js"; +import { ResolvedFileType } from "../../../../../../src/types/solidity/resolved-file.js"; + +const TEST_FIXTURES_ROOT = path.resolve(import.meta.dirname, "test-fixtures"); + +const FIXTURE_HARDHAT_PROJECT_ROOT = path.resolve( + TEST_FIXTURES_ROOT, + "monorepo/packages/hardhat-project", +); + +function assertResolvedProjectFile( + resolvedFile: ResolvedFile, + pathFromProjectRoot: string, +): asserts resolvedFile is ProjectResolvedFile { + assert.ok( + resolvedFile.type === ResolvedFileType.PROJECT_FILE, + `Resolved file ${resolvedFile.fsPath} is not a project file`, + ); + assert.equal( + resolvedFile.sourceName, + fsPathToSourceNamePath(pathFromProjectRoot), + ); + assert.equal( + resolvedFile.fsPath, + path.resolve(FIXTURE_HARDHAT_PROJECT_ROOT, pathFromProjectRoot), + ); + + const pathFromTestFixturesRoot = path.relative( + TEST_FIXTURES_ROOT, + resolvedFile.fsPath, + ); + + // Just as a way to validate which file we are reading the contents from, + // we wrote their unix-style relative path from the fixture root + assert.deepEqual(resolvedFile.content, { + text: fsPathToSourceNamePath(pathFromTestFixturesRoot) + "\n", + importPaths: [], + versionPragmas: [], + }); +} + +function
assertNpmPackageResolvedFile( + resolvedFile: ResolvedFile, + pacakge: Omit<ResolvedNpmPackage, "rootFsPath">, + packagePathFromTestFixturesRoot: string, + filePathFromPackageRoot: string, +): asserts resolvedFile is NpmPackageResolvedFile { + assert.ok( + resolvedFile.type === ResolvedFileType.NPM_PACKGE_FILE, + `Resolved file ${resolvedFile.fsPath} is not an npm file`, + ); + + const filePathFromTestFixturesRoot = path.join( + packagePathFromTestFixturesRoot, + filePathFromPackageRoot, + ); + + const packageRootPath = path.join( + TEST_FIXTURES_ROOT, + packagePathFromTestFixturesRoot, + ); + + assert.deepEqual(resolvedFile.package, { + ...pacakge, + rootFsPath: packageRootPath, + }); + assert.equal( + resolvedFile.sourceName, + pacakge.rootSourceName + fsPathToSourceNamePath(filePathFromPackageRoot), + ); + assert.equal( + resolvedFile.fsPath, + path.join(TEST_FIXTURES_ROOT, filePathFromTestFixturesRoot), + ); + + // Just as a way to validate which file we are reading the contents from, + // we wrote their unix-style relative path from the fixture root + assert.deepEqual(resolvedFile.content, { + text: fsPathToSourceNamePath(filePathFromTestFixturesRoot) + "\n", + importPaths: [], + versionPragmas: [], + }); +} + +describe("Resolver", () => { + // Some of the error messages in the resolver use a file path based on the + // CWD, so we set it for these tests + let originalCwd: string; + + before(() => { + originalCwd = process.cwd(); + process.chdir(FIXTURE_HARDHAT_PROJECT_ROOT); + }); + + after(() => { + process.chdir(originalCwd); + }); + + describe("Project files resolution", () => { + it("Should throw if the file isn't part of the project", async () => { + const resolver = await ResolverImplementation.create( + FIXTURE_HARDHAT_PROJECT_ROOT, + [], + ); + + let file = "foo.sol"; + await assertRejectsWithHardhatError( + () => resolver.resolveProjectFile(file), + HardhatError.ERRORS.SOLIDITY.RESOLVING_INCORRECT_FILE_AS_PROJECT_FILE, + { file }, + ); + + file = "/asd/asd/foo.sol"; + await assertRejectsWithHardhatError( + () => resolver.resolveProjectFile(file), + HardhatError.ERRORS.SOLIDITY.RESOLVING_INCORRECT_FILE_AS_PROJECT_FILE, + { file }, + ); + }); + + it("Should resolve them to project files with their path from the project root as sourceName", async () => { + const resolver = await ResolverImplementation.create( + FIXTURE_HARDHAT_PROJECT_ROOT, + [], + ); + + assertResolvedProjectFile( + await resolver.resolveProjectFile( + path.join(FIXTURE_HARDHAT_PROJECT_ROOT, "contracts/File.sol"), + ), + "contracts/File.sol", + ); + + assertResolvedProjectFile( + await resolver.resolveProjectFile( + path.join(FIXTURE_HARDHAT_PROJECT_ROOT, "File.sol"), + ), + "File.sol", + ); + + assertResolvedProjectFile( + await resolver.resolveProjectFile( + path.join(FIXTURE_HARDHAT_PROJECT_ROOT, "npm/File.sol"), + ), + "npm/File.sol", + ); + }); + + it("Should validate that the files exist", async () => { + const resolver = await ResolverImplementation.create( + FIXTURE_HARDHAT_PROJECT_ROOT, + [], + ); + + await assertRejectsWithHardhatError( + resolver.resolveProjectFile( + path.join(FIXTURE_HARDHAT_PROJECT_ROOT, "nope.sol"), + ), + HardhatError.ERRORS.SOLIDITY.RESOLVING_NONEXISTENT_PROJECT_FILE, + { + file: "nope.sol", + }, + ); + }); + }); + + describe("Imports resolution", () => { + describe("Without user remappings", () => { + let resolver: Resolver; + let contractsFileSol: ProjectResolvedFile; + + beforeEach(async () => { + resolver = await ResolverImplementation.create( + FIXTURE_HARDHAT_PROJECT_ROOT, + [], + ); + +
contractsFileSol = await resolver.resolveProjectFile( + path.resolve(FIXTURE_HARDHAT_PROJECT_ROOT, "contracts/File.sol"), + ); + }); + + describe("Imports from the project", () => { + describe("Imports of project files", () => { + describe("Relative imports", () => { + it("Should resolve them to project files with their path from the project root as sourceName", async () => { + assertResolvedProjectFile( + await resolver.resolveImport(contractsFileSol, "./File2.sol"), + "contracts/File2.sol", + ); + + assertResolvedProjectFile( + await resolver.resolveImport(contractsFileSol, "../File.sol"), + "File.sol", + ); + + assert.deepEqual(resolver.getRemappings(), []); + }); + + it("Should validate that the files exists with the right casing", async () => { + await assertRejectsWithHardhatError( + resolver.resolveImport(contractsFileSol, "./nope.sol"), + HardhatError.ERRORS.SOLIDITY.IMPORTED_FILE_DOESNT_EXIST, + { + importPath: "./nope.sol", + from: path.join("contracts", "File.sol"), + }, + ); + + await assertRejectsWithHardhatError( + resolver.resolveImport(contractsFileSol, "../file.sol"), + HardhatError.ERRORS.SOLIDITY.IMPORTED_FILE_WITH_ICORRECT_CASING, + { + importPath: "../file.sol", + from: path.join("contracts", "File.sol"), + correctCasing: "File.sol", + }, + ); + + assert.deepEqual(resolver.getRemappings(), []); + }); + }); + + describe("Direct imports", () => { + it("Should resolve them to project files with the direct import as sourceName", async () => { + assertResolvedProjectFile( + await resolver.resolveImport( + contractsFileSol, + "contracts/File.sol", + ), + "contracts/File.sol", + ); + + assertResolvedProjectFile( + await resolver.resolveImport( + contractsFileSol, + "contracts/File2.sol", + ), + "contracts/File2.sol", + ); + + assertResolvedProjectFile( + await resolver.resolveImport(contractsFileSol, "npm/File.sol"), + "npm/File.sol", + ); + + assertResolvedProjectFile( + await resolver.resolveImport(contractsFileSol, "File.sol"), + "File.sol", + ); + + assert.deepEqual(resolver.getRemappings(), []); + }); + + it("Should validate that the files exist with the right casing", async () => { + // Note that the imports here are considered local imports, + // otherwise they would be validated as npm imports + + await assertRejectsWithHardhatError( + resolver.resolveImport(contractsFileSol, "contracts/nope.sol"), + HardhatError.ERRORS.SOLIDITY.IMPORTED_FILE_DOESNT_EXIST, + { + importPath: "contracts/nope.sol", + from: path.join("contracts", "File.sol"), + }, + ); + + await assertRejectsWithHardhatError( + resolver.resolveImport(contractsFileSol, "contracts/file2.sol"), + HardhatError.ERRORS.SOLIDITY.IMPORTED_FILE_WITH_ICORRECT_CASING, + { + importPath: "contracts/file2.sol", + from: path.join("contracts", "File.sol"), + correctCasing: "contracts/File2.sol", + }, + ); + + assert.deepEqual(resolver.getRemappings(), []); + }); + + it("Should treat files in the project root as local imports, even if they don't exist", async () => { + assertResolvedProjectFile( + await resolver.resolveImport(contractsFileSol, "File.sol"), + "File.sol", + ); + + await assertRejectsWithHardhatError( + resolver.resolveImport(contractsFileSol, "nope.sol"), + HardhatError.ERRORS.SOLIDITY.IMPORTED_FILE_DOESNT_EXIST, + { + importPath: "nope.sol", + from: path.join("contracts", "File.sol"), + }, + ); + + assert.deepEqual(resolver.getRemappings(), []); + }); + + it("Should treat files whose first directory exists in the project root as local imports, even if they don't exist", async () => { + 
assertResolvedProjectFile( + await resolver.resolveImport( + contractsFileSol, + "hardhat/File.sol", + ), + "hardhat/File.sol", + ); + + await assertRejectsWithHardhatError( + resolver.resolveImport(contractsFileSol, "npm/nope.sol"), + HardhatError.ERRORS.SOLIDITY.IMPORTED_FILE_DOESNT_EXIST, + { + importPath: "npm/nope.sol", + from: path.join("contracts", "File.sol"), + }, + ); + + assert.deepEqual(resolver.getRemappings(), []); + }); + }); + }); + + describe("Imports of npm files", () => { + it("Should always treat hardhat/console.sol as an npm file even if other hardhat/ files are local", async () => { + const consoleSol = await resolver.resolveImport( + contractsFileSol, + "hardhat/console.sol", + ); + + assertNpmPackageResolvedFile( + consoleSol, + { + name: "hardhat", + version: "3.0.0", + rootSourceName: "npm/hardhat@3.0.0/", + }, + "monorepo/node_modules/hardhat", + "console.sol", + ); + + const hardhatFile = await resolver.resolveImport( + contractsFileSol, + "hardhat/File.sol", + ); + + assertResolvedProjectFile(hardhatFile, "hardhat/File.sol"); + + assert.deepEqual(resolver.getRemappings(), [ + { + context: "", + prefix: "hardhat/console.sol", + target: "npm/hardhat@3.0.0/console.sol", + }, + ]); + }); + + it("Should fail if the package is not installed", async () => { + await assertRejectsWithHardhatError( + resolver.resolveImport( + contractsFileSol, + "uninstalled-package/File.sol", + ), + HardhatError.ERRORS.SOLIDITY + .IMPORTED_NPM_DEPENDENCY_NOT_INSTALLED, + { + from: path.join("contracts", "File.sol"), + importPath: "uninstalled-package/File.sol", + }, + ); + }); + + it("Should fail if the package uses package.json#exports", async () => { + await assertRejectsWithHardhatError( + resolver.resolveImport(contractsFileSol, "exports/File.sol"), + HardhatError.ERRORS.SOLIDITY + .IMPORTED_NPM_DEPENDENCY_THAT_USES_EXPORTS, + { + from: path.join("contracts", "File.sol"), + importPath: "exports/File.sol", + }, + ); + }); + + it("Should validate that the files exist with the right casing", async () => { + await assertRejectsWithHardhatError( + resolver.resolveImport(contractsFileSol, "dependency/nope.sol"), + HardhatError.ERRORS.SOLIDITY.IMPORTED_FILE_DOESNT_EXIST, + { + from: path.join("contracts", "File.sol"), + importPath: "dependency/nope.sol", + }, + ); + + await assertRejectsWithHardhatError( + resolver.resolveImport(contractsFileSol, "dependency/file.sol"), + HardhatError.ERRORS.SOLIDITY.IMPORTED_FILE_WITH_ICORRECT_CASING, + { + from: path.join("contracts", "File.sol"), + importPath: "dependency/file.sol", + correctCasing: "File.sol", + }, + ); + }); + + describe("Of a monorepo file", () => { + it("Should be resolved with npm/package@local/path/from/root", async () => { + const localDependencyFile = await resolver.resolveImport( + contractsFileSol, + "local-dependency/contracts/File.sol", + ); + + assertNpmPackageResolvedFile( + localDependencyFile, + { + name: "local-dependency", + version: "local", + rootSourceName: "npm/local-dependency@local/", + }, + "monorepo/packages/local-dependency", + "contracts/File.sol", + ); + + assert.deepEqual(resolver.getRemappings(), [ + { + context: "", + prefix: "local-dependency/", + target: "npm/local-dependency@local/", + }, + ]); + }); + }); + + describe("Of a direct npm dependency file", () => { + it("Should be resolved with npm/package@version/path/from/root", async () => { + const directDependencyFile = await resolver.resolveImport( + contractsFileSol, + "dependency/contracts/File.sol", + ); + + assertNpmPackageResolvedFile( + 
directDependencyFile, + { + name: "dependency", + version: "2.0.0", + rootSourceName: "npm/dependency@2.0.0/", + }, + "monorepo/packages/hardhat-project/node_modules/dependency", + "contracts/File.sol", + ); + + assert.deepEqual(resolver.getRemappings(), [ + { + context: "", + prefix: "dependency/", + target: "npm/dependency@2.0.0/", + }, + ]); + }); + }); + + describe("Of a hoisted npm dependency file", () => { + it("Should be resolved with npm/package@version/path/from/root", async () => { + const hoistedDependencyFile = await resolver.resolveImport( + contractsFileSol, + "hoisted/File.sol", + ); + + assertNpmPackageResolvedFile( + hoistedDependencyFile, + { + name: "hoisted", + version: "8.0.0", + rootSourceName: "npm/hoisted@8.0.0/", + }, + "monorepo/node_modules/hoisted", + "File.sol", + ); + + assert.deepEqual(resolver.getRemappings(), [ + { + context: "", + prefix: "hoisted/", + target: "npm/hoisted@8.0.0/", + }, + ]); + }); + }); + + describe("Of a scoped dependency file", () => { + it("Should be resolved with npm/@scope/package@version/path/from/root", async () => { + const scopeDependencyFile = await resolver.resolveImport( + contractsFileSol, + "@scope/dependency/contracts/File.sol", + ); + + assertNpmPackageResolvedFile( + scopeDependencyFile, + { + name: "@scope/dependency", + version: "1.0.0", + rootSourceName: "npm/@scope/dependency@1.0.0/", + }, + "monorepo/node_modules/@scope/dependency", + "contracts/File.sol", + ); + + assert.deepEqual(resolver.getRemappings(), [ + { + context: "", + prefix: "@scope/dependency/", + target: "npm/@scope/dependency@1.0.0/", + }, + ]); + }); + }); + + describe("Of package that's installed with an alternative name", () => { + it("Should be resolved with npm/package@version/path/from/root using the package.json's name", async () => { + const otherNameDependencyFile = await resolver.resolveImport( + contractsFileSol, + "other-name/contracts/File.sol", + ); + + assertNpmPackageResolvedFile( + otherNameDependencyFile, + { + name: "real-name", + version: "6.0.0", + rootSourceName: "npm/real-name@6.0.0/", + }, + "monorepo/packages/hardhat-project/node_modules/other-name", + "contracts/File.sol", + ); + + assert.deepEqual(resolver.getRemappings(), [ + { + context: "", + prefix: "other-name/", + target: "npm/real-name@6.0.0/", + }, + ]); + }); + + it("Should not resolve imports to the real-name if not installed with that name", async () => { + await assertRejectsWithHardhatError( + resolver.resolveImport( + contractsFileSol, + "real-name/contracts/File.sol", + ), + HardhatError.ERRORS.SOLIDITY + .IMPORTED_NPM_DEPENDENCY_NOT_INSTALLED, + { + from: path.join("contracts", "File.sol"), + importPath: "real-name/contracts/File.sol", + }, + ); + }); + }); + }); + }); + + describe("Imports from an npm package", () => { + describe("Imports of the own package files", () => { + describe("Relative imports", () => { + it("Should resolve it without needing a new remapping", async () => { + const dependencyFile = await resolver.resolveImport( + contractsFileSol, + "dependency/contracts/File.sol", + ); + + assert.deepEqual(resolver.getRemappings(), [ + { + context: "", + prefix: "dependency/", + target: "npm/dependency@2.0.0/", + }, + ]); + + const dependencyFileAgain = await resolver.resolveImport( + dependencyFile, + "./File.sol", + ); + + assert.equal(dependencyFile, dependencyFileAgain); + + const dependencyNpmFile = await resolver.resolveImport( + dependencyFile, + "../npm/File.sol", + ); + + assertNpmPackageResolvedFile( + dependencyNpmFile, + { + name: 
"dependency", + version: "2.0.0", + rootSourceName: "npm/dependency@2.0.0/", + }, + "monorepo/packages/hardhat-project/node_modules/dependency", + "npm/File.sol", + ); + + assert.deepEqual(resolver.getRemappings(), [ + { + context: "", + prefix: "dependency/", + target: "npm/dependency@2.0.0/", + }, + ]); + }); + }); + + describe("Direct imports", () => { + it("Should resolve it and create a new remapping to avoid clashes with the project's source names", async () => { + const dependencyFile = await resolver.resolveImport( + contractsFileSol, + "dependency/contracts/File.sol", + ); + + assert.deepEqual(resolver.getRemappings(), [ + { + context: "", + prefix: "dependency/", + target: "npm/dependency@2.0.0/", + }, + ]); + + const dependencyContractsFileSol = await resolver.resolveImport( + dependencyFile, + "contracts/File.sol", + ); + + assertNpmPackageResolvedFile( + dependencyContractsFileSol, + { + name: "dependency", + version: "2.0.0", + rootSourceName: "npm/dependency@2.0.0/", + }, + "monorepo/packages/hardhat-project/node_modules/dependency", + "contracts/File.sol", + ); + + const dependencyFileSol = await resolver.resolveImport( + dependencyFile, + "File.sol", + ); + + assertNpmPackageResolvedFile( + dependencyFileSol, + { + name: "dependency", + version: "2.0.0", + rootSourceName: "npm/dependency@2.0.0/", + }, + "monorepo/packages/hardhat-project/node_modules/dependency", + "File.sol", + ); + + assert.deepEqual(resolver.getRemappings(), [ + { + context: "", + prefix: "dependency/", + target: "npm/dependency@2.0.0/", + }, + { + context: "npm/dependency@2.0.0/", + prefix: "File.sol", + target: "npm/dependency@2.0.0/File.sol", + }, + { + context: "npm/dependency@2.0.0/", + prefix: "contracts/", + target: "npm/dependency@2.0.0/contracts/", + }, + ]); + }); + }); + }); + + describe("Imports of npm files", () => { + describe("Of a monorepo file", () => { + it("Should be resolved with npm/package@local/path/from/root", async () => { + const dependencyFile = await resolver.resolveImport( + contractsFileSol, + "dependency/contracts/File.sol", + ); + + assert.deepEqual(resolver.getRemappings(), [ + { + context: "", + prefix: "dependency/", + target: "npm/dependency@2.0.0/", + }, + ]); + + const monorepoFile = await resolver.resolveImport( + dependencyFile, + "local-dependency/contracts/File.sol", + ); + + assertNpmPackageResolvedFile( + monorepoFile, + { + name: "local-dependency", + version: "local", + rootSourceName: "npm/local-dependency@local/", + }, + "monorepo/packages/local-dependency", + "contracts/File.sol", + ); + + assert.deepEqual(resolver.getRemappings(), [ + { + context: "", + prefix: "dependency/", + target: "npm/dependency@2.0.0/", + }, + { + context: "npm/dependency@2.0.0/", + prefix: "local-dependency/", + target: "npm/local-dependency@local/", + }, + ]); + }); + }); + + describe("Of a direct npm dependency file", () => { + it("Should be resolved with npm/package@version/path/from/root", async () => { + const dependencyFile = await resolver.resolveImport( + contractsFileSol, + "dependency/contracts/File.sol", + ); + + const dependencyDependencyFile = await resolver.resolveImport( + dependencyFile, + "dependencydependency/File.sol", + ); + + assertNpmPackageResolvedFile( + dependencyDependencyFile, + { + name: "dependencydependency", + version: "7.8.9", + rootSourceName: "npm/dependencydependency@7.8.9/", + }, + "monorepo/packages/hardhat-project/node_modules/dependency/node_modules/dependencydependency", + "File.sol", + ); + + assert.deepEqual(resolver.getRemappings(), [ + 
{ + context: "", + prefix: "dependency/", + target: "npm/dependency@2.0.0/", + }, + { + context: "npm/dependency@2.0.0/", + prefix: "dependencydependency/", + target: "npm/dependencydependency@7.8.9/", + }, + ]); + }); + }); + + describe("Of a file within the hardhat project", () => { + it("Should resolve them to project files with the direct import as sourceName", async () => { + const dependencyFile = await resolver.resolveImport( + contractsFileSol, + "dependency/contracts/File.sol", + ); + + const localFile = await resolver.resolveImport( + dependencyFile, + "hardhat-project/File.sol", + ); + + assertResolvedProjectFile(localFile, "File.sol"); + + assert.deepEqual(resolver.getRemappings(), [ + { + context: "", + prefix: "dependency/", + target: "npm/dependency@2.0.0/", + }, + { + context: "npm/dependency@2.0.0/", + prefix: "hardhat-project/", + target: "", + }, + ]); + }); + }); + + describe("Of the same dependency than the hardhat project but a different version", () => { + it("Should be resolved with npm/package@version/path/from/root using the package.json's name", async () => { + const dependencyFile = await resolver.resolveImport( + contractsFileSol, + "dependency/contracts/File.sol", + ); + + const localDependencyFile = await resolver.resolveImport( + contractsFileSol, + "local-dependency/contracts/File.sol", + ); + + const localDependencyDependencyFile = + await resolver.resolveImport( + localDependencyFile, + "dependency/contracts/File.sol", + ); + + assert.notEqual(dependencyFile, localDependencyDependencyFile); + + assertNpmPackageResolvedFile( + dependencyFile, + { + name: "dependency", + version: "2.0.0", + rootSourceName: "npm/dependency@2.0.0/", + }, + "monorepo/packages/hardhat-project/node_modules/dependency", + "contracts/File.sol", + ); + + assertNpmPackageResolvedFile( + localDependencyDependencyFile, + { + name: "dependency", + version: "4.0.0", + rootSourceName: "npm/dependency@4.0.0/", + }, + "monorepo/packages/local-dependency/node_modules/dependency", + "contracts/File.sol", + ); + + assert.deepEqual(resolver.getRemappings(), [ + { + context: "", + prefix: "dependency/", + target: "npm/dependency@2.0.0/", + }, + { + context: "", + prefix: "local-dependency/", + target: "npm/local-dependency@local/", + }, + { + context: "npm/local-dependency@local/", + prefix: "dependency/", + target: "npm/dependency@4.0.0/", + }, + ]); + }); + }); + }); + }); + }); + + describe("With user remappings", () => { + describe("Resolver initialization", () => { + it("Should validate forbid remappings with npm/... context", async () => {}); + + it.todo("Should allow remappings with npm/... targets"); + + it.todo( + "Should validate and resolve npm/... targets of npm dependencies", + ); + + it.todo( + "Should validate and resolve npm/... 
targets of monorepo dependencies", + ); + }); + + describe("Imports from the project", () => { + describe("Imports into project files", () => { + it.todo( + "Should throw if the resulting sourceName would be considered an npm import if used as a direct import", + ); + + it.todo( + "Should validate that the resulting sourceName exists and has the correct casing as a relative path from the project root", + ); + + it.todo("Should resolve it to the remapped sourceName"); + }); + + describe("Imports into npm files", () => { + describe("Using the npm/ prefix", () => { + it.todo( + "Should be equivalent to just importing that file through npm", + ); + }); + }); + }); + + describe("Imports from an npm package", () => { + describe("Direct imports", () => { + it.todo( + "It should not be affected by a user remapping, even if the prefix matches", + ); + }); + }); + }); + + describe("Edge cases", () => { + describe("Duplicated dependency in the monorepo", () => { + it.todo("Should always be resolved to whatever was resolved first"); + }); + }); + }); +}); diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/remappings.ts b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/remappings.ts new file mode 100644 index 0000000000..6caab8d6d0 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/remappings.ts @@ -0,0 +1,212 @@ +import assert from "node:assert/strict"; +import { describe, it } from "node:test"; + +import { + applyValidRemapping, + formatRemapping, + parseRemappingString, + selectBestRemapping, +} from "../../../../../../src/internal/builtin-plugins/solidity/build-system/resolver/remappings.js"; + +describe("Remappings", () => { + describe("parseRemappingString", () => { + it("Should parse valid remappings correctly", () => { + assert.deepEqual(parseRemappingString("a:b=c"), { + context: "a", + prefix: "b", + target: "c", + }); + + assert.deepEqual(parseRemappingString("a:b="), { + context: "a", + prefix: "b", + target: "", + }); + + assert.deepEqual(parseRemappingString("a:/="), { + context: "a", + prefix: "/", + target: "", + }); + + assert.deepEqual(parseRemappingString(":b=c"), { + context: "", + prefix: "b", + target: "c", + }); + + assert.deepEqual(parseRemappingString("b=c"), { + context: "", + prefix: "b", + target: "c", + }); + + assert.deepEqual(parseRemappingString("b="), { + context: "", + prefix: "b", + target: "", + }); + }); + + it("Should return undefined on invalid remappings", () => { + assert.equal(parseRemappingString("a:=c"), undefined); + + assert.equal(parseRemappingString("a:c"), undefined); + + assert.equal(parseRemappingString("a/c"), undefined); + }); + }); + + describe("selectBestRemapping", () => { + describe("Without context", () => { + it("Should select the remapping with the longest matching prefix", () => { + const best = selectBestRemapping("from.sol", "directImport.sol", [ + { context: "", prefix: "from", target: "1" }, + { context: "", prefix: "dir", target: "2" }, + { context: "", prefix: "direct", target: "3" }, + { context: "", prefix: "directImp", target: "4" }, + ]); + + assert.deepEqual(best, { + context: "", + prefix: "directImp", + target: "4", + }); + }); + + it("Should keep the last matching one if there are many", () => { + const best = selectBestRemapping("from.sol", "directImport.sol", [ + { context: "", prefix: "direct", target: "1" }, + { context: "", prefix: "directImp", target: "2" }, + { context: "", prefix: "directImp", target: "3" }, + 
]); + + assert.deepEqual(best, { + context: "", + prefix: "directImp", + target: "3", + }); + }); + + it("Should return undefined if there are no matching remappings", () => { + const best = selectBestRemapping("from.sol", "directImport.sol", [ + { context: "", prefix: "a", target: "1" }, + { context: "", prefix: "foo/", target: "2" }, + { context: "", prefix: "/not", target: "3" }, + ]); + + assert.deepEqual(best, undefined); + }); + }); + + describe("With context", () => { + it("Should select the remapping with the longest context whose prefix also matches", () => { + const best = selectBestRemapping("from.sol", "directImport.sol", [ + { context: "", prefix: "d", target: "1" }, + { context: "f", prefix: "d", target: "2" }, + { context: "fr", prefix: "d", target: "3" }, + { context: "fr", prefix: "not", target: "4" }, + { context: "f", prefix: "d", target: "5" }, + ]); + + assert.deepEqual(best, { context: "fr", prefix: "d", target: "3" }); + }); + + it("If multiple match the context with equal length, select the remapping with the longest prefix that matches", () => { + const best = selectBestRemapping("from.sol", "directImport.sol", [ + { context: "fr", prefix: "d", target: "1" }, + { context: "fr", prefix: "di", target: "2" }, + { context: "fr", prefix: "d", target: "3" }, + ]); + + assert.deepEqual(best, { context: "fr", prefix: "di", target: "2" }); + }); + + it("Context should have more priority than prefix", () => { + const best = selectBestRemapping("from.sol", "directImport.sol", [ + { context: "f", prefix: "d", target: "1" }, + { context: "not", prefix: "directImport.sol", target: "2" }, + ]); + + assert.deepEqual(best, { context: "f", prefix: "d", target: "1" }); + }); + + it("If there are multiple candidates pick the lastest one", () => { + const best = selectBestRemapping("from.sol", "directImport.sol", [ + { context: "fr", prefix: "di", target: "1" }, + { context: "fr", prefix: "di", target: "2" }, + { context: "fr", prefix: "di", target: "3" }, + ]); + + assert.deepEqual(best, { context: "fr", prefix: "di", target: "3" }); + }); + + it("If no remapping matches the context, return undefined", () => { + const best = selectBestRemapping("from.sol", "directImport.sol", [ + { context: "no", prefix: "directImport.sol", target: "1" }, + { context: "/", prefix: "di", target: "2" }, + { context: "boo", prefix: "di", target: "3" }, + ]); + + assert.deepEqual(best, undefined); + }); + }); + }); + + describe("Remappings application", () => { + it("Should apply valid remappings (prefix matches) correctly", () => { + assert.equal( + applyValidRemapping("contracts/A.sol", { + context: "", + prefix: "contracts/", + target: "lib/contracts/", + }), + "lib/contracts/A.sol", + ); + + assert.equal( + applyValidRemapping("contracts/A.sol", { + context: "", + prefix: "con", + target: "CON", + }), + "CONtracts/A.sol", + ); + + assert.equal( + applyValidRemapping("contracts/A.sol", { + context: "it-doesnt-matter", + prefix: "contracts/", + target: "", + }), + "A.sol", + ); + }); + }); + + describe("formatRemapping", () => { + it("Should format remappings without context correctly", () => { + assert.equal( + formatRemapping({ context: "", prefix: "a", target: "b" }), + "a=b", + ); + + assert.equal( + formatRemapping({ context: "", prefix: "a", target: "" }), + "a=", + ); + }); + + it("Should format remappings with context correctly", () => { + assert.equal( + formatRemapping({ context: "c", prefix: "a", target: "b" }), + "c:a=b", + ); + + assert.equal( + formatRemapping({ context: "c", prefix: "a", 
target: "" }), + "c:a=", + ); + }); + }); +}); diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/.gitignore b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/.gitignore new file mode 100644 index 0000000000..cf4bab9ddd --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/.gitignore @@ -0,0 +1 @@ +!node_modules diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/entirely-local/A.sol b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/entirely-local/A.sol new file mode 100644 index 0000000000..98acace5e1 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/entirely-local/A.sol @@ -0,0 +1,2 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.20; diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/@scope/dependency/contracts/File.sol b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/@scope/dependency/contracts/File.sol new file mode 100644 index 0000000000..9936eec1e4 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/@scope/dependency/contracts/File.sol @@ -0,0 +1 @@ +monorepo/node_modules/@scope/dependency/contracts/File.sol diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/@scope/dependency/package.json b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/@scope/dependency/package.json new file mode 100644 index 0000000000..3ddf20908c --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/@scope/dependency/package.json @@ -0,0 +1,4 @@ +{ + "name": "@scope/dependency", + "version": "1.0.0" +} diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/exports/package.json b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/exports/package.json new file mode 100644 index 0000000000..53a33a48a3 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/exports/package.json @@ -0,0 +1,7 @@ +{ + "name": "hardhat", + "version": "3.0.0", + "exports": { + ".": "./fool.js" + } +} diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hardhat-project b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hardhat-project new file mode 120000 index 0000000000..c06af93c62 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hardhat-project @@ -0,0 +1 @@ +../packages/hardhat-project \ No newline at end of file diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hardhat/console.sol 
b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hardhat/console.sol new file mode 100644 index 0000000000..5d75e6588b --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hardhat/console.sol @@ -0,0 +1 @@ +monorepo/node_modules/hardhat/console.sol diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hardhat/package.json b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hardhat/package.json new file mode 100644 index 0000000000..d1dae395d3 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hardhat/package.json @@ -0,0 +1,4 @@ +{ + "name": "hardhat", + "version": "3.0.0" +} diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hoisted/File.sol b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hoisted/File.sol new file mode 100644 index 0000000000..10436a1183 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hoisted/File.sol @@ -0,0 +1 @@ +monorepo/node_modules/hoisted/File.sol diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hoisted/package.json b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hoisted/package.json new file mode 100644 index 0000000000..165430e591 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/hoisted/package.json @@ -0,0 +1,4 @@ +{ + "name": "hoisted", + "version": "8.0.0" +} diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/local-dependency b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/local-dependency new file mode 120000 index 0000000000..d4325fd94b --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/node_modules/local-dependency @@ -0,0 +1 @@ +../packages/local-dependency \ No newline at end of file diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/File.sol b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/File.sol new file mode 100644 index 0000000000..14558054cf --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/File.sol @@ -0,0 +1 @@ +monorepo/packages/hardhat-project/File.sol diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/contracts/File.sol b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/contracts/File.sol new file mode 100644 index 0000000000..02e27bb10f --- /dev/null +++ 
b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/contracts/File.sol @@ -0,0 +1 @@ +monorepo/packages/hardhat-project/contracts/File.sol diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/contracts/File2.sol b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/contracts/File2.sol new file mode 100644 index 0000000000..8f6c2abf44 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/contracts/File2.sol @@ -0,0 +1 @@ +monorepo/packages/hardhat-project/contracts/File2.sol diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/hardhat/File.sol b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/hardhat/File.sol new file mode 100644 index 0000000000..ac6a04daa7 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/hardhat/File.sol @@ -0,0 +1 @@ +monorepo/packages/hardhat-project/hardhat/File.sol diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/File.sol b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/File.sol new file mode 100644 index 0000000000..ad6c353193 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/File.sol @@ -0,0 +1 @@ +monorepo/packages/hardhat-project/node_modules/dependency/File.sol diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/contracts/File.sol b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/contracts/File.sol new file mode 100644 index 0000000000..04dbfd75eb --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/contracts/File.sol @@ -0,0 +1 @@ +monorepo/packages/hardhat-project/node_modules/dependency/contracts/File.sol diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/node_modules/dependencydependency/File.sol b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/node_modules/dependencydependency/File.sol new file mode 100644 index 0000000000..3bc5538605 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/node_modules/dependencydependency/File.sol @@ -0,0 +1 @@ +monorepo/packages/hardhat-project/node_modules/dependency/node_modules/dependencydependency/File.sol diff --git 
a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/node_modules/dependencydependency/package.json b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/node_modules/dependencydependency/package.json new file mode 100644 index 0000000000..8ff330fb61 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/node_modules/dependencydependency/package.json @@ -0,0 +1,4 @@ +{ + "name": "dependencydependency", + "version": "7.8.9" +} diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/npm/File.sol b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/npm/File.sol new file mode 100644 index 0000000000..8e69251fe1 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/npm/File.sol @@ -0,0 +1 @@ +monorepo/packages/hardhat-project/node_modules/dependency/npm/File.sol diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/package.json b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/package.json new file mode 100644 index 0000000000..5358b31ea7 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/dependency/package.json @@ -0,0 +1,4 @@ +{ + "name": "dependency", + "version": "2.0.0" +} diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/other-name/contracts/File.sol b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/other-name/contracts/File.sol new file mode 100644 index 0000000000..17e3647028 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/other-name/contracts/File.sol @@ -0,0 +1 @@ +monorepo/packages/hardhat-project/node_modules/other-name/contracts/File.sol diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/other-name/package.json b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/other-name/package.json new file mode 100644 index 0000000000..82f52b1a68 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/node_modules/other-name/package.json @@ -0,0 +1,4 @@ +{ + "name": "real-name", + "version": "6.0.0" +} diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/npm/File.sol 
b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/npm/File.sol new file mode 100644 index 0000000000..57f6c67cb7 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/npm/File.sol @@ -0,0 +1 @@ +monorepo/packages/hardhat-project/npm/File.sol diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/package.json b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/package.json new file mode 100644 index 0000000000..07d0abbf42 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/hardhat-project/package.json @@ -0,0 +1,4 @@ +{ + "name": "hardhat-project", + "version": "3.0.0" +} diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/contracts/File.sol b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/contracts/File.sol new file mode 100644 index 0000000000..31f07aac63 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/contracts/File.sol @@ -0,0 +1 @@ +monorepo/packages/local-dependency/contracts/File.sol diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/node_modules/dependency/contracts/File.sol b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/node_modules/dependency/contracts/File.sol new file mode 100644 index 0000000000..2f2143539b --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/node_modules/dependency/contracts/File.sol @@ -0,0 +1 @@ +monorepo/packages/local-dependency/node_modules/dependency/contracts/File.sol diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/node_modules/dependency/package.json b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/node_modules/dependency/package.json new file mode 100644 index 0000000000..c18c69f376 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/node_modules/dependency/package.json @@ -0,0 +1,4 @@ +{ + "name": "dependency", + "version": "4.0.0" +} diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/package.json b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/package.json new file mode 100644 index 0000000000..46c87489d1 --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/build-system/resolver/test-fixtures/monorepo/packages/local-dependency/package.json @@ -0,0 +1,4 @@ +{ + "name": "local-dependency", + "version": "5.0.0" +} diff --git a/v-next/hardhat/test/internal/builtin-plugins/solidity/config.ts 
b/v-next/hardhat/test/internal/builtin-plugins/solidity/config.ts new file mode 100644 index 0000000000..9f29b9dd8e --- /dev/null +++ b/v-next/hardhat/test/internal/builtin-plugins/solidity/config.ts @@ -0,0 +1,270 @@ +import assert from "node:assert/strict"; +import { describe, it } from "node:test"; + +import { validateSolidityUserConfig } from "../../../../src/internal/builtin-plugins/solidity/config.js"; + +describe("solidity plugin config validation", () => { + describe("sources paths", () => { + it("Should reject invalid values in `config.paths.sources`", () => { + assert.deepEqual( + validateSolidityUserConfig({ + paths: 123, + }), + [ + { + message: "Expected object, received number", + path: ["paths"], + }, + ], + ); + + assert.deepEqual( + validateSolidityUserConfig({ + paths: { + sources: 123, + }, + }), + [ + { + message: + "Expected a string, an array of strings, or an object with an optional 'solidity' property", + path: ["paths", "sources"], + }, + ], + ); + + assert.deepEqual( + validateSolidityUserConfig({ + paths: { + sources: [], + }, + }), + [ + { + message: "Array must contain at least 1 element(s)", + path: ["paths", "sources"], + }, + ], + ); + + assert.deepEqual( + validateSolidityUserConfig({ + paths: { + sources: { + solidity: 123, + }, + }, + }), + [ + { + message: "Expected a string or an array of strings", + path: ["paths", "sources", "solidity"], + }, + ], + ); + + assert.deepEqual( + validateSolidityUserConfig({ + paths: { + sources: { + solidity: {}, + }, + }, + }), + [ + { + message: "Expected a string or an array of strings", + path: ["paths", "sources", "solidity"], + }, + ], + ); + + assert.deepEqual( + validateSolidityUserConfig({ + paths: { + sources: { + solidity: [], + }, + }, + }), + [ + { + message: "Array must contain at least 1 element(s)", + path: ["paths", "sources", "solidity"], + }, + ], + ); + }); + + it("Should accept valid values in `config.paths.sources`", () => { + assert.deepEqual(validateSolidityUserConfig({}), []); + + assert.deepEqual(validateSolidityUserConfig({ paths: {} }), []); + + assert.deepEqual( + validateSolidityUserConfig({ paths: { sources: "contracts" } }), + [], + ); + + assert.deepEqual( + validateSolidityUserConfig({ paths: { sources: ["contracts"] } }), + [], + ); + + assert.deepEqual( + validateSolidityUserConfig({ paths: { sources: {} } }), + [], + ); + + assert.deepEqual( + validateSolidityUserConfig({ + paths: { + sources: { + solidity: "contracts", + }, + }, + }), + [], + ); + + assert.deepEqual( + validateSolidityUserConfig({ + paths: { + sources: { + solidity: ["contracts"], + }, + }, + }), + [], + ); + }); + }); + + describe("solidity config", () => { + it("Should reject invalid values in `config.solidity`", () => { + assert.deepEqual( + validateSolidityUserConfig({ + solidity: 123, + }), + [ + { + message: + "Expected a version string, an array of version strings, or an object cofiguring one or more versions of Solidity or multiple build profiles", + path: ["solidity"], + }, + ], + ); + + assert.deepEqual( + validateSolidityUserConfig({ + solidity: [], + }), + [ + { + message: "Array must contain at least 1 element(s)", + path: ["solidity"], + }, + ], + ); + + assert.deepEqual( + validateSolidityUserConfig({ + solidity: {}, + }), + [ + { + message: + "Expected a version string, an array of version strings, or an object cofiguring one or more versions of Solidity or multiple build profiles", + path: ["solidity"], + }, + ], + ); + }); + + it("Should reject clashes between Solidity config types", () => { + 
+      assert.deepEqual(
+        validateSolidityUserConfig({
+          solidity: {
+            version: "0.8.0",
+            compilers: 123,
+          },
+        }),
+        [
+          {
+            message: "This field is incompatible with `version`",
+            path: ["solidity", "compilers"],
+          },
+        ],
+      );
+
+      assert.deepEqual(
+        validateSolidityUserConfig({
+          solidity: {
+            version: "0.8.0",
+            profiles: 123,
+          },
+        }),
+        [
+          {
+            message: "This field is incompatible with `version`",
+            path: ["solidity", "profiles"],
+          },
+        ],
+      );
+
+      assert.deepEqual(
+        validateSolidityUserConfig({
+          solidity: {
+            compilers: [
+              {
+                version: "0.8.0",
+              },
+            ],
+            profiles: 123,
+          },
+        }),
+        [
+          {
+            message: "This field is incompatible with `compilers`",
+            path: ["solidity", "profiles"],
+          },
+        ],
+      );
+    });
+
+    it.todo("Should reject invalid SingleVersionSolidityUserConfig values");
+
+    it.todo("Should reject invalid MultiVersionSolidityUserConfig values");
+
+    it.todo("Should reject invalid BuildProfilesSolidityUserConfig values");
+
+    it("Should accept solidity version strings", () => {
+      assert.deepEqual(validateSolidityUserConfig({ solidity: "0.8.0" }), []);
+    });
+
+    it("Should accept an array of solidity version strings", () => {
+      assert.deepEqual(
+        validateSolidityUserConfig({ solidity: ["0.8.0", "0.8.1"] }),
+        [],
+      );
+    });
+
+    it.todo("Should accept a SingleVersionSolidityUserConfig value");
+
+    it.todo("Should accept a MultiVersionSolidityUserConfig value");
+
+    it.todo("Should accept a BuildProfilesSolidityUserConfig value");
+  });
+});
+
+describe("solidity plugin config resolution", () => {
+  it.todo("should resolve a config with a single version string", () => {});
+
+  it.todo("should resolve a config with multiple version strings", () => {});
+
+  it.todo("should resolve a SingleVersionSolidityUserConfig value", () => {});
+
+  it.todo("should resolve a MultiVersionSolidityUserConfig value", () => {});
+
+  it.todo("should resolve a BuildProfilesSolidityUserConfig value", () => {});
+});
diff --git a/v-next/hardhat/test/internal/cli/main.ts b/v-next/hardhat/test/internal/cli/main.ts
index eae70dcbae..ede9eb2676 100644
--- a/v-next/hardhat/test/internal/cli/main.ts
+++ b/v-next/hardhat/test/internal/cli/main.ts
@@ -223,7 +223,7 @@ Usage: hardhat [GLOBAL OPTIONS] [SUBTASK] [TASK OPTIONS] [--] [TASK ARGUM
 AVAILABLE TASKS:
 
   clean                    Clears the cache and deletes all artifacts
-  compile                  Compiles the entire project, building all artifacts
+  compile                  Compiles your project
   console                  Opens a hardhat console
   run                      Runs a user-defined script after compiling the project
   task                     A task that uses arg1
@@ -231,6 +231,7 @@ AVAILABLE TASKS:
 
 GLOBAL OPTIONS:
 
+  --build-profile          The build profile to use
   --config                 A Hardhat config file.
   --help                   Shows this message, or a task's help if its name is provided.
   --init                   Initializes a Hardhat project.
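For orientation between the two files above: validateSolidityUserConfig returns a plain array of { message, path } objects, and an empty array when the user config is valid; that is the only contract these tests rely on. A small self-contained TypeScript sketch of the same call pattern, reusing assertions that already appear verbatim in the test file above (the relative import path is the one that test file itself uses):

import assert from "node:assert/strict";

import { validateSolidityUserConfig } from "../../../../src/internal/builtin-plugins/solidity/config.js";

// A valid config (a single version string or an array of versions) yields no errors.
assert.deepEqual(validateSolidityUserConfig({ solidity: "0.8.0" }), []);
assert.deepEqual(validateSolidityUserConfig({ solidity: ["0.8.0", "0.8.1"] }), []);

// An invalid config reports the offending config path alongside a human-readable message.
assert.deepEqual(validateSolidityUserConfig({ solidity: [] }), [
  { message: "Array must contain at least 1 element(s)", path: ["solidity"] },
]);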
diff --git a/v-next/hardhat/test/internal/example-mock-artifacts-plugin-using-test.ts b/v-next/hardhat/test/internal/example-mock-artifacts-plugin-using-test.ts
index 05d1a4275a..de01feb3d5 100644
--- a/v-next/hardhat/test/internal/example-mock-artifacts-plugin-using-test.ts
+++ b/v-next/hardhat/test/internal/example-mock-artifacts-plugin-using-test.ts
@@ -14,7 +14,7 @@ describe("createMockHardhatRuntimeEnvironment", () => {
   it("should allow plugins that leverage the artifact hre object", async () => {
     // arrange
     const exampleArtifact: Artifact = {
-      _format: "hh-sol-artifact-1",
+      _format: "hh3-artifact-1",
       contractName: "MyContract",
       sourceName: "source.sol",
       abi: [],
@@ -22,6 +22,7 @@
       linkReferences: {},
       deployedBytecode: "0x",
       deployedLinkReferences: {},
+      immutableReferences: {},
     };
 
     const myPlugin: HardhatPlugin = {
diff --git a/v-next/hardhat/test/internal/hre-intialization.ts b/v-next/hardhat/test/internal/hre-intialization.ts
index 80d5bcd9dd..aff96409c0 100644
--- a/v-next/hardhat/test/internal/hre-intialization.ts
+++ b/v-next/hardhat/test/internal/hre-intialization.ts
@@ -184,6 +184,7 @@ describe("HRE intialization", () => {
       const configPath = await getRealPath("hardhat.config.ts");
 
       assert.deepEqual(hre.globalOptions, {
+        buildProfile: "default",
         config: configPath,
        help: false,
        init: false,
diff --git a/v-next/hardhat/test/test-helpers/create-mock-hardhat-runtime-environment.ts b/v-next/hardhat/test/test-helpers/create-mock-hardhat-runtime-environment.ts
index c67458e154..1a15a4e5a6 100644
--- a/v-next/hardhat/test/test-helpers/create-mock-hardhat-runtime-environment.ts
+++ b/v-next/hardhat/test/test-helpers/create-mock-hardhat-runtime-environment.ts
@@ -16,13 +16,15 @@ export async function createMockHardhatRuntimeEnvironment(
   userProvidedGlobalOptions: Partial<GlobalOptions> = {},
   projectRoot?: string,
   unsafeOptions: UnsafeHardhatRuntimeEnvironmentOptions = {},
-): Promise {
+): Promise {
+  /* eslint-disable-next-line @typescript-eslint/consistent-type-assertions --
+  We know that the mockArtifactPlugin sets `hre.artifacts` to `MockArtifactsManager */
   return createHardhatRuntimeEnvironment(
     { ...config, plugins: [mockArtifactsPlugin, ...(config.plugins ?? [])] },
     userProvidedGlobalOptions,
     projectRoot,
     unsafeOptions,
-  );
+  ) as Promise;
 }
 
 const mockArtifactsPlugin: HardhatPlugin = {
diff --git a/v-next/hardhat/test/test-helpers/mock-artifacts-manager.ts b/v-next/hardhat/test/test-helpers/mock-artifacts-manager.ts
index dbdc36456b..e5ce0937ab 100644
--- a/v-next/hardhat/test/test-helpers/mock-artifacts-manager.ts
+++ b/v-next/hardhat/test/test-helpers/mock-artifacts-manager.ts
@@ -2,8 +2,7 @@ import type {
   ArtifactsManager,
   Artifact,
   BuildInfo,
-  CompilerInput,
-  CompilerOutput,
+  GetAtifactByName,
 } from "../../src/types/artifacts.js";
 
 import {
@@ -18,9 +17,9 @@ export class MockArtifactsManager implements ArtifactsManager {
     this.#artifacts = new Map();
   }
 
-  public async readArtifact(
-    contractNameOrFullyQualifiedName: string,
-  ): Promise<Artifact> {
+  public async readArtifact<ContractNameT extends string>(
+    contractNameOrFullyQualifiedName: ContractNameT,
+  ): Promise<GetAtifactByName<ContractNameT>> {
     const artifact = this.#artifacts.get(contractNameOrFullyQualifiedName);
 
     assertHardhatInvariant(
@@ -29,7 +28,10 @@ export class MockArtifactsManager implements ArtifactsManager {
       contractNameOrFullyQualifiedName,
     );
 
-    return artifact;
+    /* eslint-disable-next-line @typescript-eslint/consistent-type-assertions --
+    We are asserting that the artifact is of the correct type, which won't be
+    really used during tests. */
+    return artifact as GetAtifactByName<ContractNameT>;
   }
 
   public artifactExists(
@@ -76,17 +78,6 @@ export class MockArtifactsManager implements ArtifactsManager {
     this.#artifacts.set(artifact.contractName, artifact);
   }
 
-  public saveBuildInfo(
-    _solcVersion: string,
-    _solcLongVersion: string,
-    _input: CompilerInput,
-    _output: CompilerOutput,
-  ): Promise {
-    throw new HardhatError(HardhatError.ERRORS.INTERNAL.NOT_IMPLEMENTED_ERROR, {
-      message: "Not implemented in MockArtifactsManager",
-    });
-  }
-
   public formArtifactPathFromFullyQualifiedName(
     _fullyQualifiedName: string,
   ): string {
diff --git a/v-next/hardhat/tsconfig.json b/v-next/hardhat/tsconfig.json
index 3540c7add4..33039238a0 100644
--- a/v-next/hardhat/tsconfig.json
+++ b/v-next/hardhat/tsconfig.json
@@ -1,9 +1,6 @@
 {
   "extends": "../../config-v-next/tsconfig.json",
   "references": [
-    {
-      "path": "../hardhat-build-system"
-    },
     {
       "path": "../hardhat-errors"
     },