diff --git a/mock-registry/package.json b/mock-registry/package.json
index fa885b2cdb548..5f9598b08e4fb 100644
--- a/mock-registry/package.json
+++ b/mock-registry/package.json
@@ -56,7 +56,7 @@
     "json-stringify-safe": "^5.0.1",
     "nock": "^13.3.0",
     "npm-package-arg": "^11.0.0",
-    "pacote": "^16.0.0",
+    "pacote": "^17.0.0",
     "tap": "^16.3.4"
   }
 }
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index c71880acbc340..7bc0a96e9f6ef 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -35,8 +35,15 @@
 !/@npmcli/metavuln-calculator
 !/@npmcli/metavuln-calculator/node_modules/
 /@npmcli/metavuln-calculator/node_modules/*
+!/@npmcli/metavuln-calculator/node_modules/@npmcli/
+/@npmcli/metavuln-calculator/node_modules/@npmcli/*
+!/@npmcli/metavuln-calculator/node_modules/@npmcli/git
 !/@npmcli/metavuln-calculator/node_modules/cacache
+!/@npmcli/metavuln-calculator/node_modules/hosted-git-info
 !/@npmcli/metavuln-calculator/node_modules/lru-cache
+!/@npmcli/metavuln-calculator/node_modules/npm-package-arg
+!/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest
+!/@npmcli/metavuln-calculator/node_modules/pacote
 !/@npmcli/name-from-folder
 !/@npmcli/node-gyp
 !/@npmcli/package-json
@@ -245,14 +252,14 @@
 !/pacote
 !/pacote/node_modules/
 /pacote/node_modules/*
-!/pacote/node_modules/@npmcli/
-/pacote/node_modules/@npmcli/*
-!/pacote/node_modules/@npmcli/git
-!/pacote/node_modules/cacache
-!/pacote/node_modules/hosted-git-info
 !/pacote/node_modules/lru-cache
-!/pacote/node_modules/npm-package-arg
+!/pacote/node_modules/normalize-package-data
 !/pacote/node_modules/npm-pick-manifest
+!/pacote/node_modules/npm-pick-manifest/node_modules/
+/pacote/node_modules/npm-pick-manifest/node_modules/*
+!/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info
+!/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg
+!/pacote/node_modules/read-package-json
 !/parse-conflict-json
 !/path-is-absolute
 !/path-key
diff --git a/node_modules/pacote/node_modules/@npmcli/git/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/LICENSE
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/LICENSE
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/clone.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/clone.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/clone.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/clone.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/errors.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/errors.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/errors.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/errors.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/find.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/find.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/find.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/find.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/index.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is-clean.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is-clean.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/is.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/is.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/is.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/lines-to-revs.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/lines-to-revs.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/make-error.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/make-error.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/opts.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/opts.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/opts.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/opts.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/revs.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/revs.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/revs.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/revs.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/spawn.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/spawn.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/utils.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/utils.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/utils.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/utils.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/which.js b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/which.js
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/lib/which.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/lib/which.js
diff --git a/node_modules/pacote/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/@npmcli/git/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git/package.json
diff --git a/node_modules/pacote/node_modules/hosted-git-info/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/hosted-git-info/LICENSE
rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/LICENSE
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/from-url.js
similarity index 100%
rename from node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/from-url.js
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/hosts.js
similarity index 100%
rename from node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/hosts.js
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/hosted-git-info/lib/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/index.js
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/parse-url.js
similarity index 100%
rename from node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/lib/parse-url.js
diff --git a/node_modules/pacote/node_modules/hosted-git-info/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/hosted-git-info/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info/package.json
diff --git a/node_modules/pacote/node_modules/npm-package-arg/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/npm-package-arg/LICENSE
rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/LICENSE
diff --git a/node_modules/pacote/node_modules/npm-package-arg/lib/npa.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/lib/npa.js
similarity index 100%
rename from node_modules/pacote/node_modules/npm-package-arg/lib/npa.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/lib/npa.js
diff --git a/node_modules/pacote/node_modules/npm-package-arg/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/npm-package-arg/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg/package.json
diff --git a/node_modules/pacote/node_modules/cacache/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/LICENSE.md
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/LICENSE.md
rename to node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/LICENSE.md
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/lib/index.js
new file mode 100644
index 0000000000000..8dbd2721c8996
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/lib/index.js
@@ -0,0 +1,218 @@
+'use strict'
+
+const npa = require('npm-package-arg')
+const semver = require('semver')
+const { checkEngine } = require('npm-install-checks')
+const normalizeBin = require('npm-normalize-package-bin')
+
+const engineOk = (manifest, npmVersion, nodeVersion) => {
+  try {
+    checkEngine(manifest, npmVersion, nodeVersion)
+    return true
+  } catch (_) {
+    return false
+  }
+}
+
+const isBefore = (verTimes, ver, time) =>
+  !verTimes || !verTimes[ver] || Date.parse(verTimes[ver]) <= time
+
+const avoidSemverOpt = { includePrerelease: true, loose: true }
+const shouldAvoid = (ver, avoid) =>
+  avoid && semver.satisfies(ver, avoid, avoidSemverOpt)
+
+const decorateAvoid = (result, avoid) =>
+  result && shouldAvoid(result.version, avoid)
+    ? { ...result, _shouldAvoid: true }
+    : result
+
+const pickManifest = (packument, wanted, opts) => {
+  const {
+    defaultTag = 'latest',
+    before = null,
+    nodeVersion = process.version,
+    npmVersion = null,
+    includeStaged = false,
+    avoid = null,
+    avoidStrict = false,
+  } = opts
+
+  const { name, time: verTimes } = packument
+  const versions = packument.versions || {}
+
+  if (avoidStrict) {
+    const looseOpts = {
+      ...opts,
+      avoidStrict: false,
+    }
+
+    const result = pickManifest(packument, wanted, looseOpts)
+    if (!result || !result._shouldAvoid) {
+      return result
+    }
+
+    const caret = pickManifest(packument, `^${result.version}`, looseOpts)
+    if (!caret || !caret._shouldAvoid) {
+      return {
+        ...caret,
+        _outsideDependencyRange: true,
+        _isSemVerMajor: false,
+      }
+    }
+
+    const star = pickManifest(packument, '*', looseOpts)
+    if (!star || !star._shouldAvoid) {
+      return {
+        ...star,
+        _outsideDependencyRange: true,
+        _isSemVerMajor: true,
+      }
+    }
+
+    throw Object.assign(new Error(`No avoidable versions for ${name}`), {
+      code: 'ETARGET',
+      name,
+      wanted,
+      avoid,
+      before,
+      versions: Object.keys(versions),
+    })
+  }
+
+  const staged = (includeStaged && packument.stagedVersions &&
+    packument.stagedVersions.versions) || {}
+  const restricted = (packument.policyRestrictions &&
+    packument.policyRestrictions.versions) || {}
+
+  const time = before && verTimes ? +(new Date(before)) : Infinity
+  const spec = npa.resolve(name, wanted || defaultTag)
+  const type = spec.type
+  const distTags = packument['dist-tags'] || {}
+
+  if (type !== 'tag' && type !== 'version' && type !== 'range') {
+    throw new Error('Only tag, version, and range are supported')
+  }
+
+  // if the type is 'tag', and not just the implicit default, then it must
+  // be that exactly, or nothing else will do.
+  if (wanted && type === 'tag') {
+    const ver = distTags[wanted]
+    // if the version in the dist-tags is before the before date, then
+    // we use that.  Otherwise, we get the highest precedence version
+    // prior to the dist-tag.
+    if (isBefore(verTimes, ver, time)) {
+      return decorateAvoid(versions[ver] || staged[ver] || restricted[ver], avoid)
+    } else {
+      return pickManifest(packument, `<=${ver}`, opts)
+    }
+  }
+
+  // similarly, if a specific version, then only that version will do
+  if (wanted && type === 'version') {
+    const ver = semver.clean(wanted, { loose: true })
+    const mani = versions[ver] || staged[ver] || restricted[ver]
+    return isBefore(verTimes, ver, time) ? decorateAvoid(mani, avoid) : null
+  }
+
+  // ok, sort based on our heuristics, and pick the best fit
+  const range = type === 'range' ? wanted : '*'
+
+  // if the range is *, then we prefer the 'latest' if available
+  // but skip this if it should be avoided, in that case we have
+  // to try a little harder.
+  const defaultVer = distTags[defaultTag]
+  if (defaultVer &&
+      (range === '*' || semver.satisfies(defaultVer, range, { loose: true })) &&
+      !shouldAvoid(defaultVer, avoid)) {
+    const mani = versions[defaultVer]
+    if (mani && isBefore(verTimes, defaultVer, time)) {
+      return mani
+    }
+  }
+
+  // ok, actually have to sort the list and take the winner
+  const allEntries = Object.entries(versions)
+    .concat(Object.entries(staged))
+    .concat(Object.entries(restricted))
+    .filter(([ver, mani]) => isBefore(verTimes, ver, time))
+
+  if (!allEntries.length) {
+    throw Object.assign(new Error(`No versions available for ${name}`), {
+      code: 'ENOVERSIONS',
+      name,
+      type,
+      wanted,
+      before,
+      versions: Object.keys(versions),
+    })
+  }
+
+  const sortSemverOpt = { loose: true }
+  const entries = allEntries.filter(([ver, mani]) =>
+    semver.satisfies(ver, range, { loose: true }))
+    .sort((a, b) => {
+      const [vera, mania] = a
+      const [verb, manib] = b
+      const notavoida = !shouldAvoid(vera, avoid)
+      const notavoidb = !shouldAvoid(verb, avoid)
+      const notrestra = !restricted[a]
+      const notrestrb = !restricted[b]
+      const notstagea = !staged[a]
+      const notstageb = !staged[b]
+      const notdepra = !mania.deprecated
+      const notdeprb = !manib.deprecated
+      const enginea = engineOk(mania, npmVersion, nodeVersion)
+      const engineb = engineOk(manib, npmVersion, nodeVersion)
+      // sort by:
+      // - not an avoided version
+      // - not restricted
+      // - not staged
+      // - not deprecated and engine ok
+      // - engine ok
+      // - not deprecated
+      // - semver
+      return (notavoidb - notavoida) ||
+        (notrestrb - notrestra) ||
+        (notstageb - notstagea) ||
+        ((notdeprb && engineb) - (notdepra && enginea)) ||
+        (engineb - enginea) ||
+        (notdeprb - notdepra) ||
+        semver.rcompare(vera, verb, sortSemverOpt)
+    })
+
+  return decorateAvoid(entries[0] && entries[0][1], avoid)
+}
+
+module.exports = (packument, wanted, opts = {}) => {
+  const mani = pickManifest(packument, wanted, opts)
+  const picked = mani && normalizeBin(mani)
+  const policyRestrictions = packument.policyRestrictions
+  const restricted = (policyRestrictions && policyRestrictions.versions) || {}
+
+  if (picked && !restricted[picked.version]) {
+    return picked
+  }
+
+  const { before = null, defaultTag = 'latest' } = opts
+  const bstr = before ? new Date(before).toLocaleString() : ''
+  const { name } = packument
+  const pckg = `${name}@${wanted}` +
+    (before ? ` with a date before ${bstr}` : '')
+
+  const isForbidden = picked && !!restricted[picked.version]
+  const polMsg = isForbidden ? policyRestrictions.message : ''
+
+  const msg = !isForbidden ? `No matching version found for ${pckg}.`
+    : `Could not download ${pckg} due to policy violations:\n${polMsg}`
+
+  const code = isForbidden ? 'E403' : 'ETARGET'
+  throw Object.assign(new Error(msg), {
+    code,
+    type: npa.resolve(packument.name, wanted).type,
+    wanted,
+    versions: Object.keys(packument.versions ?? {}),
+    name,
+    distTags: packument['dist-tags'],
+    defaultTag,
+  })
+}
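
Reviewer note: for anyone unfamiliar with npm-pick-manifest, a minimal sketch of how the module vendored above is typically called. The packument literal here is hypothetical, not taken from this change.

// Hypothetical illustration only -- not part of the patch.
const pickManifest = require('npm-pick-manifest')

const packument = {
  name: 'example-pkg',
  'dist-tags': { latest: '2.0.0' },
  versions: {
    '1.0.0': { name: 'example-pkg', version: '1.0.0' },
    '2.0.0': { name: 'example-pkg', version: '2.0.0' },
  },
}

// 'latest' (2.0.0) does not satisfy ^1.0.0, so the sort heuristics run
// and the highest satisfying version wins.
const mani = pickManifest(packument, '^1.0.0')
console.log(mani.version) // '1.0.0'
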
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/package.json
new file mode 100644
index 0000000000000..feff81f5b2fee
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest/package.json
@@ -0,0 +1,57 @@
+{
+  "name": "npm-pick-manifest",
+  "version": "8.0.2",
+  "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
+  "main": "./lib",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "coverage": "tap",
+    "lint": "eslint \"**/*.js\"",
+    "test": "tap",
+    "posttest": "npm run lint",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run lint -- --fix",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/npm-pick-manifest.git"
+  },
+  "keywords": [
+    "npm",
+    "semver",
+    "package manager"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "dependencies": {
+    "npm-install-checks": "^6.0.0",
+    "npm-normalize-package-bin": "^3.0.0",
+    "npm-package-arg": "^10.0.0",
+    "semver": "^7.3.5"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.1"
+  },
+  "tap": {
+    "check-coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.18.0",
+    "publish": true
+  }
+}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE
new file mode 100644
index 0000000000000..a03cd0ed0b338
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter, Kat Marchán, npm, Inc., and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js
new file mode 100755
index 0000000000000..f35b62ca71a53
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/bin.js
@@ -0,0 +1,158 @@
+#!/usr/bin/env node
+
+const run = conf => {
+  const pacote = require('../')
+  switch (conf._[0]) {
+    case 'resolve':
+    case 'manifest':
+    case 'packument':
+      if (conf._[0] === 'resolve' && conf.long) {
+        return pacote.manifest(conf._[1], conf).then(mani => ({
+          resolved: mani._resolved,
+          integrity: mani._integrity,
+          from: mani._from,
+        }))
+      }
+      return pacote[conf._[0]](conf._[1], conf)
+
+    case 'tarball':
+      if (!conf._[2] || conf._[2] === '-') {
+        return pacote.tarball.stream(conf._[1], stream => {
+          stream.pipe(
+            conf.testStdout ||
+            /* istanbul ignore next */
+            process.stdout
+          )
+          // make sure it resolves something falsey
+          return stream.promise().then(() => {
+            return false
+          })
+        }, conf)
+      } else {
+        return pacote.tarball.file(conf._[1], conf._[2], conf)
+      }
+
+    case 'extract':
+      return pacote.extract(conf._[1], conf._[2], conf)
+
+    default: /* istanbul ignore next */ {
+      throw new Error(`bad command: ${conf._[0]}`)
+    }
+  }
+}
+
+const version = require('../package.json').version
+const usage = () =>
+`Pacote - The JavaScript Package Handler, v${version}
+
+Usage:
+
+  pacote resolve <spec>
+    Resolve a specifier and output the fully resolved target
+    Returns integrity and from if '--long' flag is set.
+
+  pacote manifest <spec>
+    Fetch a manifest and print to stdout
+
+  pacote packument <spec>
+    Fetch a full packument and print to stdout
+
+  pacote tarball <spec> [<filename>]
+    Fetch a package tarball and save to <filename>
+    If <filename> is missing or '-', the tarball will be streamed to stdout.
+
+  pacote extract <spec> <folder>
+    Extract a package to the destination folder.
+
+Configuration values all match the names of configs passed to npm, or
+options passed to Pacote.  Additional flags for this executable:
+
+  --long     Print an object from 'resolve', including integrity and spec.
+  --json     Print result objects as JSON rather than node's default.
+             (This is the default if stdout is not a TTY.)
+  --help -h  Print this helpful text.
+
+For example '--cache=/path/to/folder' will use that folder as the cache.
+`
+
+const shouldJSON = (conf, result) =>
+  conf.json ||
+  !process.stdout.isTTY &&
+  conf.json === undefined &&
+  result &&
+  typeof result === 'object'
+
+const pretty = (conf, result) =>
+  shouldJSON(conf, result) ? JSON.stringify(result, 0, 2) : result
+
+let addedLogListener = false
+const main = args => {
+  const conf = parse(args)
+  if (conf.help || conf.h) {
+    return console.log(usage())
+  }
+
+  if (!addedLogListener) {
+    process.on('log', console.error)
+    addedLogListener = true
+  }
+
+  try {
+    return run(conf)
+      .then(result => result && console.log(pretty(conf, result)))
+      .catch(er => {
+        console.error(er)
+        process.exit(1)
+      })
+  } catch (er) {
+    console.error(er.message)
+    console.error(usage())
+  }
+}
+
+const parseArg = arg => {
+  const split = arg.slice(2).split('=')
+  const k = split.shift()
+  const v = split.join('=')
+  const no = /^no-/.test(k) && !v
+  const key = (no ? k.slice(3) : k)
+    .replace(/^tag$/, 'defaultTag')
+    .replace(/-([a-z])/g, (_, c) => c.toUpperCase())
+  const value = v ? v.replace(/^~/, process.env.HOME) : !no
+  return { key, value }
+}
+
+const parse = args => {
+  const conf = {
+    _: [],
+    cache: process.env.HOME + '/.npm/_cacache',
+  }
+  let dashdash = false
+  args.forEach(arg => {
+    if (dashdash) {
+      conf._.push(arg)
+    } else if (arg === '--') {
+      dashdash = true
+    } else if (arg === '-h') {
+      conf.help = true
+    } else if (/^--/.test(arg)) {
+      const { key, value } = parseArg(arg)
+      conf[key] = value
+    } else {
+      conf._.push(arg)
+    }
+  })
+  return conf
+}
+
+if (module === require.main) {
+  main(process.argv.slice(2))
+} else {
+  module.exports = {
+    main,
+    run,
+    usage,
+    parseArg,
+    parse,
+  }
+}
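
Reviewer note: a quick sketch of the flag handling above when bin.js is required rather than executed; the require path and flag values are illustrative.

// Hypothetical illustration only -- not part of the patch.
const { parse } = require('pacote/lib/bin.js')

// '--tag' is renamed to 'defaultTag', kebab-case flags are camelCased,
// and '--no-*' flags with no value become false.
const conf = parse(['manifest', 'abbrev@1.1.1', '--tag=beta', '--no-progress'])
console.log(conf._)          // ['manifest', 'abbrev@1.1.1']
console.log(conf.defaultTag) // 'beta'
console.log(conf.progress)   // false
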
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js
new file mode 100644
index 0000000000000..420afc5802cb2
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/dir.js
@@ -0,0 +1,108 @@
+const Fetcher = require('./fetcher.js')
+const FileFetcher = require('./file.js')
+const { Minipass } = require('minipass')
+const tarCreateOptions = require('./util/tar-create-options.js')
+const packlist = require('npm-packlist')
+const tar = require('tar')
+const _prepareDir = Symbol('_prepareDir')
+const { resolve } = require('path')
+const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson')
+
+const runScript = require('@npmcli/run-script')
+
+const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
+class DirFetcher extends Fetcher {
+  constructor (spec, opts) {
+    super(spec, opts)
+    // just the fully resolved filename
+    this.resolved = this.spec.fetchSpec
+
+    this.tree = opts.tree || null
+    this.Arborist = opts.Arborist || null
+  }
+
+  // exposes tarCreateOptions as public API
+  static tarCreateOptions (manifest) {
+    return tarCreateOptions(manifest)
+  }
+
+  get types () {
+    return ['directory']
+  }
+
+  [_prepareDir] () {
+    return this.manifest().then(mani => {
+      if (!mani.scripts || !mani.scripts.prepare) {
+        return
+      }
+
+      // we *only* run prepare.
+      // pre/post-pack is run by the npm CLI for publish and pack,
+      // but this function is *also* run when installing git deps
+      const stdio = this.opts.foregroundScripts ? 'inherit' : 'pipe'
+
+      // hide the banner if silent opt is passed in, or if prepare running
+      // in the background.
+      const banner = this.opts.silent ? false : stdio === 'inherit'
+
+      return runScript({
+        pkg: mani,
+        event: 'prepare',
+        path: this.resolved,
+        stdio,
+        banner,
+        env: {
+          npm_package_resolved: this.resolved,
+          npm_package_integrity: this.integrity,
+          npm_package_json: resolve(this.resolved, 'package.json'),
+        },
+      })
+    })
+  }
+
+  [_tarballFromResolved] () {
+    if (!this.tree && !this.Arborist) {
+      throw new Error('DirFetcher requires either a tree or an Arborist constructor to pack')
+    }
+
+    const stream = new Minipass()
+    stream.resolved = this.resolved
+    stream.integrity = this.integrity
+
+    const { prefix, workspaces } = this.opts
+
+    // run the prepare script, get the list of files, and tar it up
+    // pipe to the stream, and proxy errors the chain.
+    this[_prepareDir]()
+      .then(async () => {
+        if (!this.tree) {
+          const arb = new this.Arborist({ path: this.resolved })
+          this.tree = await arb.loadActual()
+        }
+        return packlist(this.tree, { path: this.resolved, prefix, workspaces })
+      })
+      .then(files => tar.c(tarCreateOptions(this.package), files)
+        .on('error', er => stream.emit('error', er)).pipe(stream))
+      .catch(er => stream.emit('error', er))
+    return stream
+  }
+
+  manifest () {
+    if (this.package) {
+      return Promise.resolve(this.package)
+    }
+
+    return this[_readPackageJson](this.resolved + '/package.json')
+      .then(mani => this.package = {
+        ...mani,
+        _integrity: this.integrity && String(this.integrity),
+        _resolved: this.resolved,
+        _from: this.from,
+      })
+  }
+
+  packument () {
+    return FileFetcher.prototype.packument.apply(this)
+  }
+}
+module.exports = DirFetcher
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js
new file mode 100644
index 0000000000000..f961a45c7d346
--- /dev/null
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/fetcher.js
@@ -0,0 +1,505 @@
+// This is the base class that the other fetcher types in lib
+// all descend from.
+// It handles the unpacking and retry logic that is shared among
+// all of the other Fetcher types.
+
+const npa = require('npm-package-arg')
+const ssri = require('ssri')
+const { promisify } = require('util')
+const { basename, dirname } = require('path')
+const tar = require('tar')
+const log = require('proc-log')
+const retry = require('promise-retry')
+const fs = require('fs/promises')
+const fsm = require('fs-minipass')
+const cacache = require('cacache')
+const isPackageBin = require('./util/is-package-bin.js')
+const removeTrailingSlashes = require('./util/trailing-slashes.js')
+const getContents = require('@npmcli/installed-package-contents')
+const readPackageJsonFast = require('read-package-json-fast')
+const readPackageJson = promisify(require('read-package-json'))
+const { Minipass } = require('minipass')
+
+const cacheDir = require('./util/cache-dir.js')
+
+// Private methods.
+// Child classes should not have to override these.
+// Users should never call them.
+const _extract = Symbol('_extract')
+const _mkdir = Symbol('_mkdir')
+const _empty = Symbol('_empty')
+const _toFile = Symbol('_toFile')
+const _tarxOptions = Symbol('_tarxOptions')
+const _entryMode = Symbol('_entryMode')
+const _istream = Symbol('_istream')
+const _assertType = Symbol('_assertType')
+const _tarballFromCache = Symbol('_tarballFromCache')
+const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved')
+const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches')
+const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson')
+
+class FetcherBase {
+  constructor (spec, opts) {
+    if (!opts || typeof opts !== 'object') {
+      throw new TypeError('options object is required')
+    }
+    this.spec = npa(spec, opts.where)
+
+    this.allowGitIgnore = !!opts.allowGitIgnore
+
+    // a bit redundant because presumably the caller already knows this,
+    // but it makes it easier to not have to keep track of the requested
+    // spec when we're dispatching thousands of these at once, and normalizing
+    // is nice.  saveSpec is preferred if set, because it turns stuff like
+    // x/y#committish into github:x/y#committish.  use name@rawSpec for
+    // registry deps so that we turn xyz and xyz@ -> xyz@
+    this.from = this.spec.registry
+      ? `${this.spec.name}@${this.spec.rawSpec}` : this.spec.saveSpec
+
+    this[_assertType]()
+    // clone the opts object so that others aren't upset when we mutate it
+    // by adding/modifying the integrity value.
+    this.opts = { ...opts }
+
+    this.cache = opts.cache || cacheDir().cacache
+    this.tufCache = opts.tufCache || cacheDir().tufcache
+    this.resolved = opts.resolved || null
+
+    // default to caching/verifying with sha512, that's what we usually have
+    // need to change this default, or start overriding it, when sha512
+    // is no longer strong enough.
+    this.defaultIntegrityAlgorithm = opts.defaultIntegrityAlgorithm || 'sha512'
+
+    if (typeof opts.integrity === 'string') {
+      this.opts.integrity = ssri.parse(opts.integrity)
+    }
+
+    this.package = null
+    this.type = this.constructor.name
+    this.fmode = opts.fmode || 0o666
+    this.dmode = opts.dmode || 0o777
+    // we don't need a default umask, because we don't chmod files coming
+    // out of package tarballs.  they're forced to have a mode that is
+    // valid, regardless of what's in the tarball entry, and then we let
+    // the process's umask setting do its job.  but if configured, we do
+    // respect it.
+    this.umask = opts.umask || 0
+
+    this.preferOnline = !!opts.preferOnline
+    this.preferOffline = !!opts.preferOffline
+    this.offline = !!opts.offline
+
+    this.before = opts.before
+    this.fullMetadata = this.before ? true : !!opts.fullMetadata
+    this.fullReadJson = !!opts.fullReadJson
+    if (this.fullReadJson) {
+      this[_readPackageJson] = readPackageJson
+    } else {
+      this[_readPackageJson] = readPackageJsonFast
+    }
+
+    // rrh is a registry hostname or 'never' or 'always'
+    // defaults to registry.npmjs.org
+    this.replaceRegistryHost = (!opts.replaceRegistryHost || opts.replaceRegistryHost === 'npmjs')
+      ? 'registry.npmjs.org' : opts.replaceRegistryHost
+
+    this.defaultTag = opts.defaultTag || 'latest'
+    this.registry = removeTrailingSlashes(opts.registry || 'https://registry.npmjs.org')
+
+    // command to run 'prepare' scripts on directories and git dirs
+    // To use pacote with yarn, for example, set npmBin to 'yarn'
+    // and npmCliConfig with yarn's equivalents.
+    this.npmBin = opts.npmBin || 'npm'
+
+    // command to install deps for preparing
+    this.npmInstallCmd = opts.npmInstallCmd || ['install', '--force']
+
+    // XXX fill more of this in based on what we know from this.opts
+    // we explicitly DO NOT fill in --tag, though, since we are often
+    // going to be packing in the context of a publish, which may set
+    // a dist-tag, but certainly wants to keep defaulting to latest.
+    this.npmCliConfig = opts.npmCliConfig || [
+      `--cache=${dirname(this.cache)}`,
+      `--prefer-offline=${!!this.preferOffline}`,
+      `--prefer-online=${!!this.preferOnline}`,
+      `--offline=${!!this.offline}`,
+      ...(this.before ? [`--before=${this.before.toISOString()}`] : []),
+      '--no-progress',
+      '--no-save',
+      '--no-audit',
+      // override any omit settings from the environment
+      '--include=dev',
+      '--include=peer',
+      '--include=optional',
+      // we need the actual things, not just the lockfile
+      '--no-package-lock-only',
+      '--no-dry-run',
+    ]
+  }
+
+  get integrity () {
+    return this.opts.integrity || null
+  }
+
+  set integrity (i) {
+    if (!i) {
+      return
+    }
+
+    i = ssri.parse(i)
+    const current = this.opts.integrity
+
+    // do not ever update an existing hash value, but do
+    // merge in NEW algos and hashes that we don't already have.
+    if (current) {
+      current.merge(i)
+    } else {
+      this.opts.integrity = i
+    }
+  }
+
+  get notImplementedError () {
+    return new Error('not implemented in this fetcher type: ' + this.type)
+  }
+
+  // override in child classes
+  // Returns a Promise that resolves to this.resolved string value
+  resolve () {
+    return this.resolved ? Promise.resolve(this.resolved)
+      : Promise.reject(this.notImplementedError)
+  }
+
+  packument () {
+    return Promise.reject(this.notImplementedError)
+  }
+
+  // override in child class
+  // returns a manifest containing:
+  // - name
+  // - version
+  // - _resolved
+  // - _integrity
+  // - plus whatever else was in there (corgi, full metadata, or pj file)
+  manifest () {
+    return Promise.reject(this.notImplementedError)
+  }
+
+  // private, should be overridden.
+  // Note that they should *not* calculate or check integrity or cache,
+  // but *just* return the raw tarball data stream.
+  [_tarballFromResolved] () {
+    throw this.notImplementedError
+  }
+
+  // public, should not be overridden
+  tarball () {
+    return this.tarballStream(stream => stream.concat().then(data => {
+      data.integrity = this.integrity && String(this.integrity)
+      data.resolved = this.resolved
+      data.from = this.from
+      return data
+    }))
+  }
+
+  // private
+  // Note: cacache will raise a EINTEGRITY error if the integrity doesn't match
+  [_tarballFromCache] () {
+    return cacache.get.stream.byDigest(this.cache, this.integrity, this.opts)
+  }
+
+  get [_cacheFetches] () {
+    return true
+  }
+
+  [_istream] (stream) {
+    // if not caching this, just return it
+    if (!this.opts.cache || !this[_cacheFetches]) {
+      // instead of creating a new integrity stream, we only piggyback on the
+      // provided stream's events
+      if (stream.hasIntegrityEmitter) {
+        stream.on('integrity', i => this.integrity = i)
+        return stream
+      }
+
+      const istream = ssri.integrityStream(this.opts)
+      istream.on('integrity', i => this.integrity = i)
+      stream.on('error', err => istream.emit('error', err))
+      return stream.pipe(istream)
+    }
+
+    // we have to return a stream that gets ALL the data, and proxies errors,
+    // but then pipe from the original tarball stream into the cache as well.
+    // To do this without losing any data, and since the cacache put stream
+    // is not a passthrough, we have to pipe from the original stream into
+    // the cache AFTER we pipe into the middleStream.  Since the cache stream
+    // has an asynchronous flush to write its contents to disk, we need to
+    // defer the middleStream end until the cache stream ends.
+    const middleStream = new Minipass()
+    stream.on('error', err => middleStream.emit('error', err))
+    stream.pipe(middleStream, { end: false })
+    const cstream = cacache.put.stream(
+      this.opts.cache,
+      `pacote:tarball:${this.from}`,
+      this.opts
+    )
+    cstream.on('integrity', i => this.integrity = i)
+    cstream.on('error', err => stream.emit('error', err))
+    stream.pipe(cstream)
+
+    // eslint-disable-next-line promise/catch-or-return
+    cstream.promise().catch(() => {}).then(() => middleStream.end())
+    return middleStream
+  }
+
+  pickIntegrityAlgorithm () {
+    return this.integrity ? this.integrity.pickAlgorithm(this.opts)
+      : this.defaultIntegrityAlgorithm
+  }
+
+  // TODO: check error class, once those are rolled out to our deps
+  isDataCorruptionError (er) {
+    return er.code === 'EINTEGRITY' || er.code === 'Z_DATA_ERROR'
+  }
+
+  // override the types getter
+  get types () {
+    return false
+  }
+
+  [_assertType] () {
+    if (this.types && !this.types.includes(this.spec.type)) {
+      throw new TypeError(`Wrong spec type (${
+        this.spec.type
+      }) for ${
+        this.constructor.name
+      }. Supported types: ${this.types.join(', ')}`)
+    }
+  }
+
+  // We allow ENOENTs from cacache, but not anywhere else.
+  // An ENOENT trying to read a tgz file, for example, is Right Out.
+  isRetriableError (er) {
+    // TODO: check error class, once those are rolled out to our deps
+    return this.isDataCorruptionError(er) ||
+      er.code === 'ENOENT' ||
+      er.code === 'EISDIR'
+  }
+
+  // Mostly internal, but has some uses
+  // Pass in a function which returns a promise
+  // Function will be called 1 or more times with streams that may fail.
+  // Retries:
+  // Function MUST handle errors on the stream by rejecting the promise,
+  // so that retry logic can pick it up and either retry or fail whatever
+  // promise it was making (ie, failing extraction, etc.)
+  //
+  // The return value of this method is a Promise that resolves the same
+  // as whatever the streamHandler resolves to.
+  //
+  // This should never be overridden by child classes, but it is public.
+  tarballStream (streamHandler) {
+    // Only short-circuit via cache if we have everything else we'll need,
+    // and the user has not expressed a preference for checking online.
+
+    const fromCache = (
+      !this.preferOnline &&
+      this.integrity &&
+      this.resolved
+    ) ? streamHandler(this[_tarballFromCache]()).catch(er => {
+      if (this.isDataCorruptionError(er)) {
+        log.warn('tarball', `cached data for ${
+          this.spec
+        } (${this.integrity}) seems to be corrupted. Refreshing cache.`)
+        return this.cleanupCached().then(() => {
+          throw er
+        })
+      } else {
+        throw er
+      }
+    }) : null
+
+    const fromResolved = er => {
+      if (er) {
+        if (!this.isRetriableError(er)) {
+          throw er
+        }
+        log.silly('tarball', `no local data for ${
+          this.spec
+        }. Extracting by manifest.`)
+      }
+      return this.resolve().then(() => retry(tryAgain =>
+        streamHandler(this[_istream](this[_tarballFromResolved]()))
+          .catch(streamErr => {
+            // Most likely data integrity.  A cache ENOENT error is unlikely
+            // here, since we're definitely not reading from the cache, but it
+            // IS possible that the fetch subsystem accessed the cache, and the
+            // entry got blown away or something.  Try one more time to be sure.
+            if (this.isRetriableError(streamErr)) {
+              log.warn('tarball', `tarball data for ${
+                this.spec
+              } (${this.integrity}) seems to be corrupted. Trying again.`)
+              return this.cleanupCached().then(() => tryAgain(streamErr))
+            }
+            throw streamErr
+          }), { retries: 1, minTimeout: 0, maxTimeout: 0 }))
+    }
+
+    return fromCache ? fromCache.catch(fromResolved) : fromResolved()
+  }
+
+  cleanupCached () {
+    return cacache.rm.content(this.cache, this.integrity, this.opts)
+  }
+
+  [_empty] (path) {
+    return getContents({ path, depth: 1 }).then(contents => Promise.all(
+      contents.map(entry => fs.rm(entry, { recursive: true, force: true }))))
+  }
+
+  async [_mkdir] (dest) {
+    await this[_empty](dest)
+    return await fs.mkdir(dest, { recursive: true })
+  }
+
+  // extraction is always the same.  the only difference is where
+  // the tarball comes from.
+  async extract (dest) {
+    await this[_mkdir](dest)
+    return this.tarballStream((tarball) => this[_extract](dest, tarball))
+  }
+
+  [_toFile] (dest) {
+    return this.tarballStream(str => new Promise((res, rej) => {
+      const writer = new fsm.WriteStream(dest)
+      str.on('error', er => writer.emit('error', er))
+      writer.on('error', er => rej(er))
+      writer.on('close', () => res({
+        integrity: this.integrity && String(this.integrity),
+        resolved: this.resolved,
+        from: this.from,
+      }))
+      str.pipe(writer)
+    }))
+  }
+
+  // don't use this[_mkdir] because we don't want to rimraf anything
+  async tarballFile (dest) {
+    const dir = dirname(dest)
+    await fs.mkdir(dir, { recursive: true })
+    return this[_toFile](dest)
+  }
+
+  [_extract] (dest, tarball) {
+    const extractor = tar.x(this[_tarxOptions]({ cwd: dest }))
+    const p = new Promise((resolve, reject) => {
+      extractor.on('end', () => {
+        resolve({
+          resolved: this.resolved,
+          integrity: this.integrity && String(this.integrity),
+          from: this.from,
+        })
+      })
+
+      extractor.on('error', er => {
+        log.warn('tar', er.message)
+        log.silly('tar', er)
+        reject(er)
+      })
+
+      tarball.on('error', er => reject(er))
+    })
+
+    tarball.pipe(extractor)
+    return p
+  }
+
+  // always ensure that entries are at least as permissive as our configured
+  // dmode/fmode, but never more permissive than the umask allows.
+  [_entryMode] (path, mode, type) {
+    const m = /Directory|GNUDumpDir/.test(type) ? this.dmode
+      : /File$/.test(type) ? this.fmode
+      : /* istanbul ignore next - should never happen in a pkg */ 0
+
+    // make sure package bins are executable
+    const exe = isPackageBin(this.package, path) ? 0o111 : 0
+    // always ensure that files are read/writable by the owner
+    return ((mode | m) & ~this.umask) | exe | 0o600
+  }
+
+  [_tarxOptions] ({ cwd }) {
+    const sawIgnores = new Set()
+    return {
+      cwd,
+      noChmod: true,
+      noMtime: true,
+      filter: (name, entry) => {
+        if (/Link$/.test(entry.type)) {
+          return false
+        }
+        entry.mode = this[_entryMode](entry.path, entry.mode, entry.type)
+        // this replicates the npm pack behavior where .gitignore files
+        // are treated like .npmignore files, but only if a .npmignore
+        // file is not present.
+        if (/File$/.test(entry.type)) {
+          const base = basename(entry.path)
+          if (base === '.npmignore') {
+            sawIgnores.add(entry.path)
+          } else if (base === '.gitignore' && !this.allowGitIgnore) {
+            // rename, but only if there's not already a .npmignore
+            const ni = entry.path.replace(/\.gitignore$/, '.npmignore')
+            if (sawIgnores.has(ni)) {
+              return false
+            }
+            entry.path = ni
+          }
+          return true
+        }
+      },
+      strip: 1,
+      onwarn: /* istanbul ignore next - we can trust that tar logs */
+      (code, msg, data) => {
+        log.warn('tar', code, msg)
+        log.silly('tar', code, msg, data)
+      },
+      umask: this.umask,
+      // always ignore ownership info from tarball metadata
+      preserveOwner: false,
+    }
+  }
+}
+
+module.exports = FetcherBase
+
+// Child classes
+const GitFetcher = require('./git.js')
+const RegistryFetcher = require('./registry.js')
+const FileFetcher = require('./file.js')
+const DirFetcher = require('./dir.js')
+const RemoteFetcher = require('./remote.js')
+
+// Get an appropriate fetcher object from a spec and options
+FetcherBase.get = (rawSpec, opts = {}) => {
+  const spec = npa(rawSpec, opts.where)
+  switch (spec.type) {
+    case 'git':
+      return new GitFetcher(spec, opts)
+
+    case 'remote':
+      return new RemoteFetcher(spec, opts)
+
+    case 'version':
+    case 'range':
+    case 'tag':
+    case 'alias':
+      return new RegistryFetcher(spec.subSpec || spec, opts)
+
+    case 'file':
+      return new FileFetcher(spec, opts)
+
+    case 'directory':
+      return new DirFetcher(spec, opts)
+
+    default:
+      throw new TypeError('Unknown spec type: ' + spec.type)
+  }
+}
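
Reviewer note: a small sketch of how FetcherBase.get (bottom of fetcher.js above) dispatches specs, using pacote's public API; the spec and paths are illustrative.

// Hypothetical illustration only -- not part of the patch.
const pacote = require('pacote')

// 'abbrev@^1.0.0' is a registry range, so get() builds a RegistryFetcher;
// 'github:user/repo' would build a GitFetcher, and './some-dir' a DirFetcher.
pacote.extract('abbrev@^1.0.0', '/tmp/abbrev', { cache: '/tmp/cache' })
  .then(({ resolved, integrity }) => {
    console.log(resolved)  // tarball URL the spec resolved to
    console.log(integrity) // integrity computed while unpacking
  })
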
+ fs.stat(script, (er, st) => { + if (er) { + return res() + } + const mode = st.mode | 0o111 + if (mode === st.mode) { + return res() + } + fs.chmod(script, mode, res) + }) + }))) + } + + extract (dest) { + // if we've already loaded the manifest, then the super got it. + // but if not, read the unpacked manifest and chmod properly. + return super.extract(dest) + .then(result => this.package ? result + : this[_readPackageJson](dest + '/package.json').then(pkg => + this[_exeBins](pkg, dest)).then(() => result)) + } + + [_tarballFromResolved] () { + // create a read stream and return it + return new fsm.ReadStream(this.resolved) + } + + packument () { + // simulate based on manifest + return this.manifest().then(mani => ({ + name: mani.name, + 'dist-tags': { + [this.defaultTag]: mani.version, + }, + versions: { + [mani.version]: { + ...mani, + dist: { + tarball: `file:${this.resolved}`, + integrity: this.integrity && String(this.integrity), + }, + }, + }, + })) + } +} + +module.exports = FileFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js new file mode 100644 index 0000000000000..5d24f72497ec9 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/git.js @@ -0,0 +1,327 @@ +const Fetcher = require('./fetcher.js') +const FileFetcher = require('./file.js') +const RemoteFetcher = require('./remote.js') +const DirFetcher = require('./dir.js') +const hashre = /^[a-f0-9]{40}$/ +const git = require('@npmcli/git') +const pickManifest = require('npm-pick-manifest') +const npa = require('npm-package-arg') +const { Minipass } = require('minipass') +const cacache = require('cacache') +const log = require('proc-log') +const npm = require('./util/npm.js') + +const _resolvedFromRepo = Symbol('_resolvedFromRepo') +const _resolvedFromHosted = Symbol('_resolvedFromHosted') +const _resolvedFromClone = Symbol('_resolvedFromClone') +const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') +const _addGitSha = Symbol('_addGitSha') +const addGitSha = require('./util/add-git-sha.js') +const _clone = Symbol('_clone') +const _cloneHosted = Symbol('_cloneHosted') +const _cloneRepo = Symbol('_cloneRepo') +const _setResolvedWithSha = Symbol('_setResolvedWithSha') +const _prepareDir = Symbol('_prepareDir') +const _readPackageJson = Symbol.for('package.Fetcher._readPackageJson') + +// get the repository url. +// prefer https if there's auth, since ssh will drop that. +// otherwise, prefer ssh if available (more secure). +// We have to add the git+ back because npa suppresses it. +const repoUrl = (h, opts) => + h.sshurl && !(h.https && h.auth) && addGitPlus(h.sshurl(opts)) || + h.https && addGitPlus(h.https(opts)) + +// add git+ to the url, but only one time. 
+const addGitPlus = url => url && `git+${url}`.replace(/^(git\+)+/, 'git+') + +class GitFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + + // we never want to compare integrity for git dependencies: npm/rfcs#525 + if (this.opts.integrity) { + delete this.opts.integrity + log.warn(`skipping integrity check for git dependency ${this.spec.fetchSpec}`) + } + + this.resolvedRef = null + if (this.spec.hosted) { + this.from = this.spec.hosted.shortcut({ noCommittish: false }) + } + + // shortcut: avoid full clone when we can go straight to the tgz + // if we have the full sha and it's a hosted git platform + if (this.spec.gitCommittish && hashre.test(this.spec.gitCommittish)) { + this.resolvedSha = this.spec.gitCommittish + // use hosted.tarball() when we shell to RemoteFetcher later + this.resolved = this.spec.hosted + ? repoUrl(this.spec.hosted, { noCommittish: false }) + : this.spec.rawSpec + } else { + this.resolvedSha = '' + } + + this.Arborist = opts.Arborist || null + } + + // just exposed to make it easier to test all the combinations + static repoUrl (hosted, opts) { + return repoUrl(hosted, opts) + } + + get types () { + return ['git'] + } + + resolve () { + // likely a hosted git repo with a sha, so get the tarball url + // but in general, no reason to resolve() more than necessary! + if (this.resolved) { + return super.resolve() + } + + // fetch the git repo and then look at the current hash + const h = this.spec.hosted + // try to use ssh, fall back to git. + return h ? this[_resolvedFromHosted](h) + : this[_resolvedFromRepo](this.spec.fetchSpec) + } + + // first try https, since that's faster and passphrase-less for + // public repos, and supports private repos when auth is provided. + // Fall back to SSH to support private repos + // NB: we always store the https url in resolved field if auth + // is present, otherwise ssh if the hosted type provides it + [_resolvedFromHosted] (hosted) { + return this[_resolvedFromRepo](hosted.https && hosted.https()) + .catch(er => { + // Throw early since we know pathspec errors will fail again if retried + if (er instanceof git.errors.GitPathspecError) { + throw er + } + const ssh = hosted.sshurl && hosted.sshurl() + // no fallthrough if we can't fall through or have https auth + if (!ssh || hosted.auth) { + throw er + } + return this[_resolvedFromRepo](ssh) + }) + } + + [_resolvedFromRepo] (gitRemote) { + // XXX make this a custom error class + if (!gitRemote) { + return Promise.reject(new Error(`No git url for ${this.spec}`)) + } + const gitRange = this.spec.gitRange + const name = this.spec.name + return git.revs(gitRemote, this.opts).then(remoteRefs => { + return gitRange ? pickManifest({ + versions: remoteRefs.versions, + 'dist-tags': remoteRefs['dist-tags'], + name, + }, gitRange, this.opts) + : this.spec.gitCommittish ? + remoteRefs.refs[this.spec.gitCommittish] || + remoteRefs.refs[remoteRefs.shas[this.spec.gitCommittish]] + : remoteRefs.refs.HEAD // no git committish, get default head + }).then(revDoc => { + // the committish provided isn't in the rev list + // things like HEAD~3 or @yesterday can land here. + if (!revDoc || !revDoc.sha) { + return this[_resolvedFromClone]() + } + + this.resolvedRef = revDoc + this.resolvedSha = revDoc.sha + this[_addGitSha](revDoc.sha) + return this.resolved + }) + } + + [_setResolvedWithSha] (withSha) { + // we haven't cloned, so a tgz download is still faster + // of course, if it's not a known host, we can't do that. + this.resolved = !this.spec.hosted ? 
withSha + : repoUrl(npa(withSha).hosted, { noCommittish: false }) + } + + // when we get the git sha, we affix it to our spec to build up + // either a git url with a hash, or a tarball download URL + [_addGitSha] (sha) { + this[_setResolvedWithSha](addGitSha(this.spec, sha)) + } + + [_resolvedFromClone] () { + // do a full or shallow clone, then look at the HEAD + // kind of wasteful, but no other option, really + return this[_clone](dir => this.resolved) + } + + [_prepareDir] (dir) { + return this[_readPackageJson](dir + '/package.json').then(mani => { + // no need if we aren't going to do any preparation. + const scripts = mani.scripts + if (!mani.workspaces && (!scripts || !( + scripts.postinstall || + scripts.build || + scripts.preinstall || + scripts.install || + scripts.prepack || + scripts.prepare))) { + return + } + + // to avoid cases where we have an cycle of git deps that depend + // on one another, we only ever do preparation for one instance + // of a given git dep along the chain of installations. + // Note that this does mean that a dependency MAY in theory end up + // trying to run its prepare script using a dependency that has not + // been properly prepared itself, but that edge case is smaller + // and less hazardous than a fork bomb of npm and git commands. + const noPrepare = !process.env._PACOTE_NO_PREPARE_ ? [] + : process.env._PACOTE_NO_PREPARE_.split('\n') + if (noPrepare.includes(this.resolved)) { + log.info('prepare', 'skip prepare, already seen', this.resolved) + return + } + noPrepare.push(this.resolved) + + // the DirFetcher will do its own preparation to run the prepare scripts + // All we have to do is put the deps in place so that it can succeed. + return npm( + this.npmBin, + [].concat(this.npmInstallCmd).concat(this.npmCliConfig), + dir, + { ...process.env, _PACOTE_NO_PREPARE_: noPrepare.join('\n') }, + { message: 'git dep preparation failed' } + ) + }) + } + + [_tarballFromResolved] () { + const stream = new Minipass() + stream.resolved = this.resolved + stream.from = this.from + + // check it out and then shell out to the DirFetcher tarball packer + this[_clone](dir => this[_prepareDir](dir) + .then(() => new Promise((res, rej) => { + if (!this.Arborist) { + throw new Error('GitFetcher requires an Arborist constructor to pack a tarball') + } + const df = new DirFetcher(`file:${dir}`, { + ...this.opts, + Arborist: this.Arborist, + resolved: null, + integrity: null, + }) + const dirStream = df[_tarballFromResolved]() + dirStream.on('error', rej) + dirStream.on('end', res) + dirStream.pipe(stream) + }))).catch( + /* istanbul ignore next: very unlikely and hard to test */ + er => stream.emit('error', er) + ) + return stream + } + + // clone a git repo into a temp folder (or fetch and unpack if possible) + // handler accepts a directory, and returns a promise that resolves + // when we're done with it, at which point, cacache deletes it + // + // TODO: after cloning, create a tarball of the folder, and add to the cache + // with cacache.put.stream(), using a key that's deterministic based on the + // spec and repo, so that we don't ever clone the same thing multiple times. 
+ [_clone] (handler, tarballOk = true) { + const o = { tmpPrefix: 'git-clone' } + const ref = this.resolvedSha || this.spec.gitCommittish + const h = this.spec.hosted + const resolved = this.resolved + + // can be set manually to false to fall back to actual git clone + tarballOk = tarballOk && + h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball + + return cacache.tmp.withTmp(this.cache, o, async tmp => { + // if we're resolved, and have a tarball url, shell out to RemoteFetcher + if (tarballOk) { + const nameat = this.spec.name ? `${this.spec.name}@` : '' + return new RemoteFetcher(h.tarball({ noCommittish: false }), { + ...this.opts, + allowGitIgnore: true, + pkgid: `git:${nameat}${this.resolved}`, + resolved: this.resolved, + integrity: null, // it'll always be different, if we have one + }).extract(tmp).then(() => handler(tmp), er => { + // fall back to ssh download if tarball fails + if (er.constructor.name.match(/^Http/)) { + return this[_clone](handler, false) + } else { + throw er + } + }) + } + + const sha = await ( + h ? this[_cloneHosted](ref, tmp) + : this[_cloneRepo](this.spec.fetchSpec, ref, tmp) + ) + this.resolvedSha = sha + if (!this.resolved) { + await this[_addGitSha](sha) + } + return handler(tmp) + }) + } + + // first try https, since that's faster and passphrase-less for + // public repos, and supports private repos when auth is provided. + // Fall back to SSH to support private repos + // NB: we always store the https url in resolved field if auth + // is present, otherwise ssh if the hosted type provides it + [_cloneHosted] (ref, tmp) { + const hosted = this.spec.hosted + return this[_cloneRepo](hosted.https({ noCommittish: true }), ref, tmp) + .catch(er => { + // Throw early since we know pathspec errors will fail again if retried + if (er instanceof git.errors.GitPathspecError) { + throw er + } + const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true }) + // no fallthrough if we can't fall through or have https auth + if (!ssh || hosted.auth) { + throw er + } + return this[_cloneRepo](ssh, ref, tmp) + }) + } + + [_cloneRepo] (repo, ref, tmp) { + const { opts, spec } = this + return git.clone(repo, ref, tmp, { ...opts, spec }) + } + + manifest () { + if (this.package) { + return Promise.resolve(this.package) + } + + return this.spec.hosted && this.resolved + ? 
FileFetcher.prototype.manifest.apply(this) + : this[_clone](dir => + this[_readPackageJson](dir + '/package.json') + .then(mani => this.package = { + ...mani, + _resolved: this.resolved, + _from: this.from, + })) + } + + packument () { + return FileFetcher.prototype.packument.apply(this) + } +} +module.exports = GitFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js new file mode 100644 index 0000000000000..cbcbd7c92d15f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/index.js @@ -0,0 +1,23 @@ +const { get } = require('./fetcher.js') +const GitFetcher = require('./git.js') +const RegistryFetcher = require('./registry.js') +const FileFetcher = require('./file.js') +const DirFetcher = require('./dir.js') +const RemoteFetcher = require('./remote.js') + +module.exports = { + GitFetcher, + RegistryFetcher, + FileFetcher, + DirFetcher, + RemoteFetcher, + resolve: (spec, opts) => get(spec, opts).resolve(), + extract: (spec, dest, opts) => get(spec, opts).extract(dest), + manifest: (spec, opts) => get(spec, opts).manifest(), + tarball: (spec, opts) => get(spec, opts).tarball(), + packument: (spec, opts) => get(spec, opts).packument(), +} +module.exports.tarball.stream = (spec, handler, opts) => + get(spec, opts).tarballStream(handler) +module.exports.tarball.file = (spec, dest, opts) => + get(spec, opts).tarballFile(dest) diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js new file mode 100644 index 0000000000000..34d9b2b87f3f3 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/registry.js @@ -0,0 +1,344 @@ +const Fetcher = require('./fetcher.js') +const RemoteFetcher = require('./remote.js') +const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') +const pacoteVersion = require('../package.json').version +const removeTrailingSlashes = require('./util/trailing-slashes.js') +const rpj = require('read-package-json-fast') +const pickManifest = require('npm-pick-manifest') +const ssri = require('ssri') +const crypto = require('crypto') +const npa = require('npm-package-arg') +const { sigstore } = require('sigstore') + +// Corgis are cute. 🐕🐶 +const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*' +const fullDoc = 'application/json' + +const fetch = require('npm-registry-fetch') + +const _headers = Symbol('_headers') +class RegistryFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + + // you usually don't want to fetch the same packument multiple times in + // the span of a given script or command, no matter how many pacote calls + // are made, so this lets us avoid doing that. It's only relevant for + // registry fetchers, because other types simulate their packument from + // the manifest, which they memoize on this.package, so it's very cheap + // already. + this.packumentCache = this.opts.packumentCache || null + + this.registry = fetch.pickRegistry(spec, opts) + this.packumentUrl = removeTrailingSlashes(this.registry) + '/' + + this.spec.escapedName + + const parsed = new URL(this.registry) + const regKey = `//${parsed.host}${parsed.pathname}` + // unlike the nerf-darted auth keys, this one does *not* allow a mismatch + // of trailing slashes. It must match exactly. 
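// A minimal sketch (not pacote's actual code) of the registry key id computed
// above: the host plus pathname of the registry URL, prefixed with `//`. Unlike
// nerf-darted auth keys, the lookup is exact, trailing slash included.
const regKeyFor = (registry) => {
  const { host, pathname } = new URL(registry)
  return `//${host}${pathname}`
}
// regKeyFor('https://registry.npmjs.org/') === '//registry.npmjs.org/'
// so signing keys would be read from opts['//registry.npmjs.org/:_keys']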
+ if (this.opts[`${regKey}:_keys`]) { + this.registryKeys = this.opts[`${regKey}:_keys`] + } + + // XXX pacote <=9 has some logic to ignore opts.resolved if + // the resolved URL doesn't go to the same registry. + // Consider reproducing that here, to throw away this.resolved + // in that case. + } + + async resolve () { + // fetching the manifest sets resolved and (if present) integrity + await this.manifest() + if (!this.resolved) { + throw Object.assign( + new Error('Invalid package manifest: no `dist.tarball` field'), + { package: this.spec.toString() } + ) + } + return this.resolved + } + + [_headers] () { + return { + // npm will override UA, but ensure that we always send *something* + 'user-agent': this.opts.userAgent || + `pacote/${pacoteVersion} node/${process.version}`, + ...(this.opts.headers || {}), + 'pacote-version': pacoteVersion, + 'pacote-req-type': 'packument', + 'pacote-pkg-id': `registry:${this.spec.name}`, + accept: this.fullMetadata ? fullDoc : corgiDoc, + } + } + + async packument () { + // note this might be either an in-flight promise for a request, + // or the actual packument, but we never want to make more than + // one request at a time for the same thing regardless. + if (this.packumentCache && this.packumentCache.has(this.packumentUrl)) { + return this.packumentCache.get(this.packumentUrl) + } + + // npm-registry-fetch the packument + // set the appropriate header for corgis if fullMetadata isn't set + // return the res.json() promise + try { + const res = await fetch(this.packumentUrl, { + ...this.opts, + headers: this[_headers](), + spec: this.spec, + // never check integrity for packuments themselves + integrity: null, + }) + const packument = await res.json() + packument._contentLength = +res.headers.get('content-length') + if (this.packumentCache) { + this.packumentCache.set(this.packumentUrl, packument) + } + return packument + } catch (err) { + if (this.packumentCache) { + this.packumentCache.delete(this.packumentUrl) + } + if (err.code !== 'E404' || this.fullMetadata) { + throw err + } + // possible that corgis are not supported by this registry + this.fullMetadata = true + return this.packument() + } + } + + async manifest () { + if (this.package) { + return this.package + } + + const packument = await this.packument() + let mani = await pickManifest(packument, this.spec.fetchSpec, { + ...this.opts, + defaultTag: this.defaultTag, + before: this.before, + }) + mani = rpj.normalize(mani) + /* XXX add ETARGET and E403 revalidation of cached packuments here */ + + // add _resolved and _integrity from dist object + const { dist } = mani + if (dist) { + this.resolved = mani._resolved = dist.tarball + mani._from = this.from + const distIntegrity = dist.integrity ? ssri.parse(dist.integrity) + : dist.shasum ? ssri.fromHex(dist.shasum, 'sha1', { ...this.opts }) + : null + if (distIntegrity) { + if (this.integrity && !this.integrity.match(distIntegrity)) { + // only bork if they have algos in common. + // otherwise we end up breaking if we have saved a sha512 + // previously for the tarball, but the manifest only + // provides a sha1, which is possible for older publishes. + // Otherwise, this is almost certainly a case of holding it + // wrong, and will result in weird or insecure behavior + // later on when building package tree. 
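// A minimal sketch (not pacote's actual code) of the comparison rule the comment
// above describes, using ssri as the surrounding code does: a saved integrity and
// a dist integrity only truly conflict when they share an algorithm whose digests
// disagree; disjoint algorithm sets (a saved sha512 against a manifest that only
// carries a sha1) are tolerated rather than treated as corruption.
const ssri = require('ssri')
const integrityConflict = (saved, dist) => {
  const a = ssri.parse(saved)
  const b = ssri.parse(dist)
  if (a.match(b)) {
    return false // digests agree on some common algorithm
  }
  // no match: it only counts as a real mismatch if an algorithm is shared
  return Object.keys(a).some(algo => b[algo])
}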
+ for (const algo of Object.keys(this.integrity)) { + if (distIntegrity[algo]) { + throw Object.assign(new Error( + `Integrity checksum failed when using ${algo}: ` + + `wanted ${this.integrity} but got ${distIntegrity}.` + ), { code: 'EINTEGRITY' }) + } + } + } + // made it this far, the integrity is worthwhile. accept it. + // the setter here will take care of merging it into what we already + // had. + this.integrity = distIntegrity + } + } + if (this.integrity) { + mani._integrity = String(this.integrity) + if (dist.signatures) { + if (this.opts.verifySignatures) { + // validate and throw on error, then set _signatures + const message = `${mani._id}:${mani._integrity}` + for (const signature of dist.signatures) { + const publicKey = this.registryKeys && + this.registryKeys.filter(key => (key.keyid === signature.keyid))[0] + if (!publicKey) { + throw Object.assign(new Error( + `${mani._id} has a registry signature with keyid: ${signature.keyid} ` + + 'but no corresponding public key can be found' + ), { code: 'EMISSINGSIGNATUREKEY' }) + } + const validPublicKey = + !publicKey.expires || (Date.parse(publicKey.expires) > Date.now()) + if (!validPublicKey) { + throw Object.assign(new Error( + `${mani._id} has a registry signature with keyid: ${signature.keyid} ` + + `but the corresponding public key has expired ${publicKey.expires}` + ), { code: 'EEXPIREDSIGNATUREKEY' }) + } + const verifier = crypto.createVerify('SHA256') + verifier.write(message) + verifier.end() + const valid = verifier.verify( + publicKey.pemkey, + signature.sig, + 'base64' + ) + if (!valid) { + throw Object.assign(new Error( + `${mani._id} has an invalid registry signature with ` + + `keyid: ${publicKey.keyid} and signature: ${signature.sig}` + ), { + code: 'EINTEGRITYSIGNATURE', + keyid: publicKey.keyid, + signature: signature.sig, + resolved: mani._resolved, + integrity: mani._integrity, + }) + } + } + mani._signatures = dist.signatures + } else { + mani._signatures = dist.signatures + } + } + + if (dist.attestations) { + if (this.opts.verifyAttestations) { + // Always fetch attestations from the current registry host + const attestationsPath = new URL(dist.attestations.url).pathname + const attestationsUrl = removeTrailingSlashes(this.registry) + attestationsPath + const res = await fetch(attestationsUrl, { + ...this.opts, + // disable integrity check for attestations json payload, we check the + // integrity in the verification steps below + integrity: null, + }) + const { attestations } = await res.json() + const bundles = attestations.map(({ predicateType, bundle }) => { + const statement = JSON.parse( + Buffer.from(bundle.dsseEnvelope.payload, 'base64').toString('utf8') + ) + const keyid = bundle.dsseEnvelope.signatures[0].keyid + const signature = bundle.dsseEnvelope.signatures[0].sig + + return { + predicateType, + bundle, + statement, + keyid, + signature, + } + }) + + const attestationKeyIds = bundles.map((b) => b.keyid).filter((k) => !!k) + const attestationRegistryKeys = (this.registryKeys || []) + .filter(key => attestationKeyIds.includes(key.keyid)) + if (!attestationRegistryKeys.length) { + throw Object.assign(new Error( + `${mani._id} has attestations but no corresponding public key(s) can be found` + ), { code: 'EMISSINGSIGNATUREKEY' }) + } + + for (const { predicateType, bundle, keyid, signature, statement } of bundles) { + const publicKey = attestationRegistryKeys.find(key => key.keyid === keyid) + // Publish attestations have a keyid set and a valid public key must be found + if (keyid) { + if 
(!publicKey) { + throw Object.assign(new Error( + `${mani._id} has attestations with keyid: ${keyid} ` + + 'but no corresponding public key can be found' + ), { code: 'EMISSINGSIGNATUREKEY' }) + } + + const validPublicKey = + !publicKey.expires || (Date.parse(publicKey.expires) > Date.now()) + if (!validPublicKey) { + throw Object.assign(new Error( + `${mani._id} has attestations with keyid: ${keyid} ` + + `but the corresponding public key has expired ${publicKey.expires}` + ), { code: 'EEXPIREDSIGNATUREKEY' }) + } + } + + const subject = { + name: statement.subject[0].name, + sha512: statement.subject[0].digest.sha512, + } + + // Only type 'version' can be turned into a PURL + const purl = this.spec.type === 'version' ? npa.toPurl(this.spec) : this.spec + // Verify the statement subject matches the package, version + if (subject.name !== purl) { + throw Object.assign(new Error( + `${mani._id} package name and version (PURL): ${purl} ` + + `doesn't match what was signed: ${subject.name}` + ), { code: 'EATTESTATIONSUBJECT' }) + } + + // Verify the statement subject matches the tarball integrity + const integrityHexDigest = ssri.parse(this.integrity).hexDigest() + if (subject.sha512 !== integrityHexDigest) { + throw Object.assign(new Error( + `${mani._id} package integrity (hex digest): ` + + `${integrityHexDigest} ` + + `doesn't match what was signed: ${subject.sha512}` + ), { code: 'EATTESTATIONSUBJECT' }) + } + + try { + // Provenance attestations are signed with a signing certificate + // (including the key) so we don't need to return a public key. + // + // Publish attestations are signed with a keyid so we need to + // specify a public key from the keys endpoint: `registry-host.tld/-/npm/v1/keys` + const options = { + tufCachePath: this.tufCache, + keySelector: publicKey ? 
() => publicKey.pemkey : undefined, + } + await sigstore.verify(bundle, null, options) + } catch (e) { + throw Object.assign(new Error( + `${mani._id} failed to verify attestation: ${e.message}` + ), { + code: 'EATTESTATIONVERIFY', + predicateType, + keyid, + signature, + resolved: mani._resolved, + integrity: mani._integrity, + }) + } + } + mani._attestations = dist.attestations + } else { + mani._attestations = dist.attestations + } + } + } + + this.package = mani + return this.package + } + + [_tarballFromResolved] () { + // we use a RemoteFetcher to get the actual tarball stream + return new RemoteFetcher(this.resolved, { + ...this.opts, + resolved: this.resolved, + pkgid: `registry:${this.spec.name}@${this.resolved}`, + })[_tarballFromResolved]() + } + + get types () { + return [ + 'tag', + 'version', + 'range', + ] + } +} +module.exports = RegistryFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js new file mode 100644 index 0000000000000..fd617459fb031 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/remote.js @@ -0,0 +1,91 @@ +const Fetcher = require('./fetcher.js') +const FileFetcher = require('./file.js') +const _tarballFromResolved = Symbol.for('pacote.Fetcher._tarballFromResolved') +const pacoteVersion = require('../package.json').version +const fetch = require('npm-registry-fetch') +const { Minipass } = require('minipass') + +const _cacheFetches = Symbol.for('pacote.Fetcher._cacheFetches') +const _headers = Symbol('_headers') +class RemoteFetcher extends Fetcher { + constructor (spec, opts) { + super(spec, opts) + this.resolved = this.spec.fetchSpec + const resolvedURL = new URL(this.resolved) + if (this.replaceRegistryHost !== 'never' + && (this.replaceRegistryHost === 'always' + || this.replaceRegistryHost === resolvedURL.host)) { + this.resolved = new URL(resolvedURL.pathname, this.registry).href + } + + // nam is a fermented pork sausage that is good to eat + const nameat = this.spec.name ? `${this.spec.name}@` : '' + this.pkgid = opts.pkgid ? opts.pkgid : `remote:${nameat}${this.resolved}` + } + + // Don't need to cache tarball fetches in pacote, because make-fetch-happen + // will write into cacache anyway. + get [_cacheFetches] () { + return false + } + + [_tarballFromResolved] () { + const stream = new Minipass() + stream.hasIntegrityEmitter = true + + const fetchOpts = { + ...this.opts, + headers: this[_headers](), + spec: this.spec, + integrity: this.integrity, + algorithms: [this.pickIntegrityAlgorithm()], + } + + // eslint-disable-next-line promise/always-return + fetch(this.resolved, fetchOpts).then(res => { + res.body.on('error', + /* istanbul ignore next - exceedingly rare and hard to simulate */ + er => stream.emit('error', er) + ) + + res.body.on('integrity', i => { + this.integrity = i + stream.emit('integrity', i) + }) + + res.body.pipe(stream) + }).catch(er => stream.emit('error', er)) + + return stream + } + + [_headers] () { + return { + // npm will override this, but ensure that we always send *something* + 'user-agent': this.opts.userAgent || + `pacote/${pacoteVersion} node/${process.version}`, + ...(this.opts.headers || {}), + 'pacote-version': pacoteVersion, + 'pacote-req-type': 'tarball', + 'pacote-pkg-id': this.pkgid, + ...(this.integrity ? 
{ 'pacote-integrity': String(this.integrity) } + : {}), + ...(this.opts.headers || {}), + } + } + + get types () { + return ['remote'] + } + + // getting a packument and/or manifest is the same as with a file: spec. + // unpack the tarball stream, and then read from the package.json file. + packument () { + return FileFetcher.prototype.packument.apply(this) + } + + manifest () { + return FileFetcher.prototype.manifest.apply(this) + } +} +module.exports = RemoteFetcher diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js new file mode 100644 index 0000000000000..843fe5b600caf --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/add-git-sha.js @@ -0,0 +1,15 @@ +// add a sha to a git remote url spec +const addGitSha = (spec, sha) => { + if (spec.hosted) { + const h = spec.hosted + const opt = { noCommittish: true } + const base = h.https && h.auth ? h.https(opt) : h.shortcut(opt) + + return `${base}#${sha}` + } else { + // don't use new URL for this, because it doesn't handle scp urls + return spec.rawSpec.replace(/#.*$/, '') + `#${sha}` + } +} + +module.exports = addGitSha diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js new file mode 100644 index 0000000000000..ac83b1793f199 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/cache-dir.js @@ -0,0 +1,15 @@ +const os = require('os') +const { resolve } = require('path') + +module.exports = (fakePlatform = false) => { + const temp = os.tmpdir() + const uidOrPid = process.getuid ? process.getuid() : process.pid + const home = os.homedir() || resolve(temp, 'npm-' + uidOrPid) + const platform = fakePlatform || process.platform + const cacheExtra = platform === 'win32' ? 'npm-cache' : '.npm' + const cacheRoot = (platform === 'win32' && process.env.LOCALAPPDATA) || home + return { + cacache: resolve(cacheRoot, cacheExtra, '_cacache'), + tufcache: resolve(cacheRoot, cacheExtra, '_tuf'), + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js new file mode 100644 index 0000000000000..49a3f73f537ce --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/is-package-bin.js @@ -0,0 +1,25 @@ +// Function to determine whether a path is in the package.bin set. +// Used to prevent issues when people publish a package from a +// windows machine, and then install with --no-bin-links. +// +// Note: this is not possible in remote or file fetchers, since +// we don't have the manifest until AFTER we've unpacked. But the +// main use case is registry fetching with git a distant second, +// so that's an acceptable edge case to not handle. + +const binObj = (name, bin) => + typeof bin === 'string' ? { [name]: bin } : bin + +const hasBin = (pkg, path) => { + const bin = binObj(pkg.name, pkg.bin) + const p = path.replace(/^[^\\/]*\//, '') + for (const kv of Object.entries(bin)) { + if (kv[1] === p) { + return true + } + } + return false +} + +module.exports = (pkg, path) => + pkg && pkg.bin ? 
hasBin(pkg, path) : false diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js new file mode 100644 index 0000000000000..a3005c255565f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/npm.js @@ -0,0 +1,14 @@ +// run an npm command +const spawn = require('@npmcli/promise-spawn') + +module.exports = (npmBin, npmCommand, cwd, env, extra) => { + const isJS = npmBin.endsWith('.js') + const cmd = isJS ? process.execPath : npmBin + const args = (isJS ? [npmBin] : []).concat(npmCommand) + // when installing to run the `prepare` script for a git dep, we need + // to ensure that we don't run into a cycle of checking out packages + // in temp directories. this lets us link previously-seen repos that + // are also being prepared. + + return spawn(cmd, args, { cwd, env }, extra) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js new file mode 100644 index 0000000000000..d070f0f7ba2d4 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/tar-create-options.js @@ -0,0 +1,31 @@ +const isPackageBin = require('./is-package-bin.js') + +const tarCreateOptions = manifest => ({ + cwd: manifest._resolved, + prefix: 'package/', + portable: true, + gzip: { + // forcing the level to 9 seems to avoid some + // platform specific optimizations that cause + // integrity mismatch errors due to differing + // end results after compression + level: 9, + }, + + // ensure that package bins are always executable + // Note that npm-packlist is already filtering out + // anything that is not a regular file, ignored by + // .npmignore or package.json "files", etc. + filter: (path, stat) => { + if (isPackageBin(manifest, path)) { + stat.mode |= 0o111 + } + return true + }, + + // Provide a specific date in the 1980s for the benefit of zip, + // which is confounded by files dated at the Unix epoch 0. 
+ mtime: new Date('1985-10-26T08:15:00.000Z'), +}) + +module.exports = tarCreateOptions diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js new file mode 100644 index 0000000000000..c50cb6173b92e --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/lib/util/trailing-slashes.js @@ -0,0 +1,10 @@ +const removeTrailingSlashes = (input) => { + // in order to avoid regexp redos detection + let output = input + while (output.endsWith('/')) { + output = output.slice(0, -1) + } + return output +} + +module.exports = removeTrailingSlashes diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json new file mode 100644 index 0000000000000..d9119065bfc3d --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/pacote/package.json @@ -0,0 +1,85 @@ +{ + "name": "pacote", + "version": "16.0.0", + "description": "JavaScript package downloader", + "author": "GitHub Inc.", + "bin": { + "pacote": "lib/bin.js" + }, + "license": "ISC", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "snap": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "tap": { + "timeout": 300, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0", + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.18.0", + "hosted-git-info": "^6.0.0", + "mutate-fs": "^2.1.1", + "nock": "^13.2.4", + "npm-registry-mock": "^1.3.2", + "tap": "^16.0.1" + }, + "files": [ + "bin/", + "lib/" + ], + "keywords": [ + "packages", + "npm", + "git" + ], + "dependencies": { + "@npmcli/git": "^4.0.0", + "@npmcli/installed-package-contents": "^2.0.1", + "@npmcli/promise-spawn": "^6.0.1", + "@npmcli/run-script": "^6.0.0", + "cacache": "^17.0.0", + "fs-minipass": "^3.0.0", + "minipass": "^7.0.2", + "npm-package-arg": "^10.0.0", + "npm-packlist": "^7.0.0", + "npm-pick-manifest": "^8.0.0", + "npm-registry-fetch": "^15.0.0", + "proc-log": "^3.0.0", + "promise-retry": "^2.0.1", + "read-package-json": "^6.0.0", + "read-package-json-fast": "^3.0.0", + "sigstore": "^1.3.0", + "ssri": "^10.0.0", + "tar": "^6.1.11" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/pacote.git" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ], + "version": "4.18.0", + "windowsCI": false, + "publish": "true" + } +} diff --git a/node_modules/pacote/node_modules/cacache/lib/content/path.js b/node_modules/pacote/node_modules/cacache/lib/content/path.js deleted file mode 100644 index ad5a76a4f73f2..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/content/path.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const contentVer = require('../../package.json')['cache-version'].content -const hashToSegments = require('../util/hash-to-segments') -const path = require('path') -const ssri = require('ssri') - -// Current format of content file path: -// -// sha512-BaSE64Hex= -> -// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee -// -module.exports = contentPath - -function contentPath (cache, integrity) { - const sri = ssri.parse(integrity, { single: true }) - // contentPath is the *strongest* algo given - return path.join( - contentDir(cache), - sri.algorithm, - ...hashToSegments(sri.hexDigest()) - ) -} - -module.exports.contentDir = contentDir - -function contentDir (cache) { - return path.join(cache, `content-v${contentVer}`) -} diff --git a/node_modules/pacote/node_modules/cacache/lib/content/read.js b/node_modules/pacote/node_modules/cacache/lib/content/read.js deleted file mode 100644 index f41b539df65dc..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/content/read.js +++ /dev/null @@ -1,166 +0,0 @@ -'use strict' - -const fs = require('fs/promises') -const fsm = require('fs-minipass') -const ssri = require('ssri') -const contentPath = require('./path') -const Pipeline = require('minipass-pipeline') - -module.exports = read - -const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 -async function read (cache, integrity, opts = {}) { - const { size } = opts - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // get size - const stat = await fs.stat(cpath) - return { stat, cpath, sri } - }) - if (typeof size === 'number' && stat.size !== size) { - throw sizeError(size, stat.size) - } - - if (stat.size > MAX_SINGLE_READ_SIZE) { - return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() - } - - const data = await fs.readFile(cpath, { encoding: null }) - if (!ssri.checkData(data, sri)) { - throw integrityError(sri, cpath) - } - - return data -} - -const readPipeline = (cpath, size, sri, stream) => { - stream.push( - new fsm.ReadStream(cpath, { - size, - readSize: MAX_SINGLE_READ_SIZE, - }), - ssri.integrityStream({ - integrity: sri, - size, - }) - ) - return stream -} - -module.exports.stream = readStream -module.exports.readStream = readStream - -function readStream (cache, integrity, opts = {}) { - const { size } = opts - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // just stat to ensure it exists - const stat = await fs.stat(cpath) - return { stat, cpath, sri } - }) - if (typeof size === 'number' && size !== stat.size) { - return stream.emit('error', sizeError(size, stat.size)) - } - - return readPipeline(cpath, stat.size, sri, stream) - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.copy = copy - -function copy (cache, integrity, dest) { - return withContentSri(cache, integrity, (cpath, sri) => { - return fs.copyFile(cpath, dest) - }) 
-} - -module.exports.hasContent = hasContent - -async function hasContent (cache, integrity) { - if (!integrity) { - return false - } - - try { - return await withContentSri(cache, integrity, async (cpath, sri) => { - const stat = await fs.stat(cpath) - return { size: stat.size, sri, stat } - }) - } catch (err) { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - } -} - -async function withContentSri (cache, integrity, fn) { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. - const algo = sri.pickAlgorithm() - const digests = sri[algo] - - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - // Can't use race here because a generic error can happen before - // a ENOENT error, and can happen before a valid result - const results = await Promise.all(digests.map(async (meta) => { - try { - return await withContentSri(cache, meta, fn) - } catch (err) { - if (err.code === 'ENOENT') { - return Object.assign( - new Error('No matching content found for ' + sri.toString()), - { code: 'ENOENT' } - ) - } - return err - } - })) - // Return the first non error if it is found - const result = results.find((r) => !(r instanceof Error)) - if (result) { - return result - } - - // Throw the No matching content found error - const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) { - throw enoentError - } - - // Throw generic error - throw results.find((r) => r instanceof Error) - } -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function integrityError (sri, path) { - const err = new Error(`Integrity verification failed for ${sri} (${path})`) - err.code = 'EINTEGRITY' - err.sri = sri - err.path = path - return err -} diff --git a/node_modules/pacote/node_modules/cacache/lib/content/rm.js b/node_modules/pacote/node_modules/cacache/lib/content/rm.js deleted file mode 100644 index ce58d679e4cb2..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/content/rm.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict' - -const fs = require('fs/promises') -const contentPath = require('./path') -const { hasContent } = require('./read') - -module.exports = rm - -async function rm (cache, integrity) { - const content = await hasContent(cache, integrity) - // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) { - await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) - return true - } else { - return false - } -} diff --git a/node_modules/pacote/node_modules/cacache/lib/content/write.js b/node_modules/pacote/node_modules/cacache/lib/content/write.js deleted file mode 100644 index 7146146581287..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/content/write.js +++ /dev/null @@ -1,205 +0,0 @@ -'use strict' - -const events = require('events') - -const contentPath = require('./path') -const fs = require('fs/promises') -const { moveFile } = require('@npmcli/fs') -const { Minipass } = require('minipass') -const Pipeline = require('minipass-pipeline') -const Flush = 
require('minipass-flush') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') -const fsm = require('fs-minipass') - -module.exports = write - -// Cache of move operations in process so we don't duplicate -const moveOperations = new Map() - -async function write (cache, data, opts = {}) { - const { algorithms, size, integrity } = opts - - if (typeof size === 'number' && data.length !== size) { - throw sizeError(size, data.length) - } - - const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) - if (integrity && !ssri.checkData(data, integrity, opts)) { - throw checksumError(integrity, sri) - } - - for (const algo in sri) { - const tmp = await makeTmp(cache, opts) - const hash = sri[algo].toString() - try { - await fs.writeFile(tmp.target, data, { flag: 'wx' }) - await moveToDestination(tmp, cache, hash, opts) - } finally { - if (!tmp.moved) { - await fs.rm(tmp.target, { recursive: true, force: true }) - } - } - } - return { integrity: sri, size: data.length } -} - -module.exports.stream = writeStream - -// writes proxied to the 'inputStream' that is passed to the Promise -// 'end' is deferred until content is handled. -class CacacheWriteStream extends Flush { - constructor (cache, opts) { - super() - this.opts = opts - this.cache = cache - this.inputStream = new Minipass() - this.inputStream.on('error', er => this.emit('error', er)) - this.inputStream.on('drain', () => this.emit('drain')) - this.handleContentP = null - } - - write (chunk, encoding, cb) { - if (!this.handleContentP) { - this.handleContentP = handleContent( - this.inputStream, - this.cache, - this.opts - ) - } - return this.inputStream.write(chunk, encoding, cb) - } - - flush (cb) { - this.inputStream.end(() => { - if (!this.handleContentP) { - const e = new Error('Cache input stream was empty') - e.code = 'ENODATA' - // empty streams are probably emitting end right away. - // defer this one tick by rejecting a promise on it. 
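// The deferral idiom referenced in the comment above, isolated as a sketch.
// Calling the callback synchronously from inside end() could re-enter the stream
// while it is still settling, so the error is routed through an already-rejected
// promise, which delivers the callback on the microtask queue one tick later.
const failLater = (err, cb) => Promise.reject(err).catch(cb)
// failLater(new Error('Cache input stream was empty'), cb)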
- return Promise.reject(e).catch(cb) - } - // eslint-disable-next-line promise/catch-or-return - this.handleContentP.then( - (res) => { - res.integrity && this.emit('integrity', res.integrity) - // eslint-disable-next-line promise/always-return - res.size !== null && this.emit('size', res.size) - cb() - }, - (er) => cb(er) - ) - }) - } -} - -function writeStream (cache, opts = {}) { - return new CacacheWriteStream(cache, opts) -} - -async function handleContent (inputStream, cache, opts) { - const tmp = await makeTmp(cache, opts) - try { - const res = await pipeToTmp(inputStream, cache, tmp.target, opts) - await moveToDestination( - tmp, - cache, - res.integrity, - opts - ) - return res - } finally { - if (!tmp.moved) { - await fs.rm(tmp.target, { recursive: true, force: true }) - } - } -} - -async function pipeToTmp (inputStream, cache, tmpTarget, opts) { - const outStream = new fsm.WriteStream(tmpTarget, { - flags: 'wx', - }) - - if (opts.integrityEmitter) { - // we need to create these all simultaneously since they can fire in any order - const [integrity, size] = await Promise.all([ - events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), - events.once(opts.integrityEmitter, 'size').then(res => res[0]), - new Pipeline(inputStream, outStream).promise(), - ]) - return { integrity, size } - } - - let integrity - let size - const hashStream = ssri.integrityStream({ - integrity: opts.integrity, - algorithms: opts.algorithms, - size: opts.size, - }) - hashStream.on('integrity', i => { - integrity = i - }) - hashStream.on('size', s => { - size = s - }) - - const pipeline = new Pipeline(inputStream, hashStream, outStream) - await pipeline.promise() - return { integrity, size } -} - -async function makeTmp (cache, opts) { - const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) - return { - target: tmpTarget, - moved: false, - } -} - -async function moveToDestination (tmp, cache, sri, opts) { - const destination = contentPath(cache, sri) - const destDir = path.dirname(destination) - if (moveOperations.has(destination)) { - return moveOperations.get(destination) - } - moveOperations.set( - destination, - fs.mkdir(destDir, { recursive: true }) - .then(async () => { - await moveFile(tmp.target, destination, { overwrite: false }) - tmp.moved = true - return tmp.moved - }) - .catch(err => { - if (!err.message.startsWith('The destination file exists')) { - throw Object.assign(err, { code: 'EEXIST' }) - } - }).finally(() => { - moveOperations.delete(destination) - }) - - ) - return moveOperations.get(destination) -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function checksumError (expected, found) { - const err = new Error(`Integrity check failed: - Wanted: ${expected} - Found: ${found}`) - err.code = 'EINTEGRITY' - err.expected = expected - err.found = found - return err -} diff --git a/node_modules/pacote/node_modules/cacache/lib/entry-index.js b/node_modules/pacote/node_modules/cacache/lib/entry-index.js deleted file mode 100644 index 722a37af5ce15..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/entry-index.js +++ /dev/null @@ -1,330 +0,0 @@ -'use strict' - -const crypto = require('crypto') -const { - appendFile, - mkdir, - readFile, - readdir, - 
rm, - writeFile, -} = require('fs/promises') -const { Minipass } = require('minipass') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') - -const contentPath = require('./content/path') -const hashToSegments = require('./util/hash-to-segments') -const indexV = require('../package.json')['cache-version'].index -const { moveFile } = require('@npmcli/fs') - -module.exports.NotFoundError = class NotFoundError extends Error { - constructor (cache, key) { - super(`No cache entry for ${key} found in ${cache}`) - this.code = 'ENOENT' - this.cache = cache - this.key = key - } -} - -module.exports.compact = compact - -async function compact (cache, key, matchFn, opts = {}) { - const bucket = bucketPath(cache, key) - const entries = await bucketEntries(bucket) - const newEntries = [] - // we loop backwards because the bottom-most result is the newest - // since we add new entries with appendFile - for (let i = entries.length - 1; i >= 0; --i) { - const entry = entries[i] - // a null integrity could mean either a delete was appended - // or the user has simply stored an index that does not map - // to any content. we determine if the user wants to keep the - // null integrity based on the validateEntry function passed in options. - // if the integrity is null and no validateEntry is provided, we break - // as we consider the null integrity to be a deletion of everything - // that came before it. - if (entry.integrity === null && !opts.validateEntry) { - break - } - - // if this entry is valid, and it is either the first entry or - // the newEntries array doesn't already include an entry that - // matches this one based on the provided matchFn, then we add - // it to the beginning of our list - if ((!opts.validateEntry || opts.validateEntry(entry) === true) && - (newEntries.length === 0 || - !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { - newEntries.unshift(entry) - } - } - - const newIndex = '\n' + newEntries.map((entry) => { - const stringified = JSON.stringify(entry) - const hash = hashEntry(stringified) - return `${hash}\t${stringified}` - }).join('\n') - - const setup = async () => { - const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await mkdir(path.dirname(target), { recursive: true }) - return { - target, - moved: false, - } - } - - const teardown = async (tmp) => { - if (!tmp.moved) { - return rm(tmp.target, { recursive: true, force: true }) - } - } - - const write = async (tmp) => { - await writeFile(tmp.target, newIndex, { flag: 'wx' }) - await mkdir(path.dirname(bucket), { recursive: true }) - // we use @npmcli/move-file directly here because we - // want to overwrite the existing file - await moveFile(tmp.target, bucket) - tmp.moved = true - } - - // write the file atomically - const tmp = await setup() - try { - await write(tmp) - } finally { - await teardown(tmp) - } - - // we reverse the list we generated such that the newest - // entries come first in order to make looping through them easier - // the true passed to formatEntry tells it to keep null - // integrity values, if they made it this far it's because - // validateEntry returned true, and as such we should return it - return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) -} - -module.exports.insert = insert - -async function insert (cache, key, integrity, opts = {}) { - const { metadata, size, time } = opts - const bucket = bucketPath(cache, key) - const entry = { - key, - integrity: integrity && 
ssri.stringify(integrity), - time: time || Date.now(), - size, - metadata, - } - try { - await mkdir(path.dirname(bucket), { recursive: true }) - const stringified = JSON.stringify(entry) - // NOTE - Cleverness ahoy! - // - // This works because it's tremendously unlikely for an entry to corrupt - // another while still preserving the string length of the JSON in - // question. So, we just slap the length in there and verify it on read. - // - // Thanks to @isaacs for the whiteboarding session that ended up with - // this. - await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - - throw err - } - return formatEntry(cache, entry) -} - -module.exports.find = find - -async function find (cache, key) { - const bucket = bucketPath(cache, key) - try { - const entries = await bucketEntries(bucket) - return entries.reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - } catch (err) { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - } -} - -module.exports.delete = del - -function del (cache, key, opts = {}) { - if (!opts.removeFully) { - return insert(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rm(bucket, { recursive: true, force: true }) -} - -module.exports.lsStream = lsStream - -function lsStream (cache) { - const indexDir = bucketDir(cache) - const stream = new Minipass({ objectMode: true }) - - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const buckets = await readdirOrEmpty(indexDir) - await Promise.all(buckets.map(async (bucket) => { - const bucketPath = path.join(indexDir, bucket) - const subbuckets = await readdirOrEmpty(bucketPath) - await Promise.all(subbuckets.map(async (subbucket) => { - const subbucketPath = path.join(bucketPath, subbucket) - - // "/cachename//./*" - const subbucketEntries = await readdirOrEmpty(subbucketPath) - await Promise.all(subbucketEntries.map(async (entry) => { - const entryPath = path.join(subbucketPath, entry) - try { - const entries = await bucketEntries(entryPath) - // using a Map here prevents duplicate keys from showing up - // twice, I guess? - const reduced = entries.reduce((acc, entry) => { - acc.set(entry.key, entry) - return acc - }, new Map()) - // reduced is a map of key => entry - for (const entry of reduced.values()) { - const formatted = formatEntry(cache, entry) - if (formatted) { - stream.write(formatted) - } - } - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - throw err - } - })) - })) - })) - stream.end() - return stream - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.ls = ls - -async function ls (cache) { - const entries = await lsStream(cache).collect() - return entries.reduce((acc, xs) => { - acc[xs.key] = xs - return acc - }, {}) -} - -module.exports.bucketEntries = bucketEntries - -async function bucketEntries (bucket, filter) { - const data = await readFile(bucket, 'utf8') - return _bucketEntries(data, filter) -} - -function _bucketEntries (data, filter) { - const entries = [] - data.split('\n').forEach((entry) => { - if (!entry) { - return - } - - const pieces = entry.split('\t') - if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { - // Hash is no good! Corruption or malice? Doesn't matter! 
- // EJECT EJECT - return - } - let obj - try { - obj = JSON.parse(pieces[1]) - } catch (_) { - // eslint-ignore-next-line no-empty-block - } - // coverage disabled here, no need to test with an entry that parses to something falsey - // istanbul ignore else - if (obj) { - entries.push(obj) - } - }) - return entries -} - -module.exports.bucketDir = bucketDir - -function bucketDir (cache) { - return path.join(cache, `index-v${indexV}`) -} - -module.exports.bucketPath = bucketPath - -function bucketPath (cache, key) { - const hashed = hashKey(key) - return path.join.apply( - path, - [bucketDir(cache)].concat(hashToSegments(hashed)) - ) -} - -module.exports.hashKey = hashKey - -function hashKey (key) { - return hash(key, 'sha256') -} - -module.exports.hashEntry = hashEntry - -function hashEntry (str) { - return hash(str, 'sha1') -} - -function hash (str, digest) { - return crypto - .createHash(digest) - .update(str) - .digest('hex') -} - -function formatEntry (cache, entry, keepAll) { - // Treat null digests as deletions. They'll shadow any previous entries. - if (!entry.integrity && !keepAll) { - return null - } - - return { - key: entry.key, - integrity: entry.integrity, - path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, - size: entry.size, - time: entry.time, - metadata: entry.metadata, - } -} - -function readdirOrEmpty (dir) { - return readdir(dir).catch((err) => { - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { - return [] - } - - throw err - }) -} diff --git a/node_modules/pacote/node_modules/cacache/lib/get.js b/node_modules/pacote/node_modules/cacache/lib/get.js deleted file mode 100644 index 80ec206c7ecaa..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/get.js +++ /dev/null @@ -1,170 +0,0 @@ -'use strict' - -const Collect = require('minipass-collect') -const { Minipass } = require('minipass') -const Pipeline = require('minipass-pipeline') - -const index = require('./entry-index') -const memo = require('./memoization') -const read = require('./content/read') - -async function getData (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } - } - - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - const data = await read(cache, entry.integrity, { integrity, size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return { - data, - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} -module.exports = getData - -async function getDataByDigest (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, key, opts) - if (memoized && memoize !== false) { - return memoized - } - - const res = await read(cache, key, { integrity, size }) - if (memoize) { - memo.put.byDigest(cache, key, res, opts) - } - return res -} -module.exports.byDigest = getDataByDigest - -const getMemoizedStream = (memoized) => { - const stream = new Minipass() - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(memoized.entry.metadata) - ev === 'integrity' && cb(memoized.entry.integrity) - ev === 'size' && cb(memoized.entry.size) - }) - stream.end(memoized.data) - return stream -} - -function getStream (cache, key, opts = {}) { - const { memoize, size } 
= opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return getMemoizedStream(memoized) - } - - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const entry = await index.find(cache, key) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - - stream.emit('metadata', entry.metadata) - stream.emit('integrity', entry.integrity) - stream.emit('size', entry.size) - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(entry.metadata) - ev === 'integrity' && cb(entry.integrity) - ev === 'size' && cb(entry.size) - }) - - const src = read.readStream( - cache, - entry.integrity, - { ...opts, size: typeof size !== 'number' ? entry.size : size } - ) - - if (memoize) { - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put(cache, entry, data, opts)) - stream.unshift(memoStream) - } - stream.unshift(src) - return stream - }).catch((err) => stream.emit('error', err)) - - return stream -} - -module.exports.stream = getStream - -function getStreamDigest (cache, integrity, opts = {}) { - const { memoize } = opts - const memoized = memo.get.byDigest(cache, integrity, opts) - if (memoized && memoize !== false) { - const stream = new Minipass() - stream.end(memoized) - return stream - } else { - const stream = read.readStream(cache, integrity, opts) - if (!memoize) { - return stream - } - - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put.byDigest( - cache, - integrity, - data, - opts - )) - return new Pipeline(stream, memoStream) - } -} - -module.exports.stream.byDigest = getStreamDigest - -function info (cache, key, opts = {}) { - const { memoize } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve(memoized.entry) - } else { - return index.find(cache, key) - } -} -module.exports.info = info - -async function copy (cache, key, dest, opts = {}) { - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - await read.copy(cache, entry.integrity, dest, opts) - return { - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} - -module.exports.copy = copy - -async function copyByDigest (cache, key, dest, opts = {}) { - await read.copy(cache, key, dest, opts) - return key -} - -module.exports.copy.byDigest = copyByDigest - -module.exports.hasContent = read.hasContent diff --git a/node_modules/pacote/node_modules/cacache/lib/index.js b/node_modules/pacote/node_modules/cacache/lib/index.js deleted file mode 100644 index c9b0da5f3a271..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/index.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict' - -const get = require('./get.js') -const put = require('./put.js') -const rm = require('./rm.js') -const verify = require('./verify.js') -const { clearMemoized } = require('./memoization.js') -const tmp = require('./util/tmp.js') -const index = require('./entry-index.js') - -module.exports.index = {} -module.exports.index.compact = index.compact -module.exports.index.insert = index.insert - -module.exports.ls = index.ls -module.exports.ls.stream = index.lsStream - -module.exports.get = get -module.exports.get.byDigest = get.byDigest -module.exports.get.stream = get.stream -module.exports.get.stream.byDigest = get.stream.byDigest -module.exports.get.copy = get.copy 
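// A usage sketch of the cacache surface wired up in the deleted index.js above
// (cache path and key are illustrative; the calls match the modules shown here).
const cacache = require('cacache')
const demo = async (cachePath = '/tmp/demo-cache') => {
  // put resolves to the subresource integrity computed for the stored content
  const integrity = await cacache.put(cachePath, 'registry:demo', Buffer.from('hello'))
  // get by key goes through the entry index: { data, metadata, integrity, size }
  const byKey = await cacache.get(cachePath, 'registry:demo')
  // get.byDigest skips the index and reads content-addressed data directly
  const byDigest = await cacache.get.byDigest(cachePath, integrity)
  // get.info returns only the index entry, without touching content
  const entry = await cacache.get.info(cachePath, 'registry:demo')
  return { byKey, byDigest, entry }
}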
-module.exports.get.copy.byDigest = get.copy.byDigest -module.exports.get.info = get.info -module.exports.get.hasContent = get.hasContent - -module.exports.put = put -module.exports.put.stream = put.stream - -module.exports.rm = rm.entry -module.exports.rm.all = rm.all -module.exports.rm.entry = module.exports.rm -module.exports.rm.content = rm.content - -module.exports.clearMemoized = clearMemoized - -module.exports.tmp = {} -module.exports.tmp.mkdir = tmp.mkdir -module.exports.tmp.withTmp = tmp.withTmp - -module.exports.verify = verify -module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/pacote/node_modules/cacache/lib/memoization.js b/node_modules/pacote/node_modules/cacache/lib/memoization.js deleted file mode 100644 index 0ff604a479c9c..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/memoization.js +++ /dev/null @@ -1,72 +0,0 @@ -'use strict' - -const LRU = require('lru-cache') - -const MEMOIZED = new LRU({ - max: 500, - maxSize: 50 * 1024 * 1024, // 50MB - ttl: 3 * 60 * 1000, // 3 minutes - sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, -}) - -module.exports.clearMemoized = clearMemoized - -function clearMemoized () { - const old = {} - MEMOIZED.forEach((v, k) => { - old[k] = v - }) - MEMOIZED.clear() - return old -} - -module.exports.put = put - -function put (cache, entry, data, opts) { - pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) - putDigest(cache, entry.integrity, data, opts) -} - -module.exports.put.byDigest = putDigest - -function putDigest (cache, integrity, data, opts) { - pickMem(opts).set(`digest:${cache}:${integrity}`, data) -} - -module.exports.get = get - -function get (cache, key, opts) { - return pickMem(opts).get(`key:${cache}:${key}`) -} - -module.exports.get.byDigest = getDigest - -function getDigest (cache, integrity, opts) { - return pickMem(opts).get(`digest:${cache}:${integrity}`) -} - -class ObjProxy { - constructor (obj) { - this.obj = obj - } - - get (key) { - return this.obj[key] - } - - set (key, val) { - this.obj[key] = val - } -} - -function pickMem (opts) { - if (!opts || !opts.memoize) { - return MEMOIZED - } else if (opts.memoize.get && opts.memoize.set) { - return opts.memoize - } else if (typeof opts.memoize === 'object') { - return new ObjProxy(opts.memoize) - } else { - return MEMOIZED - } -} diff --git a/node_modules/pacote/node_modules/cacache/lib/put.js b/node_modules/pacote/node_modules/cacache/lib/put.js deleted file mode 100644 index 9fc932d5f6dec..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/put.js +++ /dev/null @@ -1,80 +0,0 @@ -'use strict' - -const index = require('./entry-index') -const memo = require('./memoization') -const write = require('./content/write') -const Flush = require('minipass-flush') -const { PassThrough } = require('minipass-collect') -const Pipeline = require('minipass-pipeline') - -const putOpts = (opts) => ({ - algorithms: ['sha512'], - ...opts, -}) - -module.exports = putData - -async function putData (cache, key, data, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - const res = await write(cache, data, opts) - const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return res.integrity -} - -module.exports.stream = putStream - -function putStream (cache, key, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - let integrity - let size - let error - - let memoData - const 
pipeline = new Pipeline() - // first item in the pipeline is the memoizer, because we need - // that to end first and get the collected data. - if (memoize) { - const memoizer = new PassThrough().on('collect', data => { - memoData = data - }) - pipeline.push(memoizer) - } - - // contentStream is a write-only, not a passthrough - // no data comes out of it. - const contentStream = write.stream(cache, opts) - .on('integrity', (int) => { - integrity = int - }) - .on('size', (s) => { - size = s - }) - .on('error', (err) => { - error = err - }) - - pipeline.push(contentStream) - - // last but not least, we write the index and emit hash and size, - // and memoize if we're doing that - pipeline.push(new Flush({ - async flush () { - if (!error) { - const entry = await index.insert(cache, key, integrity, { ...opts, size }) - if (memoize && memoData) { - memo.put(cache, entry, memoData, opts) - } - pipeline.emit('integrity', integrity) - pipeline.emit('size', size) - } - }, - })) - - return pipeline -} diff --git a/node_modules/pacote/node_modules/cacache/lib/rm.js b/node_modules/pacote/node_modules/cacache/lib/rm.js deleted file mode 100644 index a94760c7cf243..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/rm.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -const { rm } = require('fs/promises') -const glob = require('./util/glob.js') -const index = require('./entry-index') -const memo = require('./memoization') -const path = require('path') -const rmContent = require('./content/rm') - -module.exports = entry -module.exports.entry = entry - -function entry (cache, key, opts) { - memo.clearMemoized() - return index.delete(cache, key, opts) -} - -module.exports.content = content - -function content (cache, integrity) { - memo.clearMemoized() - return rmContent(cache, integrity) -} - -module.exports.all = all - -async function all (cache) { - memo.clearMemoized() - const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) - return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) -} diff --git a/node_modules/pacote/node_modules/cacache/lib/util/glob.js b/node_modules/pacote/node_modules/cacache/lib/util/glob.js deleted file mode 100644 index 8500c1c16a429..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/util/glob.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -const { glob } = require('glob') -const path = require('path') - -const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) -module.exports = (path, options) => glob(globify(path), options) diff --git a/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js deleted file mode 100644 index 445599b503808..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -module.exports = hashToSegments - -function hashToSegments (hash) { - return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] -} diff --git a/node_modules/pacote/node_modules/cacache/lib/util/tmp.js b/node_modules/pacote/node_modules/cacache/lib/util/tmp.js deleted file mode 100644 index 0bf5302136ebe..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/util/tmp.js +++ /dev/null @@ -1,26 +0,0 @@ -'use strict' - -const { withTempDir } = require('@npmcli/fs') -const fs = require('fs/promises') -const path = require('path') - -module.exports.mkdir = mktmpdir - -async function mktmpdir (cache, 
opts = {}) { - const { tmpPrefix } = opts - const tmpDir = path.join(cache, 'tmp') - await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) - // do not use path.join(), it drops the trailing / if tmpPrefix is unset - const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` - return fs.mkdtemp(target, { owner: 'inherit' }) -} - -module.exports.withTmp = withTmp - -function withTmp (cache, opts, cb) { - if (!cb) { - cb = opts - opts = {} - } - return withTempDir(path.join(cache, 'tmp'), cb, opts) -} diff --git a/node_modules/pacote/node_modules/cacache/lib/verify.js b/node_modules/pacote/node_modules/cacache/lib/verify.js deleted file mode 100644 index 62e85c946490f..0000000000000 --- a/node_modules/pacote/node_modules/cacache/lib/verify.js +++ /dev/null @@ -1,257 +0,0 @@ -'use strict' - -const { - mkdir, - readFile, - rm, - stat, - truncate, - writeFile, -} = require('fs/promises') -const pMap = require('p-map') -const contentPath = require('./content/path') -const fsm = require('fs-minipass') -const glob = require('./util/glob.js') -const index = require('./entry-index') -const path = require('path') -const ssri = require('ssri') - -const hasOwnProperty = (obj, key) => - Object.prototype.hasOwnProperty.call(obj, key) - -const verifyOpts = (opts) => ({ - concurrency: 20, - log: { silly () {} }, - ...opts, -}) - -module.exports = verify - -async function verify (cache, opts) { - opts = verifyOpts(opts) - opts.log.silly('verify', 'verifying cache at', cache) - - const steps = [ - markStartTime, - fixPerms, - garbageCollect, - rebuildIndex, - cleanTmp, - writeVerifile, - markEndTime, - ] - - const stats = {} - for (const step of steps) { - const label = step.name - const start = new Date() - const s = await step(cache, opts) - if (s) { - Object.keys(s).forEach((k) => { - stats[k] = s[k] - }) - } - const end = new Date() - if (!stats.runTime) { - stats.runTime = {} - } - stats.runTime[label] = end - start - } - stats.runTime.total = stats.endTime - stats.startTime - opts.log.silly( - 'verify', - 'verification finished for', - cache, - 'in', - `${stats.runTime.total}ms` - ) - return stats -} - -async function markStartTime (cache, opts) { - return { startTime: new Date() } -} - -async function markEndTime (cache, opts) { - return { endTime: new Date() } -} - -async function fixPerms (cache, opts) { - opts.log.silly('verify', 'fixing cache permissions') - await mkdir(cache, { recursive: true }) - return null -} - -// Implements a naive mark-and-sweep tracing garbage collector. -// -// The algorithm is basically as follows: -// 1. Read (and filter) all index entries ("pointers") -// 2. Mark each integrity value as "live" -// 3. Read entire filesystem tree in `content-vX/` dir -// 4. If content is live, verify its checksum and delete it if it fails -// 5. If content is not marked as live, rm it. 
-// -async function garbageCollect (cache, opts) { - opts.log.silly('verify', 'garbage collecting content') - const indexStream = index.lsStream(cache) - const liveContent = new Set() - indexStream.on('data', (entry) => { - if (opts.filter && !opts.filter(entry)) { - return - } - - // integrity is stringified, re-parse it so we can get each hash - const integrity = ssri.parse(entry.integrity) - for (const algo in integrity) { - liveContent.add(integrity[algo].toString()) - } - }) - await new Promise((resolve, reject) => { - indexStream.on('end', resolve).on('error', reject) - }) - const contentDir = contentPath.contentDir(cache) - const files = await glob(path.join(contentDir, '**'), { - follow: false, - nodir: true, - nosort: true, - }) - const stats = { - verifiedContent: 0, - reclaimedCount: 0, - reclaimedSize: 0, - badContentCount: 0, - keptSize: 0, - } - await pMap( - files, - async (f) => { - const split = f.split(/[/\\]/) - const digest = split.slice(split.length - 3).join('') - const algo = split[split.length - 4] - const integrity = ssri.fromHex(digest, algo) - if (liveContent.has(integrity.toString())) { - const info = await verifyContent(f, integrity) - if (!info.valid) { - stats.reclaimedCount++ - stats.badContentCount++ - stats.reclaimedSize += info.size - } else { - stats.verifiedContent++ - stats.keptSize += info.size - } - } else { - // No entries refer to this content. We can delete. - stats.reclaimedCount++ - const s = await stat(f) - await rm(f, { recursive: true, force: true }) - stats.reclaimedSize += s.size - } - return stats - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function verifyContent (filepath, sri) { - const contentInfo = {} - try { - const { size } = await stat(filepath) - contentInfo.size = size - contentInfo.valid = true - await ssri.checkStream(new fsm.ReadStream(filepath), sri) - } catch (err) { - if (err.code === 'ENOENT') { - return { size: 0, valid: false } - } - if (err.code !== 'EINTEGRITY') { - throw err - } - - await rm(filepath, { recursive: true, force: true }) - contentInfo.valid = false - } - return contentInfo -} - -async function rebuildIndex (cache, opts) { - opts.log.silly('verify', 'rebuilding index') - const entries = await index.ls(cache) - const stats = { - missingContent: 0, - rejectedEntries: 0, - totalEntries: 0, - } - const buckets = {} - for (const k in entries) { - /* istanbul ignore else */ - if (hasOwnProperty(entries, k)) { - const hashed = index.hashKey(k) - const entry = entries[k] - const excluded = opts.filter && !opts.filter(entry) - excluded && stats.rejectedEntries++ - if (buckets[hashed] && !excluded) { - buckets[hashed].push(entry) - } else if (buckets[hashed] && excluded) { - // skip - } else if (excluded) { - buckets[hashed] = [] - buckets[hashed]._path = index.bucketPath(cache, k) - } else { - buckets[hashed] = [entry] - buckets[hashed]._path = index.bucketPath(cache, k) - } - } - } - await pMap( - Object.keys(buckets), - (key) => { - return rebuildBucket(cache, buckets[key], stats, opts) - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function rebuildBucket (cache, bucket, stats, opts) { - await truncate(bucket._path) - // This needs to be serialized because cacache explicitly - // lets very racy bucket conflicts clobber each other. 
- for (const entry of bucket) { - const content = contentPath(cache, entry.integrity) - try { - await stat(content) - await index.insert(cache, entry.key, entry.integrity, { - metadata: entry.metadata, - size: entry.size, - time: entry.time, - }) - stats.totalEntries++ - } catch (err) { - if (err.code === 'ENOENT') { - stats.rejectedEntries++ - stats.missingContent++ - } else { - throw err - } - } - } -} - -function cleanTmp (cache, opts) { - opts.log.silly('verify', 'cleaning tmp directory') - return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) -} - -async function writeVerifile (cache, opts) { - const verifile = path.join(cache, '_lastverified') - opts.log.silly('verify', 'writing verifile to ' + verifile) - return writeFile(verifile, `${Date.now()}`) -} - -module.exports.lastRun = lastRun - -async function lastRun (cache) { - const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) - return new Date(+data) -} diff --git a/node_modules/pacote/node_modules/cacache/package.json b/node_modules/pacote/node_modules/cacache/package.json deleted file mode 100644 index ab58cb8b7c50f..0000000000000 --- a/node_modules/pacote/node_modules/cacache/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "cacache", - "version": "17.1.4", - "cache-version": { - "content": "2", - "index": "5" - }, - "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "test": "tap", - "snap": "tap", - "coverage": "tap", - "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", - "lint": "eslint \"**/*.js\"", - "npmclilint": "npmcli-lint", - "lintfix": "npm run lint -- --fix", - "postsnap": "npm run lintfix --", - "postlint": "template-oss-check", - "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/cacache.git" - }, - "keywords": [ - "cache", - "caching", - "content-addressable", - "sri", - "sri hash", - "subresource integrity", - "cache", - "storage", - "store", - "file store", - "filesystem", - "disk cache", - "disk storage" - ], - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^3.1.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^7.7.1", - "minipass": "^7.0.3", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.18.0", - "tap": "^16.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "windowsCI": false, - "version": "4.18.0", - "publish": "true" - }, - "author": "GitHub Inc.", - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/pacote/node_modules/normalize-package-data/LICENSE b/node_modules/pacote/node_modules/normalize-package-data/LICENSE new file mode 100644 index 0000000000000..19d1364a8ac08 --- /dev/null +++ b/node_modules/pacote/node_modules/normalize-package-data/LICENSE @@ -0,0 +1,15 @@ +This package contains code originally written by Isaac Z. Schlueter. +Used with permission. 
+ +Copyright (c) Meryn Stol ("Author") +All rights reserved. + +The BSD License + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/extract_description.js b/node_modules/pacote/node_modules/normalize-package-data/lib/extract_description.js new file mode 100644 index 0000000000000..631966b5f29af --- /dev/null +++ b/node_modules/pacote/node_modules/normalize-package-data/lib/extract_description.js @@ -0,0 +1,24 @@ +module.exports = extractDescription + +// Extracts description from contents of a readme file in markdown format +function extractDescription (d) { + if (!d) { + return + } + if (d === 'ERROR: No README data found!') { + return + } + // the first block of text before the first heading + // that isn't the first line heading + d = d.trim().split('\n') + let s = 0 + while (d[s] && d[s].trim().match(/^(#|$)/)) { + s++ + } + const l = d.length + let e = s + 1 + while (e < l && d[e].trim()) { + e++ + } + return d.slice(s, e).join(' ').trim() +} diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/fixer.js b/node_modules/pacote/node_modules/normalize-package-data/lib/fixer.js new file mode 100644 index 0000000000000..bb78231d83ca9 --- /dev/null +++ b/node_modules/pacote/node_modules/normalize-package-data/lib/fixer.js @@ -0,0 +1,475 @@ +var isValidSemver = require('semver/functions/valid') +var cleanSemver = require('semver/functions/clean') +var validateLicense = require('validate-npm-package-license') +var hostedGitInfo = require('hosted-git-info') +var isBuiltinModule = require('is-core-module') +var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies'] +var extractDescription = require('./extract_description') +var url = require('url') +var typos = require('./typos.json') + +var isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.')) + +module.exports = { + // default warning function + warn: function () {}, + + fixRepositoryField: function (data) { + if (data.repositories) { + this.warn('repositories') + data.repository = data.repositories[0] + } + if (!data.repository) { + return this.warn('missingRepository') + } + if (typeof data.repository === 'string') { + data.repository = { + type: 'git', + url: data.repository, + } + } + var r = data.repository.url || '' + if (r) { + var 
hosted = hostedGitInfo.fromUrl(r) + if (hosted) { + r = data.repository.url + = hosted.getDefaultRepresentation() === 'shortcut' ? hosted.https() : hosted.toString() + } + } + + if (r.match(/github.com\/[^/]+\/[^/]+\.git\.git$/)) { + this.warn('brokenGitUrl', r) + } + }, + + fixTypos: function (data) { + Object.keys(typos.topLevel).forEach(function (d) { + if (Object.prototype.hasOwnProperty.call(data, d)) { + this.warn('typo', d, typos.topLevel[d]) + } + }, this) + }, + + fixScriptsField: function (data) { + if (!data.scripts) { + return + } + if (typeof data.scripts !== 'object') { + this.warn('nonObjectScripts') + delete data.scripts + return + } + Object.keys(data.scripts).forEach(function (k) { + if (typeof data.scripts[k] !== 'string') { + this.warn('nonStringScript') + delete data.scripts[k] + } else if (typos.script[k] && !data.scripts[typos.script[k]]) { + this.warn('typo', k, typos.script[k], 'scripts') + } + }, this) + }, + + fixFilesField: function (data) { + var files = data.files + if (files && !Array.isArray(files)) { + this.warn('nonArrayFiles') + delete data.files + } else if (data.files) { + data.files = data.files.filter(function (file) { + if (!file || typeof file !== 'string') { + this.warn('invalidFilename', file) + return false + } else { + return true + } + }, this) + } + }, + + fixBinField: function (data) { + if (!data.bin) { + return + } + if (typeof data.bin === 'string') { + var b = {} + var match + if (match = data.name.match(/^@[^/]+[/](.*)$/)) { + b[match[1]] = data.bin + } else { + b[data.name] = data.bin + } + data.bin = b + } + }, + + fixManField: function (data) { + if (!data.man) { + return + } + if (typeof data.man === 'string') { + data.man = [data.man] + } + }, + fixBundleDependenciesField: function (data) { + var bdd = 'bundledDependencies' + var bd = 'bundleDependencies' + if (data[bdd] && !data[bd]) { + data[bd] = data[bdd] + delete data[bdd] + } + if (data[bd] && !Array.isArray(data[bd])) { + this.warn('nonArrayBundleDependencies') + delete data[bd] + } else if (data[bd]) { + data[bd] = data[bd].filter(function (filtered) { + if (!filtered || typeof filtered !== 'string') { + this.warn('nonStringBundleDependency', filtered) + return false + } else { + if (!data.dependencies) { + data.dependencies = {} + } + if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) { + this.warn('nonDependencyBundleDependency', filtered) + data.dependencies[filtered] = '*' + } + return true + } + }, this) + } + }, + + fixDependencies: function (data, strict) { + objectifyDeps(data, this.warn) + addOptionalDepsToDeps(data, this.warn) + this.fixBundleDependenciesField(data) + + ;['dependencies', 'devDependencies'].forEach(function (deps) { + if (!(deps in data)) { + return + } + if (!data[deps] || typeof data[deps] !== 'object') { + this.warn('nonObjectDependencies', deps) + delete data[deps] + return + } + Object.keys(data[deps]).forEach(function (d) { + var r = data[deps][d] + if (typeof r !== 'string') { + this.warn('nonStringDependency', d, JSON.stringify(r)) + delete data[deps][d] + } + var hosted = hostedGitInfo.fromUrl(data[deps][d]) + if (hosted) { + data[deps][d] = hosted.toString() + } + }, this) + }, this) + }, + + fixModulesField: function (data) { + if (data.modules) { + this.warn('deprecatedModules') + delete data.modules + } + }, + + fixKeywordsField: function (data) { + if (typeof data.keywords === 'string') { + data.keywords = data.keywords.split(/,\s+/) + } + if (data.keywords && !Array.isArray(data.keywords)) { + delete data.keywords 
+ this.warn('nonArrayKeywords') + } else if (data.keywords) { + data.keywords = data.keywords.filter(function (kw) { + if (typeof kw !== 'string' || !kw) { + this.warn('nonStringKeyword') + return false + } else { + return true + } + }, this) + } + }, + + fixVersionField: function (data, strict) { + // allow "loose" semver 1.0 versions in non-strict mode + // enforce strict semver 2.0 compliance in strict mode + var loose = !strict + if (!data.version) { + data.version = '' + return true + } + if (!isValidSemver(data.version, loose)) { + throw new Error('Invalid version: "' + data.version + '"') + } + data.version = cleanSemver(data.version, loose) + return true + }, + + fixPeople: function (data) { + modifyPeople(data, unParsePerson) + modifyPeople(data, parsePerson) + }, + + fixNameField: function (data, options) { + if (typeof options === 'boolean') { + options = { strict: options } + } else if (typeof options === 'undefined') { + options = {} + } + var strict = options.strict + if (!data.name && !strict) { + data.name = '' + return + } + if (typeof data.name !== 'string') { + throw new Error('name field must be a string.') + } + if (!strict) { + data.name = data.name.trim() + } + ensureValidName(data.name, strict, options.allowLegacyCase) + if (isBuiltinModule(data.name)) { + this.warn('conflictingName', data.name) + } + }, + + fixDescriptionField: function (data) { + if (data.description && typeof data.description !== 'string') { + this.warn('nonStringDescription') + delete data.description + } + if (data.readme && !data.description) { + data.description = extractDescription(data.readme) + } + if (data.description === undefined) { + delete data.description + } + if (!data.description) { + this.warn('missingDescription') + } + }, + + fixReadmeField: function (data) { + if (!data.readme) { + this.warn('missingReadme') + data.readme = 'ERROR: No README data found!' 
+ } + }, + + fixBugsField: function (data) { + if (!data.bugs && data.repository && data.repository.url) { + var hosted = hostedGitInfo.fromUrl(data.repository.url) + if (hosted && hosted.bugs()) { + data.bugs = { url: hosted.bugs() } + } + } else if (data.bugs) { + if (typeof data.bugs === 'string') { + if (isEmail(data.bugs)) { + data.bugs = { email: data.bugs } + /* eslint-disable-next-line node/no-deprecated-api */ + } else if (url.parse(data.bugs).protocol) { + data.bugs = { url: data.bugs } + } else { + this.warn('nonEmailUrlBugsString') + } + } else { + bugsTypos(data.bugs, this.warn) + var oldBugs = data.bugs + data.bugs = {} + if (oldBugs.url) { + /* eslint-disable-next-line node/no-deprecated-api */ + if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) { + data.bugs.url = oldBugs.url + } else { + this.warn('nonUrlBugsUrlField') + } + } + if (oldBugs.email) { + if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) { + data.bugs.email = oldBugs.email + } else { + this.warn('nonEmailBugsEmailField') + } + } + } + if (!data.bugs.email && !data.bugs.url) { + delete data.bugs + this.warn('emptyNormalizedBugs') + } + } + }, + + fixHomepageField: function (data) { + if (!data.homepage && data.repository && data.repository.url) { + var hosted = hostedGitInfo.fromUrl(data.repository.url) + if (hosted && hosted.docs()) { + data.homepage = hosted.docs() + } + } + if (!data.homepage) { + return + } + + if (typeof data.homepage !== 'string') { + this.warn('nonUrlHomepage') + return delete data.homepage + } + /* eslint-disable-next-line node/no-deprecated-api */ + if (!url.parse(data.homepage).protocol) { + data.homepage = 'http://' + data.homepage + } + }, + + fixLicenseField: function (data) { + const license = data.license || data.licence + if (!license) { + return this.warn('missingLicense') + } + if ( + typeof (license) !== 'string' || + license.length < 1 || + license.trim() === '' + ) { + return this.warn('invalidLicense') + } + if (!validateLicense(license).validForNewPackages) { + return this.warn('invalidLicense') + } + }, +} + +function isValidScopedPackageName (spec) { + if (spec.charAt(0) !== '@') { + return false + } + + var rest = spec.slice(1).split('/') + if (rest.length !== 2) { + return false + } + + return rest[0] && rest[1] && + rest[0] === encodeURIComponent(rest[0]) && + rest[1] === encodeURIComponent(rest[1]) +} + +function isCorrectlyEncodedName (spec) { + return !spec.match(/[/@\s+%:]/) && + spec === encodeURIComponent(spec) +} + +function ensureValidName (name, strict, allowLegacyCase) { + if (name.charAt(0) === '.' || + !(isValidScopedPackageName(name) || isCorrectlyEncodedName(name)) || + (strict && (!allowLegacyCase) && name !== name.toLowerCase()) || + name.toLowerCase() === 'node_modules' || + name.toLowerCase() === 'favicon.ico') { + throw new Error('Invalid name: ' + JSON.stringify(name)) + } +} + +function modifyPeople (data, fn) { + if (data.author) { + data.author = fn(data.author) + }['maintainers', 'contributors'].forEach(function (set) { + if (!Array.isArray(data[set])) { + return + } + data[set] = data[set].map(fn) + }) + return data +} + +function unParsePerson (person) { + if (typeof person === 'string') { + return person + } + var name = person.name || '' + var u = person.url || person.web + var wrappedUrl = u ? (' (' + u + ')') : '' + var e = person.email || person.mail + var wrappedEmail = e ? 
(' <' + e + '>') : '' + return name + wrappedEmail + wrappedUrl +} + +function parsePerson (person) { + if (typeof person !== 'string') { + return person + } + var matchedName = person.match(/^([^(<]+)/) + var matchedUrl = person.match(/\(([^()]+)\)/) + var matchedEmail = person.match(/<([^<>]+)>/) + var obj = {} + if (matchedName && matchedName[0].trim()) { + obj.name = matchedName[0].trim() + } + if (matchedEmail) { + obj.email = matchedEmail[1] + } + if (matchedUrl) { + obj.url = matchedUrl[1] + } + return obj +} + +function addOptionalDepsToDeps (data, warn) { + var o = data.optionalDependencies + if (!o) { + return + } + var d = data.dependencies || {} + Object.keys(o).forEach(function (k) { + d[k] = o[k] + }) + data.dependencies = d +} + +function depObjectify (deps, type, warn) { + if (!deps) { + return {} + } + if (typeof deps === 'string') { + deps = deps.trim().split(/[\n\r\s\t ,]+/) + } + if (!Array.isArray(deps)) { + return deps + } + warn('deprecatedArrayDependencies', type) + var o = {} + deps.filter(function (d) { + return typeof d === 'string' + }).forEach(function (d) { + d = d.trim().split(/(:?[@\s><=])/) + var dn = d.shift() + var dv = d.join('') + dv = dv.trim() + dv = dv.replace(/^@/, '') + o[dn] = dv + }) + return o +} + +function objectifyDeps (data, warn) { + depTypes.forEach(function (type) { + if (!data[type]) { + return + } + data[type] = depObjectify(data[type], type, warn) + }) +} + +function bugsTypos (bugs, warn) { + if (!bugs) { + return + } + Object.keys(bugs).forEach(function (k) { + if (typos.bugs[k]) { + warn('typo', k, typos.bugs[k], 'bugs') + bugs[typos.bugs[k]] = bugs[k] + delete bugs[k] + } + }) +} diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/make_warning.js b/node_modules/pacote/node_modules/normalize-package-data/lib/make_warning.js new file mode 100644 index 0000000000000..3be9c86539952 --- /dev/null +++ b/node_modules/pacote/node_modules/normalize-package-data/lib/make_warning.js @@ -0,0 +1,22 @@ +var util = require('util') +var messages = require('./warning_messages.json') + +module.exports = function () { + var args = Array.prototype.slice.call(arguments, 0) + var warningName = args.shift() + if (warningName === 'typo') { + return makeTypoWarning.apply(null, args) + } else { + var msgTemplate = messages[warningName] ? 
messages[warningName] : warningName + ": '%s'" + args.unshift(msgTemplate) + return util.format.apply(null, args) + } +} + +function makeTypoWarning (providedName, probableName, field) { + if (field) { + providedName = field + "['" + providedName + "']" + probableName = field + "['" + probableName + "']" + } + return util.format(messages.typo, providedName, probableName) +} diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/normalize.js b/node_modules/pacote/node_modules/normalize-package-data/lib/normalize.js new file mode 100644 index 0000000000000..bf71d2c1e2235 --- /dev/null +++ b/node_modules/pacote/node_modules/normalize-package-data/lib/normalize.js @@ -0,0 +1,48 @@ +module.exports = normalize + +var fixer = require('./fixer') +normalize.fixer = fixer + +var makeWarning = require('./make_warning') + +var fieldsToFix = ['name', 'version', 'description', 'repository', 'modules', 'scripts', + 'files', 'bin', 'man', 'bugs', 'keywords', 'readme', 'homepage', 'license'] +var otherThingsToFix = ['dependencies', 'people', 'typos'] + +var thingsToFix = fieldsToFix.map(function (fieldName) { + return ucFirst(fieldName) + 'Field' +}) +// two ways to do this in CoffeeScript on only one line, sub-70 chars: +// thingsToFix = fieldsToFix.map (name) -> ucFirst(name) + "Field" +// thingsToFix = (ucFirst(name) + "Field" for name in fieldsToFix) +thingsToFix = thingsToFix.concat(otherThingsToFix) + +function normalize (data, warn, strict) { + if (warn === true) { + warn = null + strict = true + } + if (!strict) { + strict = false + } + if (!warn || data.private) { + warn = function (msg) { /* noop */ } + } + + if (data.scripts && + data.scripts.install === 'node-gyp rebuild' && + !data.scripts.preinstall) { + data.gypfile = true + } + fixer.warn = function () { + warn(makeWarning.apply(null, arguments)) + } + thingsToFix.forEach(function (thingName) { + fixer['fix' + ucFirst(thingName)](data, strict) + }) + data._id = data.name + '@' + data.version +} + +function ucFirst (string) { + return string.charAt(0).toUpperCase() + string.slice(1) +} diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/safe_format.js b/node_modules/pacote/node_modules/normalize-package-data/lib/safe_format.js new file mode 100644 index 0000000000000..5fc888e5450cd --- /dev/null +++ b/node_modules/pacote/node_modules/normalize-package-data/lib/safe_format.js @@ -0,0 +1,11 @@ +var util = require('util') + +module.exports = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.forEach(function (arg) { + if (!arg) { + throw new TypeError('Bad arguments.') + } + }) + return util.format.apply(null, arguments) +} diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/typos.json b/node_modules/pacote/node_modules/normalize-package-data/lib/typos.json new file mode 100644 index 0000000000000..7f9dd283b30ff --- /dev/null +++ b/node_modules/pacote/node_modules/normalize-package-data/lib/typos.json @@ -0,0 +1,25 @@ +{ + "topLevel": { + "dependancies": "dependencies" + ,"dependecies": "dependencies" + ,"depdenencies": "dependencies" + ,"devEependencies": "devDependencies" + ,"depends": "dependencies" + ,"dev-dependencies": "devDependencies" + ,"devDependences": "devDependencies" + ,"devDepenencies": "devDependencies" + ,"devdependencies": "devDependencies" + ,"repostitory": "repository" + ,"repo": "repository" + ,"prefereGlobal": "preferGlobal" + ,"hompage": "homepage" + ,"hampage": "homepage" + ,"autohr": "author" + ,"autor": "author" + ,"contributers": 
"contributors" + ,"publicationConfig": "publishConfig" + ,"script": "scripts" + }, + "bugs": { "web": "url", "name": "url" }, + "script": { "server": "start", "tests": "test" } +} diff --git a/node_modules/pacote/node_modules/normalize-package-data/lib/warning_messages.json b/node_modules/pacote/node_modules/normalize-package-data/lib/warning_messages.json new file mode 100644 index 0000000000000..4890f506ed965 --- /dev/null +++ b/node_modules/pacote/node_modules/normalize-package-data/lib/warning_messages.json @@ -0,0 +1,30 @@ +{ + "repositories": "'repositories' (plural) Not supported. Please pick one as the 'repository' field" + ,"missingRepository": "No repository field." + ,"brokenGitUrl": "Probably broken git url: %s" + ,"nonObjectScripts": "scripts must be an object" + ,"nonStringScript": "script values must be string commands" + ,"nonArrayFiles": "Invalid 'files' member" + ,"invalidFilename": "Invalid filename in 'files' list: %s" + ,"nonArrayBundleDependencies": "Invalid 'bundleDependencies' list. Must be array of package names" + ,"nonStringBundleDependency": "Invalid bundleDependencies member: %s" + ,"nonDependencyBundleDependency": "Non-dependency in bundleDependencies: %s" + ,"nonObjectDependencies": "%s field must be an object" + ,"nonStringDependency": "Invalid dependency: %s %s" + ,"deprecatedArrayDependencies": "specifying %s as array is deprecated" + ,"deprecatedModules": "modules field is deprecated" + ,"nonArrayKeywords": "keywords should be an array of strings" + ,"nonStringKeyword": "keywords should be an array of strings" + ,"conflictingName": "%s is also the name of a node core module." + ,"nonStringDescription": "'description' field should be a string" + ,"missingDescription": "No description" + ,"missingReadme": "No README data" + ,"missingLicense": "No license field." + ,"nonEmailUrlBugsString": "Bug string field must be url, email, or {email,url}" + ,"nonUrlBugsUrlField": "bugs.url field must be a string url. Deleted." + ,"nonEmailBugsEmailField": "bugs.email field must be a string email. Deleted." + ,"emptyNormalizedBugs": "Normalized value of bugs field is an empty object. Deleted." + ,"nonUrlHomepage": "homepage field must be a string url. Deleted." + ,"invalidLicense": "license should be a valid SPDX license expression" + ,"typo": "%s should probably be %s." 
+} diff --git a/node_modules/pacote/node_modules/normalize-package-data/package.json b/node_modules/pacote/node_modules/normalize-package-data/package.json new file mode 100644 index 0000000000000..48d2371d4a66b --- /dev/null +++ b/node_modules/pacote/node_modules/normalize-package-data/package.json @@ -0,0 +1,62 @@ +{ + "name": "normalize-package-data", + "version": "6.0.0", + "author": "GitHub Inc.", + "description": "Normalizes data that can be found in package.json files.", + "license": "BSD-2-Clause", + "repository": { + "type": "git", + "url": "https://github.com/npm/normalize-package-data.git" + }, + "main": "lib/normalize.js", + "scripts": { + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint \"**/*.js\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force" + }, + "dependencies": { + "hosted-git-info": "^7.0.0", + "is-core-module": "^2.8.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.18.0", + "tap": "^16.0.1" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.18.0", + "publish": "true", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] + }, + "tap": { + "branches": 86, + "functions": 92, + "lines": 86, + "statements": 86, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE new file mode 100644 index 0000000000000..45055763dc838 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
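Reviewer note (illustrative, not part of the diff): the normalize-package-data@6.0.0 files vendored above are what the nested pacote copy now runs manifests through. A minimal sketch of the exported API, using a made-up manifest object; the expected outcomes follow directly from the fixer.js added above.

'use strict'
const normalize = require('normalize-package-data')

// hypothetical raw manifest, as read from a package.json
const pkg = {
  name: '  example  ',
  version: 'v1.0.0',
  repository: 'npm/example', // github shorthand
}

// second argument is an optional warning callback; a third truthy
// argument would enable strict (semver 2.0, lowercase-name) mode
normalize(pkg, (msg) => console.warn('normalize:', msg))

// per fixer.js above: the name is trimmed to 'example', the loose semver
// is cleaned to '1.0.0', the repository shorthand expands to a full
// git+https URL via hosted-git-info, and pkg._id becomes 'example@1.0.0'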
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js new file mode 100644 index 0000000000000..efc1247d59d12 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/from-url.js @@ -0,0 +1,122 @@ +'use strict' + +const parseUrl = require('./parse-url') + +// look for github shorthand inputs, such as npm/cli +const isGitHubShorthand = (arg) => { + // it cannot contain whitespace before the first # + // it cannot start with a / because that's probably an absolute file path + // but it must include a slash since repos are username/repository + // it cannot start with a . because that's probably a relative file path + // it cannot start with an @ because that's a scoped package if it passes the other tests + // it cannot contain a : before a # because that tells us that there's a protocol + // a second / may not exist before a # + const firstHash = arg.indexOf('#') + const firstSlash = arg.indexOf('/') + const secondSlash = arg.indexOf('/', firstSlash + 1) + const firstColon = arg.indexOf(':') + const firstSpace = /\s/.exec(arg) + const firstAt = arg.indexOf('@') + + const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash) + const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash) + const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash) + const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash) + const hasSlash = firstSlash > 0 + // if a # is found, what we really want to know is that the character + // immediately before # is not a / + const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/') + const doesNotStartWithDot = !arg.startsWith('.') + + return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash && + doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash && + secondSlashOnlyAfterHash +} + +module.exports = (giturl, opts, { gitHosts, protocols }) => { + if (!giturl) { + return + } + + const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl + const parsed = parseUrl(correctedUrl, protocols) + if (!parsed) { + return + } + + const gitHostShortcut = gitHosts.byShortcut[parsed.protocol] + const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.') + ? parsed.hostname.slice(4) + : parsed.hostname] + const gitHostName = gitHostShortcut || gitHostDomain + if (!gitHostName) { + return + } + + const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain] + let auth = null + if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) { + auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}` + } + + let committish = null + let user = null + let project = null + let defaultRepresentation = null + + try { + if (gitHostShortcut) { + let pathname = parsed.pathname.startsWith('/') ? 
parsed.pathname.slice(1) : parsed.pathname + const firstAt = pathname.indexOf('@') + // we ignore auth for shortcuts, so just trim it out + if (firstAt > -1) { + pathname = pathname.slice(firstAt + 1) + } + + const lastSlash = pathname.lastIndexOf('/') + if (lastSlash > -1) { + user = decodeURIComponent(pathname.slice(0, lastSlash)) + // we want nulls only, never empty strings + if (!user) { + user = null + } + project = decodeURIComponent(pathname.slice(lastSlash + 1)) + } else { + project = decodeURIComponent(pathname) + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (parsed.hash) { + committish = decodeURIComponent(parsed.hash.slice(1)) + } + + defaultRepresentation = 'shortcut' + } else { + if (!gitHostInfo.protocols.includes(parsed.protocol)) { + return + } + + const segments = gitHostInfo.extract(parsed) + if (!segments) { + return + } + + user = segments.user && decodeURIComponent(segments.user) + project = decodeURIComponent(segments.project) + committish = decodeURIComponent(segments.committish) + defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1) + } + } catch (err) { + /* istanbul ignore else */ + if (err instanceof URIError) { + return + } else { + throw err + } + } + + return [gitHostName, user, auth, project, committish, defaultRepresentation, opts] +} diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js new file mode 100644 index 0000000000000..013712b7842c8 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/hosts.js @@ -0,0 +1,228 @@ +/* eslint-disable max-len */ + +'use strict' + +const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : '' +const maybeEncode = (arg) => arg ? 
encodeURIComponent(arg) : '' +const formatHashFragment = (f) => f.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-') + +const defaults = { + sshtemplate: ({ domain, user, project, committish }) => + `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, user, project, committish }) => + `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + edittemplate: ({ domain, user, project, committish, editpath, path }) => + `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`, + browsetemplate: ({ domain, user, project, committish, treepath }) => + `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`, + browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => + `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, + browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) => + `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, + docstemplate: ({ domain, user, project, treepath, committish }) => + `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, + httpstemplate: ({ auth, domain, user, project, committish }) => + `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ domain, user, project, committish, path }) => + `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`, + shortcuttemplate: ({ type, user, project, committish }) => + `${type}:${user}/${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ user, project, committish }) => + `${user}/${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, user, project }) => + `https://${domain}/${user}/${project}/issues`, + hashformat: formatHashFragment, +} + +const hosts = {} +hosts.github = { + // First two are insecure and generally shouldn't be used any more, but + // they are still supported. 
+ protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'github.com', + treepath: 'tree', + blobpath: 'blob', + editpath: 'edit', + filetemplate: ({ auth, user, project, committish, path }) => + `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`, + gittemplate: ({ auth, domain, user, project, committish }) => + `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => + `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`, + extract: (url) => { + let [, user, project, type, committish] = url.pathname.split('/', 5) + if (type && type !== 'tree') { + return + } + + if (!type) { + committish = url.hash.slice(1) + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish } + }, +} + +hosts.bitbucket = { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'bitbucket.org', + treepath: 'src', + blobpath: 'src', + editpath: '?mode=edit', + edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) => + `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`, + tarballtemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (['get'].includes(aux)) { + return + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +} + +hosts.gitlab = { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gitlab.com', + treepath: 'tree', + blobpath: 'tree', + editpath: '-/edit', + httpstemplate: ({ auth, domain, user, project, committish }) => + `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`, + extract: (url) => { + const path = url.pathname.slice(1) + if (path.includes('/-/') || path.includes('/archive.tar.gz')) { + return + } + + const segments = path.split('/') + let project = segments.pop() + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + const user = segments.join('/') + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +} + +hosts.gist = { + protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gist.github.com', + editpath: 'edit', + sshtemplate: ({ domain, project, committish }) => + `git@${domain}:${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, project, committish }) => + `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`, + edittemplate: ({ domain, user, project, committish, editpath }) => + `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`, + browsetemplate: ({ domain, project, committish }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + browsetreetemplate: ({ domain, project, committish, path, 
hashformat }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, + browseblobtemplate: ({ domain, project, committish, path, hashformat }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, + docstemplate: ({ domain, project, committish }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + httpstemplate: ({ domain, project, committish }) => + `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ user, project, committish, path }) => + `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`, + shortcuttemplate: ({ type, project, committish }) => + `${type}:${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ project, committish }) => + `${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, project }) => + `https://${domain}/${project}`, + gittemplate: ({ domain, project, committish }) => + `git://${domain}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ project, committish }) => + `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (aux === 'raw') { + return + } + + if (!project) { + if (!user) { + return + } + + project = user + user = null + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + return { user, project, committish: url.hash.slice(1) } + }, + hashformat: function (fragment) { + return fragment && 'file-' + formatHashFragment(fragment) + }, +} + +hosts.sourcehut = { + protocols: ['git+ssh:', 'https:'], + domain: 'git.sr.ht', + treepath: 'tree', + blobpath: 'tree', + filetemplate: ({ domain, user, project, committish, path }) => + `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`, + httpstemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`, + bugstemplate: ({ user, project }) => + `https://todo.sr.ht/${user}/${project}`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + + // tarball url + if (['archive'].includes(aux)) { + return + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +} + +for (const [name, host] of Object.entries(hosts)) { + hosts[name] = Object.assign({}, defaults, host) +} + +module.exports = hosts diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js new file mode 100644 index 0000000000000..a7339c217e9a3 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/index.js @@ -0,0 +1,179 @@ +'use strict' + +const LRU = require('lru-cache') +const hosts = require('./hosts.js') +const fromUrl = require('./from-url.js') +const parseUrl = require('./parse-url.js') + +const cache = new LRU({ max: 1000 }) + +class GitHost { + constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) { + 
Object.assign(this, GitHost.#gitHosts[type], { + type, + user, + auth, + project, + committish, + default: defaultRepresentation, + opts, + }) + } + + static #gitHosts = { byShortcut: {}, byDomain: {} } + static #protocols = { + 'git+ssh:': { name: 'sshurl' }, + 'ssh:': { name: 'sshurl' }, + 'git+https:': { name: 'https', auth: true }, + 'git:': { auth: true }, + 'http:': { auth: true }, + 'https:': { auth: true }, + 'git+http:': { auth: true }, + } + + static addHost (name, host) { + GitHost.#gitHosts[name] = host + GitHost.#gitHosts.byDomain[host.domain] = name + GitHost.#gitHosts.byShortcut[`${name}:`] = name + GitHost.#protocols[`${name}:`] = { name } + } + + static fromUrl (giturl, opts) { + if (typeof giturl !== 'string') { + return + } + + const key = giturl + JSON.stringify(opts || {}) + + if (!cache.has(key)) { + const hostArgs = fromUrl(giturl, opts, { + gitHosts: GitHost.#gitHosts, + protocols: GitHost.#protocols, + }) + cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined) + } + + return cache.get(key) + } + + static parseUrl (url) { + return parseUrl(url) + } + + #fill (template, opts) { + if (typeof template !== 'function') { + return null + } + + const options = { ...this, ...this.opts, ...opts } + + // the path should always be set so we don't end up with 'undefined' in urls + if (!options.path) { + options.path = '' + } + + // template functions will insert the leading slash themselves + if (options.path.startsWith('/')) { + options.path = options.path.slice(1) + } + + if (options.noCommittish) { + options.committish = null + } + + const result = template(options) + return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result + } + + hash () { + return this.committish ? `#${this.committish}` : '' + } + + ssh (opts) { + return this.#fill(this.sshtemplate, opts) + } + + sshurl (opts) { + return this.#fill(this.sshurltemplate, opts) + } + + browse (path, ...args) { + // not a string, treat path as opts + if (typeof path !== 'string') { + return this.#fill(this.browsetemplate, path) + } + + if (typeof args[0] !== 'string') { + return this.#fill(this.browsetreetemplate, { ...args[0], path }) + } + + return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path }) + } + + // If the path is known to be a file, then browseFile should be used. For some hosts + // the url is the same as browse, but for others like GitHub a file can use both `/tree/` + // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/` + // path will redirect to a specific commit. Using the `/blob/` path avoids this and + // does not redirect to a different commit. 
+ browseFile (path, ...args) { + if (typeof args[0] !== 'string') { + return this.#fill(this.browseblobtemplate, { ...args[0], path }) + } + + return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path }) + } + + docs (opts) { + return this.#fill(this.docstemplate, opts) + } + + bugs (opts) { + return this.#fill(this.bugstemplate, opts) + } + + https (opts) { + return this.#fill(this.httpstemplate, opts) + } + + git (opts) { + return this.#fill(this.gittemplate, opts) + } + + shortcut (opts) { + return this.#fill(this.shortcuttemplate, opts) + } + + path (opts) { + return this.#fill(this.pathtemplate, opts) + } + + tarball (opts) { + return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false }) + } + + file (path, opts) { + return this.#fill(this.filetemplate, { ...opts, path }) + } + + edit (path, opts) { + return this.#fill(this.edittemplate, { ...opts, path }) + } + + getDefaultRepresentation () { + return this.default + } + + toString (opts) { + if (this.default && typeof this[this.default] === 'function') { + return this[this.default](opts) + } + + return this.sshurl(opts) + } +} + +for (const [name, host] of Object.entries(hosts)) { + GitHost.addHost(name, host) +} + +module.exports = GitHost diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js new file mode 100644 index 0000000000000..7d5489c008ab4 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/lib/parse-url.js @@ -0,0 +1,78 @@ +const url = require('url') + +const lastIndexOfBefore = (str, char, beforeChar) => { + const startPosition = str.indexOf(beforeChar) + return str.lastIndexOf(char, startPosition > -1 ? 
startPosition : Infinity) +} + +const safeUrl = (u) => { + try { + return new url.URL(u) + } catch { + // this fn should never throw + } +} + +// accepts input like git:github.com:user/repo and inserts the // after the first : +const correctProtocol = (arg, protocols) => { + const firstColon = arg.indexOf(':') + const proto = arg.slice(0, firstColon + 1) + if (Object.prototype.hasOwnProperty.call(protocols, proto)) { + return arg + } + + const firstAt = arg.indexOf('@') + if (firstAt > -1) { + if (firstAt > firstColon) { + return `git+ssh://${arg}` + } else { + return arg + } + } + + const doubleSlash = arg.indexOf('//') + if (doubleSlash === firstColon + 1) { + return arg + } + + return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}` +} + +// attempt to correct an scp style url so that it will parse with `new URL()` +const correctUrl = (giturl) => { + // ignore @ that come after the first hash since the denotes the start + // of a committish which can contain @ characters + const firstAt = lastIndexOfBefore(giturl, '@', '#') + // ignore colons that come after the hash since that could include colons such as: + // git@github.com:user/package-2#semver:^1.0.0 + const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#') + + if (lastColonBeforeHash > firstAt) { + // the last : comes after the first @ (or there is no @) + // like it would in: + // proto://hostname.com:user/repo + // username@hostname.com:user/repo + // :password@hostname.com:user/repo + // username:password@hostname.com:user/repo + // proto://username@hostname.com:user/repo + // proto://:password@hostname.com:user/repo + // proto://username:password@hostname.com:user/repo + // then we replace the last : with a / to create a valid path + giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1) + } + + if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) { + // we have no : at all + // as it would be in: + // username@hostname.com/user/repo + // then we prepend a protocol + giturl = `git+ssh://${giturl}` + } + + return giturl +} + +module.exports = (giturl, protocols) => { + const withProtocol = protocols ? 
correctProtocol(giturl, protocols) : giturl + return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol)) +} diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json new file mode 100644 index 0000000000000..612259948afe7 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info/package.json @@ -0,0 +1,59 @@ +{ + "name": "hosted-git-info", + "version": "6.1.1", + "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab", + "main": "./lib/index.js", + "repository": { + "type": "git", + "url": "https://github.com/npm/hosted-git-info.git" + }, + "keywords": [ + "git", + "github", + "bitbucket", + "gitlab" + ], + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/hosted-git-info/issues" + }, + "homepage": "https://github.com/npm/hosted-git-info", + "scripts": { + "posttest": "npm run lint", + "snap": "tap", + "test": "tap", + "test:coverage": "tap --coverage-report=html", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "template-oss-apply": "template-oss-apply --force" + }, + "dependencies": { + "lru-cache": "^7.5.1" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.7.1", + "tap": "^16.0.1" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "tap": { + "color": 1, + "coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.7.1" + } +} diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE new file mode 100644 index 0000000000000..19cec97b18468 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
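Reviewer note (illustrative, not part of the diff): a quick sanity check of the hosted-git-info@6.1.1 copy vendored above, which the nested npm-package-arg below uses to resolve git specifiers. The spec string is a made-up example; the outputs follow from the github templates in hosts.js and the shortcut handling in from-url.js.

'use strict'
const HostedGit = require('hosted-git-info')

// shortcut form is parsed by from-url.js; '#v10.0.0' becomes the committish
const info = HostedGit.fromUrl('github:npm/cli#v10.0.0')

console.log(info.https())    // git+https://github.com/npm/cli.git#v10.0.0
console.log(info.tarball())  // https://codeload.github.com/npm/cli/tar.gz/v10.0.0
console.log(info.shortcut()) // github:npm/cli#v10.0.0
console.log(info.getDefaultRepresentation()) // 'shortcut'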
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js new file mode 100644 index 0000000000000..36bd18cd9f9a6 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js @@ -0,0 +1,431 @@ +'use strict' +module.exports = npa +module.exports.resolve = resolve +module.exports.toPurl = toPurl +module.exports.Result = Result + +const url = require('url') +const HostedGit = require('hosted-git-info') +const semver = require('semver') +const path = global.FAKE_WINDOWS ? require('path').win32 : require('path') +const validatePackageName = require('validate-npm-package-name') +const { homedir } = require('os') +const log = require('proc-log') + +const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS +const hasSlashes = isWindows ? /\\|[/]/ : /[/]/ +const isURL = /^(?:git[+])?[a-z]+:/i +const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i +const isFilename = /[.](?:tgz|tar.gz|tar)$/i + +function npa (arg, where) { + let name + let spec + if (typeof arg === 'object') { + if (arg instanceof Result && (!where || where === arg.where)) { + return arg + } else if (arg.name && arg.rawSpec) { + return npa.resolve(arg.name, arg.rawSpec, where || arg.where) + } else { + return npa(arg.raw, where || arg.where) + } + } + const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@') + const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg + if (isURL.test(arg)) { + spec = arg + } else if (isGit.test(arg)) { + spec = `git+ssh://${arg}` + } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) { + spec = arg + } else if (nameEndsAt > 0) { + name = namePart + spec = arg.slice(nameEndsAt + 1) || '*' + } else { + const valid = validatePackageName(arg) + if (valid.validForOldPackages) { + name = arg + spec = '*' + } else { + spec = arg + } + } + return resolve(name, spec, where, arg) +} + +const isFilespec = isWindows ? 
/^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/ : /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/ + +function resolve (name, spec, where, arg) { + const res = new Result({ + raw: arg, + name: name, + rawSpec: spec, + fromArgument: arg != null, + }) + + if (name) { + res.setName(name) + } + + if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) { + return fromFile(res, where) + } else if (spec && /^npm:/i.test(spec)) { + return fromAlias(res, where) + } + + const hosted = HostedGit.fromUrl(spec, { + noGitPlus: true, + noCommittish: true, + }) + if (hosted) { + return fromHostedGit(res, hosted) + } else if (spec && isURL.test(spec)) { + return fromURL(res) + } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) { + return fromFile(res, where) + } else { + return fromRegistry(res) + } +} + +const defaultRegistry = 'https://registry.npmjs.org' + +function toPurl (arg, reg = defaultRegistry) { + const res = npa(arg) + + if (res.type !== 'version') { + throw invalidPurlType(res.type, res.raw) + } + + // URI-encode leading @ of scoped packages + let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec + if (reg !== defaultRegistry) { + purl += '?repository_url=' + reg + } + + return purl +} + +function invalidPackageName (name, valid, raw) { + // eslint-disable-next-line max-len + const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`) + err.code = 'EINVALIDPACKAGENAME' + return err +} + +function invalidTagName (name, raw) { + // eslint-disable-next-line max-len + const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`) + err.code = 'EINVALIDTAGNAME' + return err +} + +function invalidPurlType (type, raw) { + // eslint-disable-next-line max-len + const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`) + err.code = 'EINVALIDPURLTYPE' + return err +} + +function Result (opts) { + this.type = opts.type + this.registry = opts.registry + this.where = opts.where + if (opts.raw == null) { + this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec + } else { + this.raw = opts.raw + } + + this.name = undefined + this.escapedName = undefined + this.scope = undefined + this.rawSpec = opts.rawSpec || '' + this.saveSpec = opts.saveSpec + this.fetchSpec = opts.fetchSpec + if (opts.name) { + this.setName(opts.name) + } + this.gitRange = opts.gitRange + this.gitCommittish = opts.gitCommittish + this.gitSubdir = opts.gitSubdir + this.hosted = opts.hosted +} + +Result.prototype.setName = function (name) { + const valid = validatePackageName(name) + if (!valid.validForOldPackages) { + throw invalidPackageName(name, valid, this.raw) + } + + this.name = name + this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined + // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar + this.escapedName = name.replace('/', '%2f') + return this +} + +Result.prototype.toString = function () { + const full = [] + if (this.name != null && this.name !== '') { + full.push(this.name) + } + const spec = this.saveSpec || this.fetchSpec || this.rawSpec + if (spec != null && spec !== '') { + full.push(spec) + } + return full.length ? 
full.join('@') : this.raw +} + +Result.prototype.toJSON = function () { + const result = Object.assign({}, this) + delete result.hosted + return result +} + +function setGitCommittish (res, committish) { + if (!committish) { + res.gitCommittish = null + return res + } + + // for each :: separated item: + for (const part of committish.split('::')) { + // if the item has no : then it is a commit-ish + if (!part.includes(':')) { + if (res.gitRange) { + throw new Error('cannot override existing semver range with a committish') + } + if (res.gitCommittish) { + throw new Error('cannot override existing committish with a second committish') + } + res.gitCommittish = part + continue + } + // split on name:value + const [name, value] = part.split(':') + // if name is semver do semver lookup of ref or tag + if (name === 'semver') { + if (res.gitCommittish) { + throw new Error('cannot override existing committish with a semver range') + } + if (res.gitRange) { + throw new Error('cannot override existing semver range with a second semver range') + } + res.gitRange = decodeURIComponent(value) + continue + } + if (name === 'path') { + if (res.gitSubdir) { + throw new Error('cannot override existing path with a second path') + } + res.gitSubdir = `/${value}` + continue + } + log.warn('npm-package-arg', `ignoring unknown key "${name}"`) + } + + return res +} + +function fromFile (res, where) { + if (!where) { + where = process.cwd() + } + res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory' + res.where = where + + // always put the '/' on where when resolving urls, or else + // file:foo from /path/to/bar goes to /path/to/foo, when we want + // it to be /path/to/bar/foo + + let specUrl + let resolvedUrl + const prefix = (!/^file:/.test(res.rawSpec) ? 'file:' : '') + const rawWithPrefix = prefix + res.rawSpec + let rawNoPrefix = rawWithPrefix.replace(/^file:/, '') + try { + resolvedUrl = new url.URL(rawWithPrefix, `file://${path.resolve(where)}/`) + specUrl = new url.URL(rawWithPrefix) + } catch (originalError) { + const er = new Error('Invalid file: URL, must comply with RFC 8909') + throw Object.assign(er, { + raw: res.rawSpec, + spec: res, + where, + originalError, + }) + } + + // environment switch for testing + if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') { + // XXX backwards compatibility lack of compliance with 8909 + // Remove when we want a breaking change to come into RFC compliance. + if (resolvedUrl.host && resolvedUrl.host !== 'localhost') { + const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///') + resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`) + specUrl = new url.URL(rawSpec) + rawNoPrefix = rawSpec.replace(/^file:/, '') + } + // turn file:/../foo into file:../foo + // for 1, 2 or 3 leading slashes since we attempted + // in the previous step to make it a file protocol url with a leading slash + if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) { + const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:') + resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`) + specUrl = new url.URL(rawSpec) + rawNoPrefix = rawSpec.replace(/^file:/, '') + } + // XXX end 8909 violation backwards compatibility section + } + + // file:foo - relative url to ./foo + // file:/foo - absolute path /foo + // file:///foo - absolute path to /foo, no authority host + // file://localhost/foo - absolute path to /foo, on localhost + // file://foo - absolute path to / on foo host (error!)
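+ // net effect of the compat shim above: file://../foo and file:///../foo are + // treated like the relative spec file:../foo unless NPM_PACKAGE_ARG_8909_STRICT=1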
+ if (resolvedUrl.host && resolvedUrl.host !== 'localhost') { + const msg = `Invalid file: URL, must be absolute if // present` + throw Object.assign(new Error(msg), { + raw: res.rawSpec, + parsed: resolvedUrl, + }) + } + + // turn /C:/blah into just C:/blah on windows + let specPath = decodeURIComponent(specUrl.pathname) + let resolvedPath = decodeURIComponent(resolvedUrl.pathname) + if (isWindows) { + specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1') + resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1') + } + + // replace ~ with homedir, but keep the ~ in the saveSpec + // otherwise, make it relative to where param + if (/^\/~(\/|$)/.test(specPath)) { + res.saveSpec = `file:${specPath.substr(1)}` + resolvedPath = path.resolve(homedir(), specPath.substr(3)) + } else if (!path.isAbsolute(rawNoPrefix)) { + res.saveSpec = `file:${path.relative(where, resolvedPath)}` + } else { + res.saveSpec = `file:${path.resolve(resolvedPath)}` + } + + res.fetchSpec = path.resolve(where, resolvedPath) + return res +} + +function fromHostedGit (res, hosted) { + res.type = 'git' + res.hosted = hosted + res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false }) + res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString() + return setGitCommittish(res, hosted.committish) +} + +function unsupportedURLType (protocol, spec) { + const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`) + err.code = 'EUNSUPPORTEDPROTOCOL' + return err +} + +function matchGitScp (spec) { + // git ssh specifiers are overloaded to also use scp-style git + // specifiers, so we have to parse those out and treat them special. + // They are NOT true URIs, so we can't hand them to `url.parse`. + // + // This regex looks for things that look like: + // git+ssh://git@my.custom.git.com:username/project.git#deadbeef + // + // ...and various combinations. The username in the beginning is *required*. + const matched = spec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i) + return matched && !matched[1].match(/:[0-9]+\/?.*$/i) && { + fetchSpec: matched[1], + gitCommittish: matched[2] == null ? null : matched[2], + } +} + +function fromURL (res) { + // eslint-disable-next-line node/no-deprecated-api + const urlparse = url.parse(res.rawSpec) + res.saveSpec = res.rawSpec + // check the protocol, and then see if it's git or not + switch (urlparse.protocol) { + case 'git:': + case 'git+http:': + case 'git+https:': + case 'git+rsync:': + case 'git+ftp:': + case 'git+file:': + case 'git+ssh:': { + res.type = 'git' + const match = urlparse.protocol === 'git+ssh:' ? matchGitScp(res.rawSpec) + : null + if (match) { + setGitCommittish(res, match.gitCommittish) + res.fetchSpec = match.fetchSpec + } else { + setGitCommittish(res, urlparse.hash != null ? 
urlparse.hash.slice(1) : '') + urlparse.protocol = urlparse.protocol.replace(/^git[+]/, '') + if (urlparse.protocol === 'file:' && /^git\+file:\/\/[a-z]:/i.test(res.rawSpec)) { + // keep the drive letter : on windows file paths + urlparse.host += ':' + urlparse.hostname += ':' + } + delete urlparse.hash + res.fetchSpec = url.format(urlparse) + } + break + } + case 'http:': + case 'https:': + res.type = 'remote' + res.fetchSpec = res.saveSpec + break + + default: + throw unsupportedURLType(urlparse.protocol, res.rawSpec) + } + + return res +} + +function fromAlias (res, where) { + const subSpec = npa(res.rawSpec.substr(4), where) + if (subSpec.type === 'alias') { + throw new Error('nested aliases not supported') + } + + if (!subSpec.registry) { + throw new Error('aliases only work for registry deps') + } + + res.subSpec = subSpec + res.registry = true + res.type = 'alias' + res.saveSpec = null + res.fetchSpec = null + return res +} + +function fromRegistry (res) { + res.registry = true + const spec = res.rawSpec.trim() + // no save spec for registry components as we save based on the fetched + // version, not on the argument so this can't compute that. + res.saveSpec = null + res.fetchSpec = spec + const version = semver.valid(spec, true) + const range = semver.validRange(spec, true) + if (version) { + res.type = 'version' + } else if (range) { + res.type = 'range' + } else { + if (encodeURIComponent(spec) !== spec) { + throw invalidTagName(spec, res.raw) + } + res.type = 'tag' + } + return res +} diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json new file mode 100644 index 0000000000000..bb9e71b258a93 --- /dev/null +++ b/node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json @@ -0,0 +1,59 @@ +{ + "name": "npm-package-arg", + "version": "10.1.0", + "description": "Parse the things that can be arguments to `npm install`", + "main": "./lib/npa.js", + "directories": { + "test": "test" + }, + "files": [ + "bin/", + "lib/" + ], + "dependencies": { + "hosted-git-info": "^6.0.0", + "proc-log": "^3.0.0", + "semver": "^7.3.5", + "validate-npm-package-name": "^5.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.10.0", + "tap": "^16.0.1" + }, + "scripts": { + "test": "tap", + "snap": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint \"**/*.js\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/npm-package-arg.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/npm-package-arg/issues" + }, + "homepage": "https://github.com/npm/npm-package-arg", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "tap": { + "branches": 97, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.10.0" + } +} diff --git a/node_modules/pacote/node_modules/read-package-json/LICENSE b/node_modules/pacote/node_modules/read-package-json/LICENSE new file mode 100644 index 0000000000000..052085c436514 --- /dev/null +++ b/node_modules/pacote/node_modules/read-package-json/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pacote/node_modules/read-package-json/lib/read-json.js b/node_modules/pacote/node_modules/read-package-json/lib/read-json.js new file mode 100644 index 0000000000000..d35f09ebd208f --- /dev/null +++ b/node_modules/pacote/node_modules/read-package-json/lib/read-json.js @@ -0,0 +1,589 @@ +var fs = require('fs') + +var path = require('path') + +var { glob } = require('glob') +var normalizeData = require('normalize-package-data') +var safeJSON = require('json-parse-even-better-errors') +var util = require('util') +var normalizePackageBin = require('npm-normalize-package-bin') + +module.exports = readJson + +// put more stuff on here to customize. +readJson.extraSet = [ + bundleDependencies, + gypfile, + serverjs, + scriptpath, + authors, + readme, + mans, + bins, + githead, + fillTypes, +] + +var typoWarned = {} +var cache = {} + +function readJson (file, log_, strict_, cb_) { + var log, strict, cb + for (var i = 1; i < arguments.length - 1; i++) { + if (typeof arguments[i] === 'boolean') { + strict = arguments[i] + } else if (typeof arguments[i] === 'function') { + log = arguments[i] + } + } + + if (!log) { + log = function () {} + } + cb = arguments[arguments.length - 1] + + readJson_(file, log, strict, cb) +} + +function readJson_ (file, log, strict, cb) { + fs.readFile(file, 'utf8', function (er, d) { + parseJson(file, er, d, log, strict, cb) + }) +} + +function stripBOM (content) { + // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM) + // because the buffer-to-string conversion in `fs.readFileSync()` + // translates it to FEFF, the UTF-16 BOM. + if (content.charCodeAt(0) === 0xFEFF) { + content = content.slice(1) + } + return content +} + +function jsonClone (obj) { + if (obj == null) { + return obj + } else if (Array.isArray(obj)) { + var newarr = new Array(obj.length) + for (var ii in obj) { + newarr[ii] = jsonClone(obj[ii]) + } + return newarr + } else if (typeof obj === 'object') { + var newobj = {} + for (var kk in obj) { + newobj[kk] = jsonClone(obj[kk]) + } + return newobj + } else { + return obj + } +} + +function parseJson (file, er, d, log, strict, cb) { + if (er && er.code === 'ENOENT') { + return fs.stat(path.dirname(file), function (err, stat) { + if (!err && stat && !stat.isDirectory()) { + // ENOTDIR isn't used on Windows, but npm expects it. 
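+ // (derive a new error from the ENOENT via Object.create so the + // original error object is not mutated)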
+ er = Object.create(er) + er.code = 'ENOTDIR' + return cb(er) + } else { + return indexjs(file, er, log, strict, cb) + } + }) + } + if (er) { + return cb(er) + } + + if (cache[d]) { + return cb(null, jsonClone(cache[d])) + } + + var data + + try { + data = safeJSON(stripBOM(d)) + for (var key in data) { + if (/^_/.test(key)) { + delete data[key] + } + } + } catch (jsonErr) { + data = parseIndex(d) + if (!data) { + return cb(parseError(jsonErr, file)) + } + } + extrasCached(file, d, data, log, strict, cb) +} + +function extrasCached (file, d, data, log, strict, cb) { + extras(file, data, log, strict, function (err, extrasData) { + if (!err) { + cache[d] = jsonClone(extrasData) + } + cb(err, extrasData) + }) +} + +function indexjs (file, er, log, strict, cb) { + if (path.basename(file) === 'index.js') { + return cb(er) + } + + var index = path.resolve(path.dirname(file), 'index.js') + fs.readFile(index, 'utf8', function (er2, d) { + if (er2) { + return cb(er) + } + + if (cache[d]) { + return cb(null, cache[d]) + } + + var data = parseIndex(d) + if (!data) { + return cb(er) + } + + extrasCached(file, d, data, log, strict, cb) + }) +} + +readJson.extras = extras +function extras (file, data, log_, strict_, cb_) { + var log, strict, cb + for (var i = 2; i < arguments.length - 1; i++) { + if (typeof arguments[i] === 'boolean') { + strict = arguments[i] + } else if (typeof arguments[i] === 'function') { + log = arguments[i] + } + } + + if (!log) { + log = function () {} + } + cb = arguments[i] + + var set = readJson.extraSet + var n = set.length + var errState = null + set.forEach(function (fn) { + fn(file, data, then) + }) + + function then (er) { + if (errState) { + return + } + if (er) { + return cb(errState = er) + } + if (--n > 0) { + return + } + final(file, data, log, strict, cb) + } +} + +function scriptpath (file, data, cb) { + if (!data.scripts) { + return cb(null, data) + } + var k = Object.keys(data.scripts) + k.forEach(scriptpath_, data.scripts) + cb(null, data) +} + +function scriptpath_ (key) { + var s = this[key] + // This is never allowed, and only causes problems + if (typeof s !== 'string') { + return delete this[key] + } + + var spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/ + if (s.match(spre)) { + this[key] = this[key].replace(spre, '') + } +} + +function gypfile (file, data, cb) { + var dir = path.dirname(file) + var s = data.scripts || {} + if (s.install || s.preinstall) { + return cb(null, data) + } + + if (data.gypfile === false) { + return cb(null, data) + } + glob('*.gyp', { cwd: dir }) + .then(files => gypfile_(file, data, files, cb)) + .catch(er => cb(er)) +} + +function gypfile_ (file, data, files, cb) { + if (!files.length) { + return cb(null, data) + } + var s = data.scripts || {} + s.install = 'node-gyp rebuild' + data.scripts = s + data.gypfile = true + return cb(null, data) +} + +function serverjs (file, data, cb) { + var dir = path.dirname(file) + var s = data.scripts || {} + if (s.start) { + return cb(null, data) + } + fs.access(path.join(dir, 'server.js'), (err) => { + if (!err) { + s.start = 'node server.js' + data.scripts = s + } + return cb(null, data) + }) +} + +function authors (file, data, cb) { + if (data.contributors) { + return cb(null, data) + } + var af = path.resolve(path.dirname(file), 'AUTHORS') + fs.readFile(af, 'utf8', function (er, ad) { + // ignore error. just checking it. 
+ if (er) { + return cb(null, data) + } + authors_(file, data, ad, cb) + }) +} + +function authors_ (file, data, ad, cb) { + ad = ad.split(/\r?\n/g).map(function (line) { + return line.replace(/^\s*#.*$/, '').trim() + }).filter(function (line) { + return line + }) + data.contributors = ad + return cb(null, data) +} + +function readme (file, data, cb) { + if (data.readme) { + return cb(null, data) + } + var dir = path.dirname(file) + var globOpts = { cwd: dir, nocase: true, mark: true } + glob('{README,README.*}', globOpts) + .then(files => { + // don't accept directories. + files = files.filter(function (filtered) { + return !filtered.match(/\/$/) + }) + if (!files.length) { + return cb() + } + var fn = preferMarkdownReadme(files) + var rm = path.resolve(dir, fn) + return readme_(file, data, rm, cb) + }) + .catch(er => cb(er)) +} + +function preferMarkdownReadme (files) { + var fallback = 0 + var re = /\.m?a?r?k?d?o?w?n?$/i + for (var i = 0; i < files.length; i++) { + if (files[i].match(re)) { + return files[i] + } else if (files[i].match(/README$/)) { + fallback = i + } + } + // prefer README.md, followed by README; otherwise, return + // the first filename (which could be README) + return files[fallback] +} + +function readme_ (file, data, rm, cb) { + var rmfn = path.basename(rm) + fs.readFile(rm, 'utf8', function (er, rmData) { + // maybe not readable, or something. + if (er) { + return cb() + } + data.readme = rmData + data.readmeFilename = rmfn + return cb(er, data) + }) +} + +function mans (file, data, cb) { + let cwd = data.directories && data.directories.man + if (data.man || !cwd) { + return cb(null, data) + } + const dirname = path.dirname(file) + cwd = path.resolve(path.dirname(file), cwd) + glob('**/*.[0-9]', { cwd }) + .then(mansGlob => { + data.man = mansGlob.map(man => + path.relative(dirname, path.join(cwd, man)).split(path.sep).join('/') + ) + return cb(null, data) + }) + .catch(er => cb(er)) +} + +function bins (file, data, cb) { + data = normalizePackageBin(data) + + var m = data.directories && data.directories.bin + if (data.bin || !m) { + return cb(null, data) + } + + m = path.resolve(path.dirname(file), path.join('.', path.join('/', m))) + glob('**', { cwd: m }) + .then(binsGlob => bins_(file, data, binsGlob, cb)) + .catch(er => cb(er)) +} + +function bins_ (file, data, binsGlob, cb) { + var m = (data.directories && data.directories.bin) || '.' 
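+ // map each non-hidden file found under directories.bin to a bin + // entry keyed by its basename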
+ data.bin = binsGlob.reduce(function (acc, mf) { + if (mf && mf.charAt(0) !== '.') { + var f = path.basename(mf) + acc[f] = path.join(m, mf) + } + return acc + }, {}) + return cb(null, normalizePackageBin(data)) +} + +function bundleDependencies (file, data, cb) { + var bd = 'bundleDependencies' + var bdd = 'bundledDependencies' + // normalize key name + if (data[bdd] !== undefined) { + if (data[bd] === undefined) { + data[bd] = data[bdd] + } + delete data[bdd] + } + if (data[bd] === false) { + delete data[bd] + } else if (data[bd] === true) { + data[bd] = Object.keys(data.dependencies || {}) + } else if (data[bd] !== undefined && !Array.isArray(data[bd])) { + delete data[bd] + } + return cb(null, data) +} + +function githead (file, data, cb) { + if (data.gitHead) { + return cb(null, data) + } + var dir = path.dirname(file) + var head = path.resolve(dir, '.git/HEAD') + fs.readFile(head, 'utf8', function (er, headData) { + if (er) { + var parent = path.dirname(dir) + if (parent === dir) { + return cb(null, data) + } + return githead(dir, data, cb) + } + githead_(data, dir, headData, cb) + }) +} + +function githead_ (data, dir, head, cb) { + if (!head.match(/^ref: /)) { + data.gitHead = head.trim() + return cb(null, data) + } + var headRef = head.replace(/^ref: /, '').trim() + var headFile = path.resolve(dir, '.git', headRef) + fs.readFile(headFile, 'utf8', function (er, headData) { + if (er || !headData) { + var packFile = path.resolve(dir, '.git/packed-refs') + return fs.readFile(packFile, 'utf8', function (readFileErr, refs) { + if (readFileErr || !refs) { + return cb(null, data) + } + refs = refs.split('\n') + for (var i = 0; i < refs.length; i++) { + var match = refs[i].match(/^([0-9a-f]{40}) (.+)$/) + if (match && match[2].trim() === headRef) { + data.gitHead = match[1] + break + } + } + return cb(null, data) + }) + } + headData = headData.replace(/^ref: /, '').trim() + data.gitHead = headData + return cb(null, data) + }) +} + +/** + * Warn if the bin references don't point to anything. This might be better in + * normalize-package-data if it had access to the file path. 
+ */ +function checkBinReferences_ (file, data, warn, cb) { + if (!(data.bin instanceof Object)) { + return cb() + } + + var keys = Object.keys(data.bin) + var keysLeft = keys.length + if (!keysLeft) { + return cb() + } + + function handleExists (relName, result) { + keysLeft-- + if (!result) { + warn('No bin file found at ' + relName) + } + if (!keysLeft) { + cb() + } + } + + keys.forEach(function (key) { + var dirName = path.dirname(file) + var relName = data.bin[key] + /* istanbul ignore if - impossible, bins have been normalized */ + if (typeof relName !== 'string') { + var msg = 'Bin filename for ' + key + + ' is not a string: ' + util.inspect(relName) + warn(msg) + delete data.bin[key] + handleExists(relName, true) + return + } + var binPath = path.resolve(dirName, relName) + fs.stat(binPath, (err) => handleExists(relName, !err)) + }) +} + +function final (file, data, log, strict, cb) { + var pId = makePackageId(data) + + function warn (msg) { + if (typoWarned[pId]) { + return + } + if (log) { + log('package.json', pId, msg) + } + } + + try { + normalizeData(data, warn, strict) + } catch (error) { + return cb(error) + } + + checkBinReferences_(file, data, warn, function () { + typoWarned[pId] = true + cb(null, data) + }) +} + +function fillTypes (file, data, cb) { + var index = data.main || 'index.js' + + if (typeof index !== 'string') { + return cb(new TypeError('The "main" attribute must be of type string.')) + } + + // TODO exports is much more complicated than this in verbose format + // We need to support for instance + + // "exports": { + // ".": [ + // { + // "default": "./lib/npm.js" + // }, + // "./lib/npm.js" + // ], + // "./package.json": "./package.json" + // }, + // as well as conditional exports + + // if (data.exports && typeof data.exports === 'string') { + // index = data.exports + // } + + // if (data.exports && data.exports['.']) { + // index = data.exports['.'] + // if (typeof index !== 'string') { + // } + // } + + var extless = + path.join(path.dirname(index), path.basename(index, path.extname(index))) + var dts = `./${extless}.d.ts` + var dtsPath = path.join(path.dirname(file), dts) + var hasDTSFields = 'types' in data || 'typings' in data + if (!hasDTSFields && fs.existsSync(dtsPath)) { + data.types = dts.split(path.sep).join('/') + } + + cb(null, data) +} + +function makePackageId (data) { + var name = cleanString(data.name) + var ver = cleanString(data.version) + return name + '@' + ver +} + +function cleanString (str) { + return (!str || typeof (str) !== 'string') ? '' : str.trim() +} + +// /**package { "name": "foo", "version": "1.2.3", ... 
} **/ +function parseIndex (data) { + data = data.split(/^\/\*\*package(?:\s|$)/m) + + if (data.length < 2) { + return null + } + data = data[1] + data = data.split(/\*\*\/$/m) + + if (data.length < 2) { + return null + } + data = data[0] + data = data.replace(/^\s*\*/mg, '') + + try { + return safeJSON(data) + } catch (er) { + return null + } +} + +function parseError (ex, file) { + var e = new Error('Failed to parse json\n' + ex.message) + e.code = 'EJSONPARSE' + e.path = file + return e +} diff --git a/node_modules/pacote/node_modules/read-package-json/package.json b/node_modules/pacote/node_modules/read-package-json/package.json new file mode 100644 index 0000000000000..01061f2bc2792 --- /dev/null +++ b/node_modules/pacote/node_modules/read-package-json/package.json @@ -0,0 +1,65 @@ +{ + "name": "read-package-json", + "version": "7.0.0", + "author": "GitHub Inc.", + "description": "The thing npm uses to read package.json files with semantics and defaults and validation", + "repository": { + "type": "git", + "url": "https://github.com/npm/read-package-json.git" + }, + "main": "lib/read-json.js", + "scripts": { + "prerelease": "npm t", + "postrelease": "npm publish && git push --follow-tags", + "release": "standard-version -s", + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint \"**/*.js\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force" + }, + "dependencies": { + "glob": "^10.2.2", + "json-parse-even-better-errors": "^3.0.0", + "normalize-package-data": "^6.0.0", + "npm-normalize-package-bin": "^3.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.18.0", + "tap": "^16.0.1" + }, + "license": "ISC", + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "tap": { + "branches": 73, + "functions": 77, + "lines": 77, + "statements": 77, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.18.0", + "publish": "true", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] + } +} diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json index d9119065bfc3d..987cbccded90b 100644 --- a/node_modules/pacote/package.json +++ b/node_modules/pacote/package.json @@ -1,6 +1,6 @@ { "name": "pacote", - "version": "16.0.0", + "version": "17.0.0", "description": "JavaScript package downloader", "author": "GitHub Inc.", "bin": { @@ -28,7 +28,7 @@ "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0", "@npmcli/eslint-config": "^4.0.0", "@npmcli/template-oss": "4.18.0", - "hosted-git-info": "^6.0.0", + "hosted-git-info": "^7.0.0", "mutate-fs": "^2.1.1", "nock": "^13.2.4", "npm-registry-mock": "^1.3.2", @@ -44,27 +44,27 @@ "git" ], "dependencies": { - "@npmcli/git": "^4.0.0", + "@npmcli/git": "^5.0.0", "@npmcli/installed-package-contents": "^2.0.1", "@npmcli/promise-spawn": "^6.0.1", "@npmcli/run-script": "^6.0.0", - "cacache": "^17.0.0", + "cacache": "^18.0.0", "fs-minipass": "^3.0.0", "minipass": "^7.0.2", - "npm-package-arg": "^10.0.0", + "npm-package-arg": "^11.0.0", "npm-packlist": "^7.0.0", "npm-pick-manifest": "^8.0.0", "npm-registry-fetch": "^15.0.0", "proc-log": "^3.0.0", "promise-retry": "^2.0.1", - "read-package-json": "^6.0.0", + "read-package-json": "^7.0.0", "read-package-json-fast": "^3.0.0", "sigstore": "^1.3.0", "ssri": "^10.0.0", "tar": "^6.1.11" }, "engines": { - "node": "^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "repository": { "type": "git", @@ -73,7 +73,7 @@ "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "ciVersions": [ - "16.13.0", + "16.14.0", "16.x", "18.0.0", "18.x" diff --git a/package-lock.json b/package-lock.json index 693e245a4e45b..0d868b1c1b358 100644 --- a/package-lock.json +++ b/package-lock.json @@ -138,7 +138,7 @@ "npm-user-validate": "^2.0.0", "npmlog": "^7.0.1", "p-map": "^4.0.0", - "pacote": "^16.0.0", + "pacote": "^17.0.0", "parse-conflict-json": "^3.0.1", "proc-log": "^3.0.0", "qrcode-terminal": "^0.12.0", @@ -230,7 +230,7 @@ "json-stringify-safe": "^5.0.1", "nock": "^13.3.0", "npm-package-arg": "^11.0.0", - "pacote": "^16.0.0", + "pacote": "^17.0.0", "tap": "^16.3.4" }, "engines": { @@ -2524,6 +2524,24 @@ "node": "^16.13.0 || >=18.0.0" } }, + "node_modules/@npmcli/metavuln-calculator/node_modules/@npmcli/git": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-4.1.0.tgz", + "integrity": "sha512-9hwoB3gStVfa0N31ymBmrX+GuDGdVA/QWShZVqE0HK2Af+7QGGrCTbZia/SW0ImUTjTne7SP91qxDmtXvDHRPQ==", + "dependencies": { + "@npmcli/promise-spawn": "^6.0.0", + "lru-cache": "^7.4.4", + "npm-pick-manifest": "^8.0.0", + "proc-log": "^3.0.0", + "promise-inflight": "^1.0.1", + "promise-retry": "^2.0.1", + "semver": "^7.3.5", + "which": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/@npmcli/metavuln-calculator/node_modules/cacache": { "version": "17.1.4", "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz", @@ -2546,6 +2564,17 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/@npmcli/metavuln-calculator/node_modules/hosted-git-info": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz", + "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==", + "dependencies": { + "lru-cache": "^7.5.1" 
+ }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache": { "version": "7.18.3", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", @@ -2554,6 +2583,65 @@ "node": ">=12" } }, + "node_modules/@npmcli/metavuln-calculator/node_modules/npm-package-arg": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz", + "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==", + "dependencies": { + "hosted-git-info": "^6.0.0", + "proc-log": "^3.0.0", + "semver": "^7.3.5", + "validate-npm-package-name": "^5.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/npm-pick-manifest": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz", + "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==", + "dependencies": { + "npm-install-checks": "^6.0.0", + "npm-normalize-package-bin": "^3.0.0", + "npm-package-arg": "^10.0.0", + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/metavuln-calculator/node_modules/pacote": { + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-16.0.0.tgz", + "integrity": "sha512-tXeSsl21IUIzw/zW0rzK/po2zlI5Nqbkpu0V6Pv99f3leHde7zdv+VjuP9pnVeTVV7OvaS49u+lgmvXjQ0TMJQ==", + "dependencies": { + "@npmcli/git": "^4.0.0", + "@npmcli/installed-package-contents": "^2.0.1", + "@npmcli/promise-spawn": "^6.0.1", + "@npmcli/run-script": "^6.0.0", + "cacache": "^17.0.0", + "fs-minipass": "^3.0.0", + "minipass": "^7.0.2", + "npm-package-arg": "^10.0.0", + "npm-packlist": "^7.0.0", + "npm-pick-manifest": "^8.0.0", + "npm-registry-fetch": "^15.0.0", + "proc-log": "^3.0.0", + "promise-retry": "^2.0.1", + "read-package-json": "^6.0.0", + "read-package-json-fast": "^3.0.0", + "sigstore": "^1.3.0", + "ssri": "^10.0.0", + "tar": "^6.1.11" + }, + "bin": { + "pacote": "lib/bin.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, "node_modules/@npmcli/mock-globals": { "resolved": "mock-globals", "link": true @@ -10612,25 +10700,25 @@ } }, "node_modules/pacote": { - "version": "16.0.0", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-16.0.0.tgz", - "integrity": "sha512-tXeSsl21IUIzw/zW0rzK/po2zlI5Nqbkpu0V6Pv99f3leHde7zdv+VjuP9pnVeTVV7OvaS49u+lgmvXjQ0TMJQ==", + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-17.0.0.tgz", + "integrity": "sha512-ho3CUJW0Eh/z6qus9YfPE7lbLoIr97T9KkvrcWcqmykNMuvF1bGL2IXJ0U+hoe7rAamLCX6CXl6xp0aPvyoPag==", "inBundle": true, "dependencies": { - "@npmcli/git": "^4.0.0", + "@npmcli/git": "^5.0.0", "@npmcli/installed-package-contents": "^2.0.1", "@npmcli/promise-spawn": "^6.0.1", "@npmcli/run-script": "^6.0.0", - "cacache": "^17.0.0", + "cacache": "^18.0.0", "fs-minipass": "^3.0.0", "minipass": "^7.0.2", - "npm-package-arg": "^10.0.0", + "npm-package-arg": "^11.0.0", "npm-packlist": "^7.0.0", "npm-pick-manifest": "^8.0.0", "npm-registry-fetch": "^15.0.0", "proc-log": "^3.0.0", "promise-retry": "^2.0.1", - "read-package-json": "^6.0.0", + "read-package-json": "^7.0.0", "read-package-json-fast": "^3.0.0", "sigstore": "^1.3.0", "ssri": "^10.0.0", @@ -10640,52 +10728,49 @@ "pacote": "lib/bin.js" }, "engines": { - "node": "^16.13.0 || 
>=18.0.0" + "node": "^16.14.0 || >=18.0.0" } }, - "node_modules/pacote/node_modules/@npmcli/git": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-4.1.0.tgz", - "integrity": "sha512-9hwoB3gStVfa0N31ymBmrX+GuDGdVA/QWShZVqE0HK2Af+7QGGrCTbZia/SW0ImUTjTne7SP91qxDmtXvDHRPQ==", + "node_modules/pacote/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "inBundle": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/pacote/node_modules/normalize-package-data": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.0.tgz", + "integrity": "sha512-UL7ELRVxYBHBgYEtZCXjxuD5vPxnmvMGq0jp/dGPKKrN7tfsBh2IY7TlJ15WWwdjRWD3RJbnsygUurTK3xkPkg==", "inBundle": true, "dependencies": { - "@npmcli/promise-spawn": "^6.0.0", - "lru-cache": "^7.4.4", - "npm-pick-manifest": "^8.0.0", - "proc-log": "^3.0.0", - "promise-inflight": "^1.0.1", - "promise-retry": "^2.0.1", + "hosted-git-info": "^7.0.0", + "is-core-module": "^2.8.1", "semver": "^7.3.5", - "which": "^3.0.0" + "validate-npm-package-license": "^3.0.4" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" } }, - "node_modules/pacote/node_modules/cacache": { - "version": "17.1.4", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz", - "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==", + "node_modules/pacote/node_modules/npm-pick-manifest": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz", + "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==", "inBundle": true, "dependencies": { - "@npmcli/fs": "^3.1.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^7.7.1", - "minipass": "^7.0.3", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" + "npm-install-checks": "^6.0.0", + "npm-normalize-package-bin": "^3.0.0", + "npm-package-arg": "^10.0.0", + "semver": "^7.3.5" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, - "node_modules/pacote/node_modules/hosted-git-info": { + "node_modules/pacote/node_modules/npm-pick-manifest/node_modules/hosted-git-info": { "version": "6.1.1", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz", "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==", @@ -10697,16 +10782,7 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, - "node_modules/pacote/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "inBundle": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/pacote/node_modules/npm-package-arg": { + "node_modules/pacote/node_modules/npm-pick-manifest/node_modules/npm-package-arg": { "version": "10.1.0", "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz", "integrity": 
"sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==", @@ -10721,19 +10797,19 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, - "node_modules/pacote/node_modules/npm-pick-manifest": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-8.0.2.tgz", - "integrity": "sha512-1dKY+86/AIiq1tkKVD3l0WI+Gd3vkknVGAggsFeBkTvbhMQ1OND/LKkYv4JtXPKUJ8bOTCyLiqEg2P6QNdK+Gg==", + "node_modules/pacote/node_modules/read-package-json": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-7.0.0.tgz", + "integrity": "sha512-uL4Z10OKV4p6vbdvIXB+OzhInYtIozl/VxUBPgNkBuUi2DeRonnuspmaVAMcrkmfjKGNmRndyQAbE7/AmzGwFg==", "inBundle": true, "dependencies": { - "npm-install-checks": "^6.0.0", - "npm-normalize-package-bin": "^3.0.0", - "npm-package-arg": "^10.0.0", - "semver": "^7.3.5" + "glob": "^10.2.2", + "json-parse-even-better-errors": "^3.0.0", + "normalize-package-data": "^6.0.0", + "npm-normalize-package-bin": "^3.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" } }, "node_modules/parent-module": { @@ -16332,7 +16408,7 @@ "npm-pick-manifest": "^9.0.0", "npm-registry-fetch": "^15.0.0", "npmlog": "^7.0.1", - "pacote": "^16.0.0", + "pacote": "^17.0.0", "parse-conflict-json": "^3.0.0", "proc-log": "^3.0.0", "promise-all-reject-late": "^1.0.0", @@ -16413,7 +16489,7 @@ "diff": "^5.1.0", "minimatch": "^9.0.0", "npm-package-arg": "^11.0.0", - "pacote": "^16.0.0", + "pacote": "^17.0.0", "tar": "^6.1.13" }, "devDependencies": { @@ -16434,7 +16510,7 @@ "ci-info": "^3.7.1", "npm-package-arg": "^11.0.0", "npmlog": "^7.0.1", - "pacote": "^16.0.0", + "pacote": "^17.0.0", "proc-log": "^3.0.0", "read": "^2.0.0", "read-package-json-fast": "^3.0.2", @@ -16512,7 +16588,7 @@ "@npmcli/arborist": "^6.3.0", "@npmcli/run-script": "^6.0.0", "npm-package-arg": "^11.0.0", - "pacote": "^16.0.0" + "pacote": "^17.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", diff --git a/package.json b/package.json index 2cc6dcb9e33f1..f13feb344bbfa 100644 --- a/package.json +++ b/package.json @@ -103,7 +103,7 @@ "npm-user-validate": "^2.0.0", "npmlog": "^7.0.1", "p-map": "^4.0.0", - "pacote": "^16.0.0", + "pacote": "^17.0.0", "parse-conflict-json": "^3.0.1", "proc-log": "^3.0.0", "qrcode-terminal": "^0.12.0", diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index 912cfe79779dc..d9aaf7aed8042 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -26,7 +26,7 @@ "npm-pick-manifest": "^9.0.0", "npm-registry-fetch": "^15.0.0", "npmlog": "^7.0.1", - "pacote": "^16.0.0", + "pacote": "^17.0.0", "parse-conflict-json": "^3.0.0", "proc-log": "^3.0.0", "promise-all-reject-late": "^1.0.0", diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json index 1bdcba1f3b5dc..12565b747d062 100644 --- a/workspaces/libnpmdiff/package.json +++ b/workspaces/libnpmdiff/package.json @@ -53,7 +53,7 @@ "diff": "^5.1.0", "minimatch": "^9.0.0", "npm-package-arg": "^11.0.0", - "pacote": "^16.0.0", + "pacote": "^17.0.0", "tar": "^6.1.13" }, "templateOSS": { diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json index a6501d3169caa..e633eb98dd50a 100644 --- a/workspaces/libnpmexec/package.json +++ b/workspaces/libnpmexec/package.json @@ -64,7 +64,7 @@ "ci-info": "^3.7.1", "npm-package-arg": "^11.0.0", "npmlog": "^7.0.1", - "pacote": "^16.0.0", + "pacote": 
"^17.0.0", "proc-log": "^3.0.0", "read": "^2.0.0", "read-package-json-fast": "^3.0.2", diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json index f5e98878096f9..0a8330815c2a2 100644 --- a/workspaces/libnpmpack/package.json +++ b/workspaces/libnpmpack/package.json @@ -39,7 +39,7 @@ "@npmcli/arborist": "^6.3.0", "@npmcli/run-script": "^6.0.0", "npm-package-arg": "^11.0.0", - "pacote": "^16.0.0" + "pacote": "^17.0.0" }, "engines": { "node": "^16.13.0 || >=18.0.0"