From 10792ea951a3ef8fc138f82d7b81484006213ce9 Mon Sep 17 00:00:00 2001
From: Luke Karrys
Date: Tue, 15 Aug 2023 13:53:22 -0700
Subject: [PATCH] deps: init-package-json@6.0.0

---
 node_modules/.gitignore | 5 +-
 .../hosted-git-info/lib/from-url.js | 122 --
 .../node_modules/hosted-git-info/lib/hosts.js | 228 ---
 .../node_modules/hosted-git-info/lib/index.js | 179 ---
 .../hosted-git-info/lib/parse-url.js | 78 --
 .../node_modules/hosted-git-info/package.json | 59 -
 .../node_modules/lru-cache/LICENSE | 15 -
 .../node_modules/lru-cache/index.js | 1227 -----------------
 .../node_modules/lru-cache/index.mjs | 1227 -----------------
 .../node_modules/lru-cache/package.json | 96 --
 .../normalize-package-data/LICENSE | 15 +
 .../lib/extract_description.js | 24 +
 .../normalize-package-data/lib/fixer.js | 475 +++++++
 .../lib/make_warning.js | 22 +
 .../normalize-package-data/lib/normalize.js | 48 +
 .../normalize-package-data/lib/safe_format.js | 11 +
 .../normalize-package-data/lib/typos.json | 25 +
 .../lib/warning_messages.json | 30 +
 .../package.json | 75 +-
 .../node_modules/npm-package-arg/LICENSE | 15 -
 .../node_modules/npm-package-arg/lib/npa.js | 431 ------
 .../LICENSE | 4 +-
 .../read-package-json/lib/read-json.js | 589 ++++++++
 .../read-package-json/package.json | 65 +
 node_modules/init-package-json/package.json | 21 +-
 package-lock.json | 60 +-
 package.json | 2 +-
 27 files changed, 1388 insertions(+), 3760 deletions(-)
 delete mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js
 delete mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js
 delete mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js
 delete mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js
 delete mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/package.json
 delete mode 100644 node_modules/init-package-json/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/init-package-json/node_modules/lru-cache/index.js
 delete mode 100644 node_modules/init-package-json/node_modules/lru-cache/index.mjs
 delete mode 100644 node_modules/init-package-json/node_modules/lru-cache/package.json
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/LICENSE
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json
 create mode 100644 node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json
 rename node_modules/init-package-json/node_modules/{npm-package-arg => normalize-package-data}/package.json (55%)
 delete mode 100644 node_modules/init-package-json/node_modules/npm-package-arg/LICENSE
 delete mode 100644 node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js
 rename node_modules/init-package-json/node_modules/{hosted-git-info => read-package-json}/LICENSE (93%)
 create mode 100644
node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js create mode 100644 node_modules/init-package-json/node_modules/read-package-json/package.json diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 7bc0a96e9f6ef..106eec7635e0c 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -147,9 +147,8 @@ !/init-package-json !/init-package-json/node_modules/ /init-package-json/node_modules/* -!/init-package-json/node_modules/hosted-git-info -!/init-package-json/node_modules/lru-cache -!/init-package-json/node_modules/npm-package-arg +!/init-package-json/node_modules/normalize-package-data +!/init-package-json/node_modules/read-package-json !/ip-regex !/ip !/is-cidr diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js deleted file mode 100644 index efc1247d59d12..0000000000000 --- a/node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js +++ /dev/null @@ -1,122 +0,0 @@ -'use strict' - -const parseUrl = require('./parse-url') - -// look for github shorthand inputs, such as npm/cli -const isGitHubShorthand = (arg) => { - // it cannot contain whitespace before the first # - // it cannot start with a / because that's probably an absolute file path - // but it must include a slash since repos are username/repository - // it cannot start with a . because that's probably a relative file path - // it cannot start with an @ because that's a scoped package if it passes the other tests - // it cannot contain a : before a # because that tells us that there's a protocol - // a second / may not exist before a # - const firstHash = arg.indexOf('#') - const firstSlash = arg.indexOf('/') - const secondSlash = arg.indexOf('/', firstSlash + 1) - const firstColon = arg.indexOf(':') - const firstSpace = /\s/.exec(arg) - const firstAt = arg.indexOf('@') - - const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash) - const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash) - const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash) - const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash) - const hasSlash = firstSlash > 0 - // if a # is found, what we really want to know is that the character - // immediately before # is not a / - const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/') - const doesNotStartWithDot = !arg.startsWith('.') - - return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash && - doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash && - secondSlashOnlyAfterHash -} - -module.exports = (giturl, opts, { gitHosts, protocols }) => { - if (!giturl) { - return - } - - const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl - const parsed = parseUrl(correctedUrl, protocols) - if (!parsed) { - return - } - - const gitHostShortcut = gitHosts.byShortcut[parsed.protocol] - const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.') - ? parsed.hostname.slice(4) - : parsed.hostname] - const gitHostName = gitHostShortcut || gitHostDomain - if (!gitHostName) { - return - } - - const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain] - let auth = null - if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) { - auth = `${parsed.username}${parsed.password ? 
':' + parsed.password : ''}` - } - - let committish = null - let user = null - let project = null - let defaultRepresentation = null - - try { - if (gitHostShortcut) { - let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname - const firstAt = pathname.indexOf('@') - // we ignore auth for shortcuts, so just trim it out - if (firstAt > -1) { - pathname = pathname.slice(firstAt + 1) - } - - const lastSlash = pathname.lastIndexOf('/') - if (lastSlash > -1) { - user = decodeURIComponent(pathname.slice(0, lastSlash)) - // we want nulls only, never empty strings - if (!user) { - user = null - } - project = decodeURIComponent(pathname.slice(lastSlash + 1)) - } else { - project = decodeURIComponent(pathname) - } - - if (project.endsWith('.git')) { - project = project.slice(0, -4) - } - - if (parsed.hash) { - committish = decodeURIComponent(parsed.hash.slice(1)) - } - - defaultRepresentation = 'shortcut' - } else { - if (!gitHostInfo.protocols.includes(parsed.protocol)) { - return - } - - const segments = gitHostInfo.extract(parsed) - if (!segments) { - return - } - - user = segments.user && decodeURIComponent(segments.user) - project = decodeURIComponent(segments.project) - committish = decodeURIComponent(segments.committish) - defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1) - } - } catch (err) { - /* istanbul ignore else */ - if (err instanceof URIError) { - return - } else { - throw err - } - } - - return [gitHostName, user, auth, project, committish, defaultRepresentation, opts] -} diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js deleted file mode 100644 index 013712b7842c8..0000000000000 --- a/node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js +++ /dev/null @@ -1,228 +0,0 @@ -/* eslint-disable max-len */ - -'use strict' - -const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : '' -const maybeEncode = (arg) => arg ? 
encodeURIComponent(arg) : '' -const formatHashFragment = (f) => f.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-') - -const defaults = { - sshtemplate: ({ domain, user, project, committish }) => - `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`, - sshurltemplate: ({ domain, user, project, committish }) => - `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, - edittemplate: ({ domain, user, project, committish, editpath, path }) => - `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`, - browsetemplate: ({ domain, user, project, committish, treepath }) => - `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`, - browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => - `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, - browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) => - `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, - docstemplate: ({ domain, user, project, treepath, committish }) => - `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, - httpstemplate: ({ auth, domain, user, project, committish }) => - `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, - filetemplate: ({ domain, user, project, committish, path }) => - `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`, - shortcuttemplate: ({ type, user, project, committish }) => - `${type}:${user}/${project}${maybeJoin('#', committish)}`, - pathtemplate: ({ user, project, committish }) => - `${user}/${project}${maybeJoin('#', committish)}`, - bugstemplate: ({ domain, user, project }) => - `https://${domain}/${user}/${project}/issues`, - hashformat: formatHashFragment, -} - -const hosts = {} -hosts.github = { - // First two are insecure and generally shouldn't be used any more, but - // they are still supported. 
- protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], - domain: 'github.com', - treepath: 'tree', - blobpath: 'blob', - editpath: 'edit', - filetemplate: ({ auth, user, project, committish, path }) => - `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`, - gittemplate: ({ auth, domain, user, project, committish }) => - `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, - tarballtemplate: ({ domain, user, project, committish }) => - `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`, - extract: (url) => { - let [, user, project, type, committish] = url.pathname.split('/', 5) - if (type && type !== 'tree') { - return - } - - if (!type) { - committish = url.hash.slice(1) - } - - if (project && project.endsWith('.git')) { - project = project.slice(0, -4) - } - - if (!user || !project) { - return - } - - return { user, project, committish } - }, -} - -hosts.bitbucket = { - protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], - domain: 'bitbucket.org', - treepath: 'src', - blobpath: 'src', - editpath: '?mode=edit', - edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) => - `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`, - tarballtemplate: ({ domain, user, project, committish }) => - `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`, - extract: (url) => { - let [, user, project, aux] = url.pathname.split('/', 4) - if (['get'].includes(aux)) { - return - } - - if (project && project.endsWith('.git')) { - project = project.slice(0, -4) - } - - if (!user || !project) { - return - } - - return { user, project, committish: url.hash.slice(1) } - }, -} - -hosts.gitlab = { - protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], - domain: 'gitlab.com', - treepath: 'tree', - blobpath: 'tree', - editpath: '-/edit', - httpstemplate: ({ auth, domain, user, project, committish }) => - `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, - tarballtemplate: ({ domain, user, project, committish }) => - `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`, - extract: (url) => { - const path = url.pathname.slice(1) - if (path.includes('/-/') || path.includes('/archive.tar.gz')) { - return - } - - const segments = path.split('/') - let project = segments.pop() - if (project.endsWith('.git')) { - project = project.slice(0, -4) - } - - const user = segments.join('/') - if (!user || !project) { - return - } - - return { user, project, committish: url.hash.slice(1) } - }, -} - -hosts.gist = { - protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], - domain: 'gist.github.com', - editpath: 'edit', - sshtemplate: ({ domain, project, committish }) => - `git@${domain}:${project}.git${maybeJoin('#', committish)}`, - sshurltemplate: ({ domain, project, committish }) => - `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`, - edittemplate: ({ domain, user, project, committish, editpath }) => - `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`, - browsetemplate: ({ domain, project, committish }) => - `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, - browsetreetemplate: ({ domain, project, committish, path, 
hashformat }) => - `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, - browseblobtemplate: ({ domain, project, committish, path, hashformat }) => - `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, - docstemplate: ({ domain, project, committish }) => - `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, - httpstemplate: ({ domain, project, committish }) => - `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`, - filetemplate: ({ user, project, committish, path }) => - `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`, - shortcuttemplate: ({ type, project, committish }) => - `${type}:${project}${maybeJoin('#', committish)}`, - pathtemplate: ({ project, committish }) => - `${project}${maybeJoin('#', committish)}`, - bugstemplate: ({ domain, project }) => - `https://${domain}/${project}`, - gittemplate: ({ domain, project, committish }) => - `git://${domain}/${project}.git${maybeJoin('#', committish)}`, - tarballtemplate: ({ project, committish }) => - `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`, - extract: (url) => { - let [, user, project, aux] = url.pathname.split('/', 4) - if (aux === 'raw') { - return - } - - if (!project) { - if (!user) { - return - } - - project = user - user = null - } - - if (project.endsWith('.git')) { - project = project.slice(0, -4) - } - - return { user, project, committish: url.hash.slice(1) } - }, - hashformat: function (fragment) { - return fragment && 'file-' + formatHashFragment(fragment) - }, -} - -hosts.sourcehut = { - protocols: ['git+ssh:', 'https:'], - domain: 'git.sr.ht', - treepath: 'tree', - blobpath: 'tree', - filetemplate: ({ domain, user, project, committish, path }) => - `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`, - httpstemplate: ({ domain, user, project, committish }) => - `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, - tarballtemplate: ({ domain, user, project, committish }) => - `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`, - bugstemplate: ({ user, project }) => - `https://todo.sr.ht/${user}/${project}`, - extract: (url) => { - let [, user, project, aux] = url.pathname.split('/', 4) - - // tarball url - if (['archive'].includes(aux)) { - return - } - - if (project && project.endsWith('.git')) { - project = project.slice(0, -4) - } - - if (!user || !project) { - return - } - - return { user, project, committish: url.hash.slice(1) } - }, -} - -for (const [name, host] of Object.entries(hosts)) { - hosts[name] = Object.assign({}, defaults, host) -} - -module.exports = hosts diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js deleted file mode 100644 index a7339c217e9a3..0000000000000 --- a/node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js +++ /dev/null @@ -1,179 +0,0 @@ -'use strict' - -const LRU = require('lru-cache') -const hosts = require('./hosts.js') -const fromUrl = require('./from-url.js') -const parseUrl = require('./parse-url.js') - -const cache = new LRU({ max: 1000 }) - -class GitHost { - constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) { - Object.assign(this, GitHost.#gitHosts[type], { - type, - 
user, - auth, - project, - committish, - default: defaultRepresentation, - opts, - }) - } - - static #gitHosts = { byShortcut: {}, byDomain: {} } - static #protocols = { - 'git+ssh:': { name: 'sshurl' }, - 'ssh:': { name: 'sshurl' }, - 'git+https:': { name: 'https', auth: true }, - 'git:': { auth: true }, - 'http:': { auth: true }, - 'https:': { auth: true }, - 'git+http:': { auth: true }, - } - - static addHost (name, host) { - GitHost.#gitHosts[name] = host - GitHost.#gitHosts.byDomain[host.domain] = name - GitHost.#gitHosts.byShortcut[`${name}:`] = name - GitHost.#protocols[`${name}:`] = { name } - } - - static fromUrl (giturl, opts) { - if (typeof giturl !== 'string') { - return - } - - const key = giturl + JSON.stringify(opts || {}) - - if (!cache.has(key)) { - const hostArgs = fromUrl(giturl, opts, { - gitHosts: GitHost.#gitHosts, - protocols: GitHost.#protocols, - }) - cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined) - } - - return cache.get(key) - } - - static parseUrl (url) { - return parseUrl(url) - } - - #fill (template, opts) { - if (typeof template !== 'function') { - return null - } - - const options = { ...this, ...this.opts, ...opts } - - // the path should always be set so we don't end up with 'undefined' in urls - if (!options.path) { - options.path = '' - } - - // template functions will insert the leading slash themselves - if (options.path.startsWith('/')) { - options.path = options.path.slice(1) - } - - if (options.noCommittish) { - options.committish = null - } - - const result = template(options) - return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result - } - - hash () { - return this.committish ? `#${this.committish}` : '' - } - - ssh (opts) { - return this.#fill(this.sshtemplate, opts) - } - - sshurl (opts) { - return this.#fill(this.sshurltemplate, opts) - } - - browse (path, ...args) { - // not a string, treat path as opts - if (typeof path !== 'string') { - return this.#fill(this.browsetemplate, path) - } - - if (typeof args[0] !== 'string') { - return this.#fill(this.browsetreetemplate, { ...args[0], path }) - } - - return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path }) - } - - // If the path is known to be a file, then browseFile should be used. For some hosts - // the url is the same as browse, but for others like GitHub a file can use both `/tree/` - // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/` - // path will redirect to a specific commit. Using the `/blob/` path avoids this and - // does not redirect to a different commit. 
- browseFile (path, ...args) { - if (typeof args[0] !== 'string') { - return this.#fill(this.browseblobtemplate, { ...args[0], path }) - } - - return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path }) - } - - docs (opts) { - return this.#fill(this.docstemplate, opts) - } - - bugs (opts) { - return this.#fill(this.bugstemplate, opts) - } - - https (opts) { - return this.#fill(this.httpstemplate, opts) - } - - git (opts) { - return this.#fill(this.gittemplate, opts) - } - - shortcut (opts) { - return this.#fill(this.shortcuttemplate, opts) - } - - path (opts) { - return this.#fill(this.pathtemplate, opts) - } - - tarball (opts) { - return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false }) - } - - file (path, opts) { - return this.#fill(this.filetemplate, { ...opts, path }) - } - - edit (path, opts) { - return this.#fill(this.edittemplate, { ...opts, path }) - } - - getDefaultRepresentation () { - return this.default - } - - toString (opts) { - if (this.default && typeof this[this.default] === 'function') { - return this[this.default](opts) - } - - return this.sshurl(opts) - } -} - -for (const [name, host] of Object.entries(hosts)) { - GitHost.addHost(name, host) -} - -module.exports = GitHost diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js deleted file mode 100644 index 7d5489c008ab4..0000000000000 --- a/node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js +++ /dev/null @@ -1,78 +0,0 @@ -const url = require('url') - -const lastIndexOfBefore = (str, char, beforeChar) => { - const startPosition = str.indexOf(beforeChar) - return str.lastIndexOf(char, startPosition > -1 ? 
startPosition : Infinity) -} - -const safeUrl = (u) => { - try { - return new url.URL(u) - } catch { - // this fn should never throw - } -} - -// accepts input like git:github.com:user/repo and inserts the // after the first : -const correctProtocol = (arg, protocols) => { - const firstColon = arg.indexOf(':') - const proto = arg.slice(0, firstColon + 1) - if (Object.prototype.hasOwnProperty.call(protocols, proto)) { - return arg - } - - const firstAt = arg.indexOf('@') - if (firstAt > -1) { - if (firstAt > firstColon) { - return `git+ssh://${arg}` - } else { - return arg - } - } - - const doubleSlash = arg.indexOf('//') - if (doubleSlash === firstColon + 1) { - return arg - } - - return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}` -} - -// attempt to correct an scp style url so that it will parse with `new URL()` -const correctUrl = (giturl) => { - // ignore @ that come after the first hash since the denotes the start - // of a committish which can contain @ characters - const firstAt = lastIndexOfBefore(giturl, '@', '#') - // ignore colons that come after the hash since that could include colons such as: - // git@github.com:user/package-2#semver:^1.0.0 - const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#') - - if (lastColonBeforeHash > firstAt) { - // the last : comes after the first @ (or there is no @) - // like it would in: - // proto://hostname.com:user/repo - // username@hostname.com:user/repo - // :password@hostname.com:user/repo - // username:password@hostname.com:user/repo - // proto://username@hostname.com:user/repo - // proto://:password@hostname.com:user/repo - // proto://username:password@hostname.com:user/repo - // then we replace the last : with a / to create a valid path - giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1) - } - - if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) { - // we have no : at all - // as it would be in: - // username@hostname.com/user/repo - // then we prepend a protocol - giturl = `git+ssh://${giturl}` - } - - return giturl -} - -module.exports = (giturl, protocols) => { - const withProtocol = protocols ? 
correctProtocol(giturl, protocols) : giturl - return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol)) -} diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/package.json b/node_modules/init-package-json/node_modules/hosted-git-info/package.json deleted file mode 100644 index 612259948afe7..0000000000000 --- a/node_modules/init-package-json/node_modules/hosted-git-info/package.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "name": "hosted-git-info", - "version": "6.1.1", - "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab", - "main": "./lib/index.js", - "repository": { - "type": "git", - "url": "https://github.com/npm/hosted-git-info.git" - }, - "keywords": [ - "git", - "github", - "bitbucket", - "gitlab" - ], - "author": "GitHub Inc.", - "license": "ISC", - "bugs": { - "url": "https://github.com/npm/hosted-git-info/issues" - }, - "homepage": "https://github.com/npm/hosted-git-info", - "scripts": { - "posttest": "npm run lint", - "snap": "tap", - "test": "tap", - "test:coverage": "tap --coverage-report=html", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "template-oss-apply": "template-oss-apply --force" - }, - "dependencies": { - "lru-cache": "^7.5.1" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.7.1", - "tap": "^16.0.1" - }, - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "tap": { - "color": 1, - "coverage": true, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.7.1" - } -} diff --git a/node_modules/init-package-json/node_modules/lru-cache/LICENSE b/node_modules/init-package-json/node_modules/lru-cache/LICENSE deleted file mode 100644 index f785757cd63f8..0000000000000 --- a/node_modules/init-package-json/node_modules/lru-cache/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/init-package-json/node_modules/lru-cache/index.js b/node_modules/init-package-json/node_modules/lru-cache/index.js deleted file mode 100644 index 48e99fe5e5a70..0000000000000 --- a/node_modules/init-package-json/node_modules/lru-cache/index.js +++ /dev/null @@ -1,1227 +0,0 @@ -const perf = - typeof performance === 'object' && - performance && - typeof performance.now === 'function' - ? 
performance - : Date - -const hasAbortController = typeof AbortController === 'function' - -// minimal backwards-compatibility polyfill -// this doesn't have nearly all the checks and whatnot that -// actual AbortController/Signal has, but it's enough for -// our purposes, and if used properly, behaves the same. -const AC = hasAbortController - ? AbortController - : class AbortController { - constructor() { - this.signal = new AS() - } - abort(reason = new Error('This operation was aborted')) { - this.signal.reason = this.signal.reason || reason - this.signal.aborted = true - this.signal.dispatchEvent({ - type: 'abort', - target: this.signal, - }) - } - } - -const hasAbortSignal = typeof AbortSignal === 'function' -// Some polyfills put this on the AC class, not global -const hasACAbortSignal = typeof AC.AbortSignal === 'function' -const AS = hasAbortSignal - ? AbortSignal - : hasACAbortSignal - ? AC.AbortController - : class AbortSignal { - constructor() { - this.reason = undefined - this.aborted = false - this._listeners = [] - } - dispatchEvent(e) { - if (e.type === 'abort') { - this.aborted = true - this.onabort(e) - this._listeners.forEach(f => f(e), this) - } - } - onabort() {} - addEventListener(ev, fn) { - if (ev === 'abort') { - this._listeners.push(fn) - } - } - removeEventListener(ev, fn) { - if (ev === 'abort') { - this._listeners = this._listeners.filter(f => f !== fn) - } - } - } - -const warned = new Set() -const deprecatedOption = (opt, instead) => { - const code = `LRU_CACHE_OPTION_${opt}` - if (shouldWarn(code)) { - warn(code, `${opt} option`, `options.${instead}`, LRUCache) - } -} -const deprecatedMethod = (method, instead) => { - const code = `LRU_CACHE_METHOD_${method}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, method) - warn(code, `${method} method`, `cache.${instead}()`, get) - } -} -const deprecatedProperty = (field, instead) => { - const code = `LRU_CACHE_PROPERTY_${field}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, field) - warn(code, `${field} property`, `cache.${instead}`, get) - } -} - -const emitWarning = (...a) => { - typeof process === 'object' && - process && - typeof process.emitWarning === 'function' - ? process.emitWarning(...a) - : console.error(...a) -} - -const shouldWarn = code => !warned.has(code) - -const warn = (code, what, instead, fn) => { - warned.add(code) - const msg = `The ${what} is deprecated. Please use ${instead} instead.` - emitWarning(msg, 'DeprecationWarning', code, fn) -} - -const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) - -/* istanbul ignore next - This is a little bit ridiculous, tbh. - * The maximum array length is 2^32-1 or thereabouts on most JS impls. - * And well before that point, you're caching the entire world, I mean, - * that's ~32GB of just integers for the next/prev links, plus whatever - * else to hold that many keys and values. Just filling the memory with - * zeroes at init time is brutal when you get that big. - * But why not be complete? - * Maybe in the future, these limits will have expanded. */ -const getUintArray = max => - !isPosInt(max) - ? null - : max <= Math.pow(2, 8) - ? Uint8Array - : max <= Math.pow(2, 16) - ? Uint16Array - : max <= Math.pow(2, 32) - ? Uint32Array - : max <= Number.MAX_SAFE_INTEGER - ? 
ZeroArray - : null - -class ZeroArray extends Array { - constructor(size) { - super(size) - this.fill(0) - } -} - -class Stack { - constructor(max) { - if (max === 0) { - return [] - } - const UintArray = getUintArray(max) - this.heap = new UintArray(max) - this.length = 0 - } - push(n) { - this.heap[this.length++] = n - } - pop() { - return this.heap[--this.length] - } -} - -class LRUCache { - constructor(options = {}) { - const { - max = 0, - ttl, - ttlResolution = 1, - ttlAutopurge, - updateAgeOnGet, - updateAgeOnHas, - allowStale, - dispose, - disposeAfter, - noDisposeOnSet, - noUpdateTTL, - maxSize = 0, - maxEntrySize = 0, - sizeCalculation, - fetchMethod, - fetchContext, - noDeleteOnFetchRejection, - noDeleteOnStaleGet, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - } = options - - // deprecated options, don't trigger a warning for getting them if - // the thing being passed in is another LRUCache we're copying. - const { length, maxAge, stale } = - options instanceof LRUCache ? {} : options - - if (max !== 0 && !isPosInt(max)) { - throw new TypeError('max option must be a nonnegative integer') - } - - const UintArray = max ? getUintArray(max) : Array - if (!UintArray) { - throw new Error('invalid max value: ' + max) - } - - this.max = max - this.maxSize = maxSize - this.maxEntrySize = maxEntrySize || this.maxSize - this.sizeCalculation = sizeCalculation || length - if (this.sizeCalculation) { - if (!this.maxSize && !this.maxEntrySize) { - throw new TypeError( - 'cannot set sizeCalculation without setting maxSize or maxEntrySize' - ) - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation set to non-function') - } - } - - this.fetchMethod = fetchMethod || null - if (this.fetchMethod && typeof this.fetchMethod !== 'function') { - throw new TypeError( - 'fetchMethod must be a function if specified' - ) - } - - this.fetchContext = fetchContext - if (!this.fetchMethod && fetchContext !== undefined) { - throw new TypeError( - 'cannot set fetchContext without fetchMethod' - ) - } - - this.keyMap = new Map() - this.keyList = new Array(max).fill(null) - this.valList = new Array(max).fill(null) - this.next = new UintArray(max) - this.prev = new UintArray(max) - this.head = 0 - this.tail = 0 - this.free = new Stack(max) - this.initialFill = 1 - this.size = 0 - - if (typeof dispose === 'function') { - this.dispose = dispose - } - if (typeof disposeAfter === 'function') { - this.disposeAfter = disposeAfter - this.disposed = [] - } else { - this.disposeAfter = null - this.disposed = null - } - this.noDisposeOnSet = !!noDisposeOnSet - this.noUpdateTTL = !!noUpdateTTL - this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection - this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection - this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort - this.ignoreFetchAbort = !!ignoreFetchAbort - - // NB: maxEntrySize is set to maxSize if it's set - if (this.maxEntrySize !== 0) { - if (this.maxSize !== 0) { - if (!isPosInt(this.maxSize)) { - throw new TypeError( - 'maxSize must be a positive integer if specified' - ) - } - } - if (!isPosInt(this.maxEntrySize)) { - throw new TypeError( - 'maxEntrySize must be a positive integer if specified' - ) - } - this.initializeSizeTracking() - } - - this.allowStale = !!allowStale || !!stale - this.noDeleteOnStaleGet = !!noDeleteOnStaleGet - this.updateAgeOnGet = !!updateAgeOnGet - this.updateAgeOnHas = !!updateAgeOnHas - this.ttlResolution = - isPosInt(ttlResolution) || ttlResolution === 0 - ? 
ttlResolution - : 1 - this.ttlAutopurge = !!ttlAutopurge - this.ttl = ttl || maxAge || 0 - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError( - 'ttl must be a positive integer if specified' - ) - } - this.initializeTTLTracking() - } - - // do not allow completely unbounded caches - if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { - throw new TypeError( - 'At least one of max, maxSize, or ttl is required' - ) - } - if (!this.ttlAutopurge && !this.max && !this.maxSize) { - const code = 'LRU_CACHE_UNBOUNDED' - if (shouldWarn(code)) { - warned.add(code) - const msg = - 'TTL caching without ttlAutopurge, max, or maxSize can ' + - 'result in unbounded memory consumption.' - emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) - } - } - - if (stale) { - deprecatedOption('stale', 'allowStale') - } - if (maxAge) { - deprecatedOption('maxAge', 'ttl') - } - if (length) { - deprecatedOption('length', 'sizeCalculation') - } - } - - getRemainingTTL(key) { - return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0 - } - - initializeTTLTracking() { - this.ttls = new ZeroArray(this.max) - this.starts = new ZeroArray(this.max) - - this.setItemTTL = (index, ttl, start = perf.now()) => { - this.starts[index] = ttl !== 0 ? start : 0 - this.ttls[index] = ttl - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.isStale(index)) { - this.delete(this.keyList[index]) - } - }, ttl + 1) - /* istanbul ignore else - unref() not supported on all platforms */ - if (t.unref) { - t.unref() - } - } - } - - this.updateItemAge = index => { - this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 - } - - this.statusTTL = (status, index) => { - if (status) { - status.ttl = this.ttls[index] - status.start = this.starts[index] - status.now = cachedNow || getNow() - status.remainingTTL = status.now + status.ttl - status.start - } - } - - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0 - const getNow = () => { - const n = perf.now() - if (this.ttlResolution > 0) { - cachedNow = n - const t = setTimeout( - () => (cachedNow = 0), - this.ttlResolution - ) - /* istanbul ignore else - not available on all platforms */ - if (t.unref) { - t.unref() - } - } - return n - } - - this.getRemainingTTL = key => { - const index = this.keyMap.get(key) - if (index === undefined) { - return 0 - } - return this.ttls[index] === 0 || this.starts[index] === 0 - ? Infinity - : this.starts[index] + - this.ttls[index] - - (cachedNow || getNow()) - } - - this.isStale = index => { - return ( - this.ttls[index] !== 0 && - this.starts[index] !== 0 && - (cachedNow || getNow()) - this.starts[index] > - this.ttls[index] - ) - } - } - updateItemAge(_index) {} - statusTTL(_status, _index) {} - setItemTTL(_index, _ttl, _start) {} - isStale(_index) { - return false - } - - initializeSizeTracking() { - this.calculatedSize = 0 - this.sizes = new ZeroArray(this.max) - this.removeItemSize = index => { - this.calculatedSize -= this.sizes[index] - this.sizes[index] = 0 - } - this.requireSize = (k, v, size, sizeCalculation) => { - // provisionally accept background fetches. - // actual value size will be checked when they return. 
- if (this.isBackgroundFetch(v)) { - return 0 - } - if (!isPosInt(size)) { - if (sizeCalculation) { - if (typeof sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation must be a function') - } - size = sizeCalculation(v, k) - if (!isPosInt(size)) { - throw new TypeError( - 'sizeCalculation return invalid (expect positive integer)' - ) - } - } else { - throw new TypeError( - 'invalid size value (must be positive integer). ' + - 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + - 'must be set.' - ) - } - } - return size - } - this.addItemSize = (index, size, status) => { - this.sizes[index] = size - if (this.maxSize) { - const maxSize = this.maxSize - this.sizes[index] - while (this.calculatedSize > maxSize) { - this.evict(true) - } - } - this.calculatedSize += this.sizes[index] - if (status) { - status.entrySize = size - status.totalCalculatedSize = this.calculatedSize - } - } - } - removeItemSize(_index) {} - addItemSize(_index, _size) {} - requireSize(_k, _v, size, sizeCalculation) { - if (size || sizeCalculation) { - throw new TypeError( - 'cannot set size without setting maxSize or maxEntrySize on cache' - ) - } - } - - *indexes({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.tail; true; ) { - if (!this.isValidIndex(i)) { - break - } - if (allowStale || !this.isStale(i)) { - yield i - } - if (i === this.head) { - break - } else { - i = this.prev[i] - } - } - } - } - - *rindexes({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.head; true; ) { - if (!this.isValidIndex(i)) { - break - } - if (allowStale || !this.isStale(i)) { - yield i - } - if (i === this.tail) { - break - } else { - i = this.next[i] - } - } - } - } - - isValidIndex(index) { - return ( - index !== undefined && - this.keyMap.get(this.keyList[index]) === index - ) - } - - *entries() { - for (const i of this.indexes()) { - if ( - this.valList[i] !== undefined && - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield [this.keyList[i], this.valList[i]] - } - } - } - *rentries() { - for (const i of this.rindexes()) { - if ( - this.valList[i] !== undefined && - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield [this.keyList[i], this.valList[i]] - } - } - } - - *keys() { - for (const i of this.indexes()) { - if ( - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.keyList[i] - } - } - } - *rkeys() { - for (const i of this.rindexes()) { - if ( - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.keyList[i] - } - } - } - - *values() { - for (const i of this.indexes()) { - if ( - this.valList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.valList[i] - } - } - } - *rvalues() { - for (const i of this.rindexes()) { - if ( - this.valList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.valList[i] - } - } - } - - [Symbol.iterator]() { - return this.entries() - } - - find(fn, getOptions) { - for (const i of this.indexes()) { - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? v.__staleWhileFetching - : v - if (value === undefined) continue - if (fn(value, this.keyList[i], this)) { - return this.get(this.keyList[i], getOptions) - } - } - } - - forEach(fn, thisp = this) { - for (const i of this.indexes()) { - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? 
v.__staleWhileFetching - : v - if (value === undefined) continue - fn.call(thisp, value, this.keyList[i], this) - } - } - - rforEach(fn, thisp = this) { - for (const i of this.rindexes()) { - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? v.__staleWhileFetching - : v - if (value === undefined) continue - fn.call(thisp, value, this.keyList[i], this) - } - } - - get prune() { - deprecatedMethod('prune', 'purgeStale') - return this.purgeStale - } - - purgeStale() { - let deleted = false - for (const i of this.rindexes({ allowStale: true })) { - if (this.isStale(i)) { - this.delete(this.keyList[i]) - deleted = true - } - } - return deleted - } - - dump() { - const arr = [] - for (const i of this.indexes({ allowStale: true })) { - const key = this.keyList[i] - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? v.__staleWhileFetching - : v - if (value === undefined) continue - const entry = { value } - if (this.ttls) { - entry.ttl = this.ttls[i] - // always dump the start relative to a portable timestamp - // it's ok for this to be a bit slow, it's a rare operation. - const age = perf.now() - this.starts[i] - entry.start = Math.floor(Date.now() - age) - } - if (this.sizes) { - entry.size = this.sizes[i] - } - arr.unshift([key, entry]) - } - return arr - } - - load(arr) { - this.clear() - for (const [key, entry] of arr) { - if (entry.start) { - // entry.start is a portable timestamp, but we may be using - // node's performance.now(), so calculate the offset. - // it's ok for this to be a bit slow, it's a rare operation. - const age = Date.now() - entry.start - entry.start = perf.now() - age - } - this.set(key, entry.value, entry) - } - } - - dispose(_v, _k, _reason) {} - - set( - k, - v, - { - ttl = this.ttl, - start, - noDisposeOnSet = this.noDisposeOnSet, - size = 0, - sizeCalculation = this.sizeCalculation, - noUpdateTTL = this.noUpdateTTL, - status, - } = {} - ) { - size = this.requireSize(k, v, size, sizeCalculation) - // if the item doesn't fit, don't do anything - // NB: maxEntrySize set to maxSize by default - if (this.maxEntrySize && size > this.maxEntrySize) { - if (status) { - status.set = 'miss' - status.maxEntrySizeExceeded = true - } - // have to delete, in case a background fetch is there already. - // in non-async cases, this is a no-op - this.delete(k) - return this - } - let index = this.size === 0 ? undefined : this.keyMap.get(k) - if (index === undefined) { - // addition - index = this.newIndex() - this.keyList[index] = k - this.valList[index] = v - this.keyMap.set(k, index) - this.next[this.tail] = index - this.prev[index] = this.tail - this.tail = index - this.size++ - this.addItemSize(index, size, status) - if (status) { - status.set = 'add' - } - noUpdateTTL = false - } else { - // update - this.moveToTail(index) - const oldVal = this.valList[index] - if (v !== oldVal) { - if (this.isBackgroundFetch(oldVal)) { - oldVal.__abortController.abort(new Error('replaced')) - } else { - if (!noDisposeOnSet) { - this.dispose(oldVal, k, 'set') - if (this.disposeAfter) { - this.disposed.push([oldVal, k, 'set']) - } - } - } - this.removeItemSize(index) - this.valList[index] = v - this.addItemSize(index, size, status) - if (status) { - status.set = 'replace' - const oldValue = - oldVal && this.isBackgroundFetch(oldVal) - ? 
oldVal.__staleWhileFetching - : oldVal - if (oldValue !== undefined) status.oldValue = oldValue - } - } else if (status) { - status.set = 'update' - } - } - if (ttl !== 0 && this.ttl === 0 && !this.ttls) { - this.initializeTTLTracking() - } - if (!noUpdateTTL) { - this.setItemTTL(index, ttl, start) - } - this.statusTTL(status, index) - if (this.disposeAfter) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return this - } - - newIndex() { - if (this.size === 0) { - return this.tail - } - if (this.size === this.max && this.max !== 0) { - return this.evict(false) - } - if (this.free.length !== 0) { - return this.free.pop() - } - // initial fill, just keep writing down the list - return this.initialFill++ - } - - pop() { - if (this.size) { - const val = this.valList[this.head] - this.evict(true) - return val - } - } - - evict(free) { - const head = this.head - const k = this.keyList[head] - const v = this.valList[head] - if (this.isBackgroundFetch(v)) { - v.__abortController.abort(new Error('evicted')) - } else { - this.dispose(v, k, 'evict') - if (this.disposeAfter) { - this.disposed.push([v, k, 'evict']) - } - } - this.removeItemSize(head) - // if we aren't about to use the index, then null these out - if (free) { - this.keyList[head] = null - this.valList[head] = null - this.free.push(head) - } - this.head = this.next[head] - this.keyMap.delete(k) - this.size-- - return head - } - - has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined) { - if (!this.isStale(index)) { - if (updateAgeOnHas) { - this.updateItemAge(index) - } - if (status) status.has = 'hit' - this.statusTTL(status, index) - return true - } else if (status) { - status.has = 'stale' - this.statusTTL(status, index) - } - } else if (status) { - status.has = 'miss' - } - return false - } - - // like get(), but without any LRU updating or TTL expiration - peek(k, { allowStale = this.allowStale } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined && (allowStale || !this.isStale(index))) { - const v = this.valList[index] - // either stale and allowed, or forcing a refresh of non-stale value - return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v - } - } - - backgroundFetch(k, index, options, context) { - const v = index === undefined ? 
undefined : this.valList[index] - if (this.isBackgroundFetch(v)) { - return v - } - const ac = new AC() - if (options.signal) { - options.signal.addEventListener('abort', () => - ac.abort(options.signal.reason) - ) - } - const fetchOpts = { - signal: ac.signal, - options, - context, - } - const cb = (v, updateCache = false) => { - const { aborted } = ac.signal - const ignoreAbort = options.ignoreFetchAbort && v !== undefined - if (options.status) { - if (aborted && !updateCache) { - options.status.fetchAborted = true - options.status.fetchError = ac.signal.reason - if (ignoreAbort) options.status.fetchAbortIgnored = true - } else { - options.status.fetchResolved = true - } - } - if (aborted && !ignoreAbort && !updateCache) { - return fetchFail(ac.signal.reason) - } - // either we didn't abort, and are still here, or we did, and ignored - if (this.valList[index] === p) { - if (v === undefined) { - if (p.__staleWhileFetching) { - this.valList[index] = p.__staleWhileFetching - } else { - this.delete(k) - } - } else { - if (options.status) options.status.fetchUpdated = true - this.set(k, v, fetchOpts.options) - } - } - return v - } - const eb = er => { - if (options.status) { - options.status.fetchRejected = true - options.status.fetchError = er - } - return fetchFail(er) - } - const fetchFail = er => { - const { aborted } = ac.signal - const allowStaleAborted = - aborted && options.allowStaleOnFetchAbort - const allowStale = - allowStaleAborted || options.allowStaleOnFetchRejection - const noDelete = allowStale || options.noDeleteOnFetchRejection - if (this.valList[index] === p) { - // if we allow stale on fetch rejections, then we need to ensure that - // the stale value is not removed from the cache when the fetch fails. - const del = !noDelete || p.__staleWhileFetching === undefined - if (del) { - this.delete(k) - } else if (!allowStaleAborted) { - // still replace the *promise* with the stale value, - // since we are done with the promise at this point. - // leave it untouched if we're still waiting for an - // aborted background fetch that hasn't yet returned. - this.valList[index] = p.__staleWhileFetching - } - } - if (allowStale) { - if (options.status && p.__staleWhileFetching !== undefined) { - options.status.returnedStale = true - } - return p.__staleWhileFetching - } else if (p.__returned === p) { - throw er - } - } - const pcall = (res, rej) => { - this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) - // ignored, we go until we finish, regardless. - // defer check until we are actually aborting, - // so fetchMethod can override. - ac.signal.addEventListener('abort', () => { - if ( - !options.ignoreFetchAbort || - options.allowStaleOnFetchAbort - ) { - res() - // when it eventually resolves, update the cache. - if (options.allowStaleOnFetchAbort) { - res = v => cb(v, true) - } - } - }) - } - if (options.status) options.status.fetchDispatched = true - const p = new Promise(pcall).then(cb, eb) - p.__abortController = ac - p.__staleWhileFetching = v - p.__returned = null - if (index === undefined) { - // internal, don't expose status. 
- this.set(k, p, { ...fetchOpts.options, status: undefined }) - index = this.keyMap.get(k) - } else { - this.valList[index] = p - } - return p - } - - isBackgroundFetch(p) { - return ( - p && - typeof p === 'object' && - typeof p.then === 'function' && - Object.prototype.hasOwnProperty.call( - p, - '__staleWhileFetching' - ) && - Object.prototype.hasOwnProperty.call(p, '__returned') && - (p.__returned === p || p.__returned === null) - ) - } - - // this takes the union of get() and set() opts, because it does both - async fetch( - k, - { - // get options - allowStale = this.allowStale, - updateAgeOnGet = this.updateAgeOnGet, - noDeleteOnStaleGet = this.noDeleteOnStaleGet, - // set options - ttl = this.ttl, - noDisposeOnSet = this.noDisposeOnSet, - size = 0, - sizeCalculation = this.sizeCalculation, - noUpdateTTL = this.noUpdateTTL, - // fetch exclusive options - noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, - allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, - ignoreFetchAbort = this.ignoreFetchAbort, - allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, - fetchContext = this.fetchContext, - forceRefresh = false, - status, - signal, - } = {} - ) { - if (!this.fetchMethod) { - if (status) status.fetch = 'get' - return this.get(k, { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - status, - }) - } - - const options = { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - ttl, - noDisposeOnSet, - size, - sizeCalculation, - noUpdateTTL, - noDeleteOnFetchRejection, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - status, - signal, - } - - let index = this.keyMap.get(k) - if (index === undefined) { - if (status) status.fetch = 'miss' - const p = this.backgroundFetch(k, index, options, fetchContext) - return (p.__returned = p) - } else { - // in cache, maybe already fetching - const v = this.valList[index] - if (this.isBackgroundFetch(v)) { - const stale = - allowStale && v.__staleWhileFetching !== undefined - if (status) { - status.fetch = 'inflight' - if (stale) status.returnedStale = true - } - return stale ? v.__staleWhileFetching : (v.__returned = v) - } - - // if we force a refresh, that means do NOT serve the cached value, - // unless we are already in the process of refreshing the cache. - const isStale = this.isStale(index) - if (!forceRefresh && !isStale) { - if (status) status.fetch = 'hit' - this.moveToTail(index) - if (updateAgeOnGet) { - this.updateItemAge(index) - } - this.statusTTL(status, index) - return v - } - - // ok, it is stale or a forced refresh, and not already fetching. - // refresh the cache. - const p = this.backgroundFetch(k, index, options, fetchContext) - const hasStale = p.__staleWhileFetching !== undefined - const staleVal = hasStale && allowStale - if (status) { - status.fetch = hasStale && isStale ? 'stale' : 'refresh' - if (staleVal && isStale) status.returnedStale = true - } - return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) - } - } - - get( - k, - { - allowStale = this.allowStale, - updateAgeOnGet = this.updateAgeOnGet, - noDeleteOnStaleGet = this.noDeleteOnStaleGet, - status, - } = {} - ) { - const index = this.keyMap.get(k) - if (index !== undefined) { - const value = this.valList[index] - const fetching = this.isBackgroundFetch(value) - this.statusTTL(status, index) - if (this.isStale(index)) { - if (status) status.get = 'stale' - // delete only if not an in-flight background fetch - if (!fetching) { - if (!noDeleteOnStaleGet) { - this.delete(k) - } - if (status) status.returnedStale = allowStale - return allowStale ? value : undefined - } else { - if (status) { - status.returnedStale = - allowStale && value.__staleWhileFetching !== undefined - } - return allowStale ? value.__staleWhileFetching : undefined - } - } else { - if (status) status.get = 'hit' - // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching. - // If it's not stale, and fetching, AND has a __staleWhileFetching - // value, then that means the user fetched with {forceRefresh:true}, - // so it's safe to return that value. - if (fetching) { - return value.__staleWhileFetching - } - this.moveToTail(index) - if (updateAgeOnGet) { - this.updateItemAge(index) - } - return value - } - } else if (status) { - status.get = 'miss' - } - } - - connect(p, n) { - this.prev[n] = p - this.next[p] = n - } - - moveToTail(index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.tail) { - if (index === this.head) { - this.head = this.next[index] - } else { - this.connect(this.prev[index], this.next[index]) - } - this.connect(this.tail, index) - this.tail = index - } - } - - get del() { - deprecatedMethod('del', 'delete') - return this.delete - } - - delete(k) { - let deleted = false - if (this.size !== 0) { - const index = this.keyMap.get(k) - if (index !== undefined) { - deleted = true - if (this.size === 1) { - this.clear() - } else { - this.removeItemSize(index) - const v = this.valList[index] - if (this.isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')) - } else { - this.dispose(v, k, 'delete') - if (this.disposeAfter) { - this.disposed.push([v, k, 'delete']) - } - } - this.keyMap.delete(k) - this.keyList[index] = null - this.valList[index] = null - if (index === this.tail) { - this.tail = this.prev[index] - } else if (index === this.head) { - this.head = this.next[index] - } else { - this.next[this.prev[index]] = this.next[index] - this.prev[this.next[index]] = this.prev[index] - } - this.size-- - this.free.push(index) - } - } - } - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return deleted - } - - clear() { - for (const index of this.rindexes({ allowStale: true })) { - const v = this.valList[index] - if (this.isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')) - } else { - const k = this.keyList[index] - this.dispose(v, k, 'delete') - if (this.disposeAfter) { - this.disposed.push([v, k, 'delete']) - } - } - } - - this.keyMap.clear() - this.valList.fill(null) - this.keyList.fill(null) - if (this.ttls) { - this.ttls.fill(0) - this.starts.fill(0) - } - if (this.sizes) { - this.sizes.fill(0) - } - 
this.head = 0 - this.tail = 0 - this.initialFill = 1 - this.free.length = 0 - this.calculatedSize = 0 - this.size = 0 - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - } - - get reset() { - deprecatedMethod('reset', 'clear') - return this.clear - } - - get length() { - deprecatedProperty('length', 'size') - return this.size - } - - static get AbortController() { - return AC - } - static get AbortSignal() { - return AS - } -} - -module.exports = LRUCache diff --git a/node_modules/init-package-json/node_modules/lru-cache/index.mjs b/node_modules/init-package-json/node_modules/lru-cache/index.mjs deleted file mode 100644 index 4a0b4813ec515..0000000000000 --- a/node_modules/init-package-json/node_modules/lru-cache/index.mjs +++ /dev/null @@ -1,1227 +0,0 @@ -const perf = - typeof performance === 'object' && - performance && - typeof performance.now === 'function' - ? performance - : Date - -const hasAbortController = typeof AbortController === 'function' - -// minimal backwards-compatibility polyfill -// this doesn't have nearly all the checks and whatnot that -// actual AbortController/Signal has, but it's enough for -// our purposes, and if used properly, behaves the same. -const AC = hasAbortController - ? AbortController - : class AbortController { - constructor() { - this.signal = new AS() - } - abort(reason = new Error('This operation was aborted')) { - this.signal.reason = this.signal.reason || reason - this.signal.aborted = true - this.signal.dispatchEvent({ - type: 'abort', - target: this.signal, - }) - } - } - -const hasAbortSignal = typeof AbortSignal === 'function' -// Some polyfills put this on the AC class, not global -const hasACAbortSignal = typeof AC.AbortSignal === 'function' -const AS = hasAbortSignal - ? AbortSignal - : hasACAbortSignal - ? AC.AbortController - : class AbortSignal { - constructor() { - this.reason = undefined - this.aborted = false - this._listeners = [] - } - dispatchEvent(e) { - if (e.type === 'abort') { - this.aborted = true - this.onabort(e) - this._listeners.forEach(f => f(e), this) - } - } - onabort() {} - addEventListener(ev, fn) { - if (ev === 'abort') { - this._listeners.push(fn) - } - } - removeEventListener(ev, fn) { - if (ev === 'abort') { - this._listeners = this._listeners.filter(f => f !== fn) - } - } - } - -const warned = new Set() -const deprecatedOption = (opt, instead) => { - const code = `LRU_CACHE_OPTION_${opt}` - if (shouldWarn(code)) { - warn(code, `${opt} option`, `options.${instead}`, LRUCache) - } -} -const deprecatedMethod = (method, instead) => { - const code = `LRU_CACHE_METHOD_${method}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, method) - warn(code, `${method} method`, `cache.${instead}()`, get) - } -} -const deprecatedProperty = (field, instead) => { - const code = `LRU_CACHE_PROPERTY_${field}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, field) - warn(code, `${field} property`, `cache.${instead}`, get) - } -} - -const emitWarning = (...a) => { - typeof process === 'object' && - process && - typeof process.emitWarning === 'function' - ? process.emitWarning(...a) - : console.error(...a) -} - -const shouldWarn = code => !warned.has(code) - -const warn = (code, what, instead, fn) => { - warned.add(code) - const msg = `The ${what} is deprecated. 
Please use ${instead} instead.` - emitWarning(msg, 'DeprecationWarning', code, fn) -} - -const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) - -/* istanbul ignore next - This is a little bit ridiculous, tbh. - * The maximum array length is 2^32-1 or thereabouts on most JS impls. - * And well before that point, you're caching the entire world, I mean, - * that's ~32GB of just integers for the next/prev links, plus whatever - * else to hold that many keys and values. Just filling the memory with - * zeroes at init time is brutal when you get that big. - * But why not be complete? - * Maybe in the future, these limits will have expanded. */ -const getUintArray = max => - !isPosInt(max) - ? null - : max <= Math.pow(2, 8) - ? Uint8Array - : max <= Math.pow(2, 16) - ? Uint16Array - : max <= Math.pow(2, 32) - ? Uint32Array - : max <= Number.MAX_SAFE_INTEGER - ? ZeroArray - : null - -class ZeroArray extends Array { - constructor(size) { - super(size) - this.fill(0) - } -} - -class Stack { - constructor(max) { - if (max === 0) { - return [] - } - const UintArray = getUintArray(max) - this.heap = new UintArray(max) - this.length = 0 - } - push(n) { - this.heap[this.length++] = n - } - pop() { - return this.heap[--this.length] - } -} - -class LRUCache { - constructor(options = {}) { - const { - max = 0, - ttl, - ttlResolution = 1, - ttlAutopurge, - updateAgeOnGet, - updateAgeOnHas, - allowStale, - dispose, - disposeAfter, - noDisposeOnSet, - noUpdateTTL, - maxSize = 0, - maxEntrySize = 0, - sizeCalculation, - fetchMethod, - fetchContext, - noDeleteOnFetchRejection, - noDeleteOnStaleGet, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - } = options - - // deprecated options, don't trigger a warning for getting them if - // the thing being passed in is another LRUCache we're copying. - const { length, maxAge, stale } = - options instanceof LRUCache ? {} : options - - if (max !== 0 && !isPosInt(max)) { - throw new TypeError('max option must be a nonnegative integer') - } - - const UintArray = max ? 
getUintArray(max) : Array - if (!UintArray) { - throw new Error('invalid max value: ' + max) - } - - this.max = max - this.maxSize = maxSize - this.maxEntrySize = maxEntrySize || this.maxSize - this.sizeCalculation = sizeCalculation || length - if (this.sizeCalculation) { - if (!this.maxSize && !this.maxEntrySize) { - throw new TypeError( - 'cannot set sizeCalculation without setting maxSize or maxEntrySize' - ) - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation set to non-function') - } - } - - this.fetchMethod = fetchMethod || null - if (this.fetchMethod && typeof this.fetchMethod !== 'function') { - throw new TypeError( - 'fetchMethod must be a function if specified' - ) - } - - this.fetchContext = fetchContext - if (!this.fetchMethod && fetchContext !== undefined) { - throw new TypeError( - 'cannot set fetchContext without fetchMethod' - ) - } - - this.keyMap = new Map() - this.keyList = new Array(max).fill(null) - this.valList = new Array(max).fill(null) - this.next = new UintArray(max) - this.prev = new UintArray(max) - this.head = 0 - this.tail = 0 - this.free = new Stack(max) - this.initialFill = 1 - this.size = 0 - - if (typeof dispose === 'function') { - this.dispose = dispose - } - if (typeof disposeAfter === 'function') { - this.disposeAfter = disposeAfter - this.disposed = [] - } else { - this.disposeAfter = null - this.disposed = null - } - this.noDisposeOnSet = !!noDisposeOnSet - this.noUpdateTTL = !!noUpdateTTL - this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection - this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection - this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort - this.ignoreFetchAbort = !!ignoreFetchAbort - - // NB: maxEntrySize is set to maxSize if it's set - if (this.maxEntrySize !== 0) { - if (this.maxSize !== 0) { - if (!isPosInt(this.maxSize)) { - throw new TypeError( - 'maxSize must be a positive integer if specified' - ) - } - } - if (!isPosInt(this.maxEntrySize)) { - throw new TypeError( - 'maxEntrySize must be a positive integer if specified' - ) - } - this.initializeSizeTracking() - } - - this.allowStale = !!allowStale || !!stale - this.noDeleteOnStaleGet = !!noDeleteOnStaleGet - this.updateAgeOnGet = !!updateAgeOnGet - this.updateAgeOnHas = !!updateAgeOnHas - this.ttlResolution = - isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution - : 1 - this.ttlAutopurge = !!ttlAutopurge - this.ttl = ttl || maxAge || 0 - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError( - 'ttl must be a positive integer if specified' - ) - } - this.initializeTTLTracking() - } - - // do not allow completely unbounded caches - if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { - throw new TypeError( - 'At least one of max, maxSize, or ttl is required' - ) - } - if (!this.ttlAutopurge && !this.max && !this.maxSize) { - const code = 'LRU_CACHE_UNBOUNDED' - if (shouldWarn(code)) { - warned.add(code) - const msg = - 'TTL caching without ttlAutopurge, max, or maxSize can ' + - 'result in unbounded memory consumption.' - emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) - } - } - - if (stale) { - deprecatedOption('stale', 'allowStale') - } - if (maxAge) { - deprecatedOption('maxAge', 'ttl') - } - if (length) { - deprecatedOption('length', 'sizeCalculation') - } - } - - getRemainingTTL(key) { - return this.has(key, { updateAgeOnHas: false }) ? 
Infinity : 0 - } - - initializeTTLTracking() { - this.ttls = new ZeroArray(this.max) - this.starts = new ZeroArray(this.max) - - this.setItemTTL = (index, ttl, start = perf.now()) => { - this.starts[index] = ttl !== 0 ? start : 0 - this.ttls[index] = ttl - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.isStale(index)) { - this.delete(this.keyList[index]) - } - }, ttl + 1) - /* istanbul ignore else - unref() not supported on all platforms */ - if (t.unref) { - t.unref() - } - } - } - - this.updateItemAge = index => { - this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 - } - - this.statusTTL = (status, index) => { - if (status) { - status.ttl = this.ttls[index] - status.start = this.starts[index] - status.now = cachedNow || getNow() - status.remainingTTL = status.now + status.ttl - status.start - } - } - - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0 - const getNow = () => { - const n = perf.now() - if (this.ttlResolution > 0) { - cachedNow = n - const t = setTimeout( - () => (cachedNow = 0), - this.ttlResolution - ) - /* istanbul ignore else - not available on all platforms */ - if (t.unref) { - t.unref() - } - } - return n - } - - this.getRemainingTTL = key => { - const index = this.keyMap.get(key) - if (index === undefined) { - return 0 - } - return this.ttls[index] === 0 || this.starts[index] === 0 - ? Infinity - : this.starts[index] + - this.ttls[index] - - (cachedNow || getNow()) - } - - this.isStale = index => { - return ( - this.ttls[index] !== 0 && - this.starts[index] !== 0 && - (cachedNow || getNow()) - this.starts[index] > - this.ttls[index] - ) - } - } - updateItemAge(_index) {} - statusTTL(_status, _index) {} - setItemTTL(_index, _ttl, _start) {} - isStale(_index) { - return false - } - - initializeSizeTracking() { - this.calculatedSize = 0 - this.sizes = new ZeroArray(this.max) - this.removeItemSize = index => { - this.calculatedSize -= this.sizes[index] - this.sizes[index] = 0 - } - this.requireSize = (k, v, size, sizeCalculation) => { - // provisionally accept background fetches. - // actual value size will be checked when they return. - if (this.isBackgroundFetch(v)) { - return 0 - } - if (!isPosInt(size)) { - if (sizeCalculation) { - if (typeof sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation must be a function') - } - size = sizeCalculation(v, k) - if (!isPosInt(size)) { - throw new TypeError( - 'sizeCalculation return invalid (expect positive integer)' - ) - } - } else { - throw new TypeError( - 'invalid size value (must be positive integer). ' + - 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + - 'must be set.' 
- ) - } - } - return size - } - this.addItemSize = (index, size, status) => { - this.sizes[index] = size - if (this.maxSize) { - const maxSize = this.maxSize - this.sizes[index] - while (this.calculatedSize > maxSize) { - this.evict(true) - } - } - this.calculatedSize += this.sizes[index] - if (status) { - status.entrySize = size - status.totalCalculatedSize = this.calculatedSize - } - } - } - removeItemSize(_index) {} - addItemSize(_index, _size) {} - requireSize(_k, _v, size, sizeCalculation) { - if (size || sizeCalculation) { - throw new TypeError( - 'cannot set size without setting maxSize or maxEntrySize on cache' - ) - } - } - - *indexes({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.tail; true; ) { - if (!this.isValidIndex(i)) { - break - } - if (allowStale || !this.isStale(i)) { - yield i - } - if (i === this.head) { - break - } else { - i = this.prev[i] - } - } - } - } - - *rindexes({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.head; true; ) { - if (!this.isValidIndex(i)) { - break - } - if (allowStale || !this.isStale(i)) { - yield i - } - if (i === this.tail) { - break - } else { - i = this.next[i] - } - } - } - } - - isValidIndex(index) { - return ( - index !== undefined && - this.keyMap.get(this.keyList[index]) === index - ) - } - - *entries() { - for (const i of this.indexes()) { - if ( - this.valList[i] !== undefined && - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield [this.keyList[i], this.valList[i]] - } - } - } - *rentries() { - for (const i of this.rindexes()) { - if ( - this.valList[i] !== undefined && - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield [this.keyList[i], this.valList[i]] - } - } - } - - *keys() { - for (const i of this.indexes()) { - if ( - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.keyList[i] - } - } - } - *rkeys() { - for (const i of this.rindexes()) { - if ( - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.keyList[i] - } - } - } - - *values() { - for (const i of this.indexes()) { - if ( - this.valList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.valList[i] - } - } - } - *rvalues() { - for (const i of this.rindexes()) { - if ( - this.valList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.valList[i] - } - } - } - - [Symbol.iterator]() { - return this.entries() - } - - find(fn, getOptions) { - for (const i of this.indexes()) { - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? v.__staleWhileFetching - : v - if (value === undefined) continue - if (fn(value, this.keyList[i], this)) { - return this.get(this.keyList[i], getOptions) - } - } - } - - forEach(fn, thisp = this) { - for (const i of this.indexes()) { - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? v.__staleWhileFetching - : v - if (value === undefined) continue - fn.call(thisp, value, this.keyList[i], this) - } - } - - rforEach(fn, thisp = this) { - for (const i of this.rindexes()) { - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? 
v.__staleWhileFetching - : v - if (value === undefined) continue - fn.call(thisp, value, this.keyList[i], this) - } - } - - get prune() { - deprecatedMethod('prune', 'purgeStale') - return this.purgeStale - } - - purgeStale() { - let deleted = false - for (const i of this.rindexes({ allowStale: true })) { - if (this.isStale(i)) { - this.delete(this.keyList[i]) - deleted = true - } - } - return deleted - } - - dump() { - const arr = [] - for (const i of this.indexes({ allowStale: true })) { - const key = this.keyList[i] - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? v.__staleWhileFetching - : v - if (value === undefined) continue - const entry = { value } - if (this.ttls) { - entry.ttl = this.ttls[i] - // always dump the start relative to a portable timestamp - // it's ok for this to be a bit slow, it's a rare operation. - const age = perf.now() - this.starts[i] - entry.start = Math.floor(Date.now() - age) - } - if (this.sizes) { - entry.size = this.sizes[i] - } - arr.unshift([key, entry]) - } - return arr - } - - load(arr) { - this.clear() - for (const [key, entry] of arr) { - if (entry.start) { - // entry.start is a portable timestamp, but we may be using - // node's performance.now(), so calculate the offset. - // it's ok for this to be a bit slow, it's a rare operation. - const age = Date.now() - entry.start - entry.start = perf.now() - age - } - this.set(key, entry.value, entry) - } - } - - dispose(_v, _k, _reason) {} - - set( - k, - v, - { - ttl = this.ttl, - start, - noDisposeOnSet = this.noDisposeOnSet, - size = 0, - sizeCalculation = this.sizeCalculation, - noUpdateTTL = this.noUpdateTTL, - status, - } = {} - ) { - size = this.requireSize(k, v, size, sizeCalculation) - // if the item doesn't fit, don't do anything - // NB: maxEntrySize set to maxSize by default - if (this.maxEntrySize && size > this.maxEntrySize) { - if (status) { - status.set = 'miss' - status.maxEntrySizeExceeded = true - } - // have to delete, in case a background fetch is there already. - // in non-async cases, this is a no-op - this.delete(k) - return this - } - let index = this.size === 0 ? undefined : this.keyMap.get(k) - if (index === undefined) { - // addition - index = this.newIndex() - this.keyList[index] = k - this.valList[index] = v - this.keyMap.set(k, index) - this.next[this.tail] = index - this.prev[index] = this.tail - this.tail = index - this.size++ - this.addItemSize(index, size, status) - if (status) { - status.set = 'add' - } - noUpdateTTL = false - } else { - // update - this.moveToTail(index) - const oldVal = this.valList[index] - if (v !== oldVal) { - if (this.isBackgroundFetch(oldVal)) { - oldVal.__abortController.abort(new Error('replaced')) - } else { - if (!noDisposeOnSet) { - this.dispose(oldVal, k, 'set') - if (this.disposeAfter) { - this.disposed.push([oldVal, k, 'set']) - } - } - } - this.removeItemSize(index) - this.valList[index] = v - this.addItemSize(index, size, status) - if (status) { - status.set = 'replace' - const oldValue = - oldVal && this.isBackgroundFetch(oldVal) - ? 
oldVal.__staleWhileFetching - : oldVal - if (oldValue !== undefined) status.oldValue = oldValue - } - } else if (status) { - status.set = 'update' - } - } - if (ttl !== 0 && this.ttl === 0 && !this.ttls) { - this.initializeTTLTracking() - } - if (!noUpdateTTL) { - this.setItemTTL(index, ttl, start) - } - this.statusTTL(status, index) - if (this.disposeAfter) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return this - } - - newIndex() { - if (this.size === 0) { - return this.tail - } - if (this.size === this.max && this.max !== 0) { - return this.evict(false) - } - if (this.free.length !== 0) { - return this.free.pop() - } - // initial fill, just keep writing down the list - return this.initialFill++ - } - - pop() { - if (this.size) { - const val = this.valList[this.head] - this.evict(true) - return val - } - } - - evict(free) { - const head = this.head - const k = this.keyList[head] - const v = this.valList[head] - if (this.isBackgroundFetch(v)) { - v.__abortController.abort(new Error('evicted')) - } else { - this.dispose(v, k, 'evict') - if (this.disposeAfter) { - this.disposed.push([v, k, 'evict']) - } - } - this.removeItemSize(head) - // if we aren't about to use the index, then null these out - if (free) { - this.keyList[head] = null - this.valList[head] = null - this.free.push(head) - } - this.head = this.next[head] - this.keyMap.delete(k) - this.size-- - return head - } - - has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined) { - if (!this.isStale(index)) { - if (updateAgeOnHas) { - this.updateItemAge(index) - } - if (status) status.has = 'hit' - this.statusTTL(status, index) - return true - } else if (status) { - status.has = 'stale' - this.statusTTL(status, index) - } - } else if (status) { - status.has = 'miss' - } - return false - } - - // like get(), but without any LRU updating or TTL expiration - peek(k, { allowStale = this.allowStale } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined && (allowStale || !this.isStale(index))) { - const v = this.valList[index] - // either stale and allowed, or forcing a refresh of non-stale value - return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v - } - } - - backgroundFetch(k, index, options, context) { - const v = index === undefined ? 
undefined : this.valList[index] - if (this.isBackgroundFetch(v)) { - return v - } - const ac = new AC() - if (options.signal) { - options.signal.addEventListener('abort', () => - ac.abort(options.signal.reason) - ) - } - const fetchOpts = { - signal: ac.signal, - options, - context, - } - const cb = (v, updateCache = false) => { - const { aborted } = ac.signal - const ignoreAbort = options.ignoreFetchAbort && v !== undefined - if (options.status) { - if (aborted && !updateCache) { - options.status.fetchAborted = true - options.status.fetchError = ac.signal.reason - if (ignoreAbort) options.status.fetchAbortIgnored = true - } else { - options.status.fetchResolved = true - } - } - if (aborted && !ignoreAbort && !updateCache) { - return fetchFail(ac.signal.reason) - } - // either we didn't abort, and are still here, or we did, and ignored - if (this.valList[index] === p) { - if (v === undefined) { - if (p.__staleWhileFetching) { - this.valList[index] = p.__staleWhileFetching - } else { - this.delete(k) - } - } else { - if (options.status) options.status.fetchUpdated = true - this.set(k, v, fetchOpts.options) - } - } - return v - } - const eb = er => { - if (options.status) { - options.status.fetchRejected = true - options.status.fetchError = er - } - return fetchFail(er) - } - const fetchFail = er => { - const { aborted } = ac.signal - const allowStaleAborted = - aborted && options.allowStaleOnFetchAbort - const allowStale = - allowStaleAborted || options.allowStaleOnFetchRejection - const noDelete = allowStale || options.noDeleteOnFetchRejection - if (this.valList[index] === p) { - // if we allow stale on fetch rejections, then we need to ensure that - // the stale value is not removed from the cache when the fetch fails. - const del = !noDelete || p.__staleWhileFetching === undefined - if (del) { - this.delete(k) - } else if (!allowStaleAborted) { - // still replace the *promise* with the stale value, - // since we are done with the promise at this point. - // leave it untouched if we're still waiting for an - // aborted background fetch that hasn't yet returned. - this.valList[index] = p.__staleWhileFetching - } - } - if (allowStale) { - if (options.status && p.__staleWhileFetching !== undefined) { - options.status.returnedStale = true - } - return p.__staleWhileFetching - } else if (p.__returned === p) { - throw er - } - } - const pcall = (res, rej) => { - this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) - // ignored, we go until we finish, regardless. - // defer check until we are actually aborting, - // so fetchMethod can override. - ac.signal.addEventListener('abort', () => { - if ( - !options.ignoreFetchAbort || - options.allowStaleOnFetchAbort - ) { - res() - // when it eventually resolves, update the cache. - if (options.allowStaleOnFetchAbort) { - res = v => cb(v, true) - } - } - }) - } - if (options.status) options.status.fetchDispatched = true - const p = new Promise(pcall).then(cb, eb) - p.__abortController = ac - p.__staleWhileFetching = v - p.__returned = null - if (index === undefined) { - // internal, don't expose status. 
- this.set(k, p, { ...fetchOpts.options, status: undefined }) - index = this.keyMap.get(k) - } else { - this.valList[index] = p - } - return p - } - - isBackgroundFetch(p) { - return ( - p && - typeof p === 'object' && - typeof p.then === 'function' && - Object.prototype.hasOwnProperty.call( - p, - '__staleWhileFetching' - ) && - Object.prototype.hasOwnProperty.call(p, '__returned') && - (p.__returned === p || p.__returned === null) - ) - } - - // this takes the union of get() and set() opts, because it does both - async fetch( - k, - { - // get options - allowStale = this.allowStale, - updateAgeOnGet = this.updateAgeOnGet, - noDeleteOnStaleGet = this.noDeleteOnStaleGet, - // set options - ttl = this.ttl, - noDisposeOnSet = this.noDisposeOnSet, - size = 0, - sizeCalculation = this.sizeCalculation, - noUpdateTTL = this.noUpdateTTL, - // fetch exclusive options - noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, - allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, - ignoreFetchAbort = this.ignoreFetchAbort, - allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, - fetchContext = this.fetchContext, - forceRefresh = false, - status, - signal, - } = {} - ) { - if (!this.fetchMethod) { - if (status) status.fetch = 'get' - return this.get(k, { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - status, - }) - } - - const options = { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - ttl, - noDisposeOnSet, - size, - sizeCalculation, - noUpdateTTL, - noDeleteOnFetchRejection, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - status, - signal, - } - - let index = this.keyMap.get(k) - if (index === undefined) { - if (status) status.fetch = 'miss' - const p = this.backgroundFetch(k, index, options, fetchContext) - return (p.__returned = p) - } else { - // in cache, maybe already fetching - const v = this.valList[index] - if (this.isBackgroundFetch(v)) { - const stale = - allowStale && v.__staleWhileFetching !== undefined - if (status) { - status.fetch = 'inflight' - if (stale) status.returnedStale = true - } - return stale ? v.__staleWhileFetching : (v.__returned = v) - } - - // if we force a refresh, that means do NOT serve the cached value, - // unless we are already in the process of refreshing the cache. - const isStale = this.isStale(index) - if (!forceRefresh && !isStale) { - if (status) status.fetch = 'hit' - this.moveToTail(index) - if (updateAgeOnGet) { - this.updateItemAge(index) - } - this.statusTTL(status, index) - return v - } - - // ok, it is stale or a forced refresh, and not already fetching. - // refresh the cache. - const p = this.backgroundFetch(k, index, options, fetchContext) - const hasStale = p.__staleWhileFetching !== undefined - const staleVal = hasStale && allowStale - if (status) { - status.fetch = hasStale && isStale ? 'stale' : 'refresh' - if (staleVal && isStale) status.returnedStale = true - } - return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) - } - } - - get( - k, - { - allowStale = this.allowStale, - updateAgeOnGet = this.updateAgeOnGet, - noDeleteOnStaleGet = this.noDeleteOnStaleGet, - status, - } = {} - ) { - const index = this.keyMap.get(k) - if (index !== undefined) { - const value = this.valList[index] - const fetching = this.isBackgroundFetch(value) - this.statusTTL(status, index) - if (this.isStale(index)) { - if (status) status.get = 'stale' - // delete only if not an in-flight background fetch - if (!fetching) { - if (!noDeleteOnStaleGet) { - this.delete(k) - } - if (status) status.returnedStale = allowStale - return allowStale ? value : undefined - } else { - if (status) { - status.returnedStale = - allowStale && value.__staleWhileFetching !== undefined - } - return allowStale ? value.__staleWhileFetching : undefined - } - } else { - if (status) status.get = 'hit' - // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching. - // If it's not stale, and fetching, AND has a __staleWhileFetching - // value, then that means the user fetched with {forceRefresh:true}, - // so it's safe to return that value. - if (fetching) { - return value.__staleWhileFetching - } - this.moveToTail(index) - if (updateAgeOnGet) { - this.updateItemAge(index) - } - return value - } - } else if (status) { - status.get = 'miss' - } - } - - connect(p, n) { - this.prev[n] = p - this.next[p] = n - } - - moveToTail(index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.tail) { - if (index === this.head) { - this.head = this.next[index] - } else { - this.connect(this.prev[index], this.next[index]) - } - this.connect(this.tail, index) - this.tail = index - } - } - - get del() { - deprecatedMethod('del', 'delete') - return this.delete - } - - delete(k) { - let deleted = false - if (this.size !== 0) { - const index = this.keyMap.get(k) - if (index !== undefined) { - deleted = true - if (this.size === 1) { - this.clear() - } else { - this.removeItemSize(index) - const v = this.valList[index] - if (this.isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')) - } else { - this.dispose(v, k, 'delete') - if (this.disposeAfter) { - this.disposed.push([v, k, 'delete']) - } - } - this.keyMap.delete(k) - this.keyList[index] = null - this.valList[index] = null - if (index === this.tail) { - this.tail = this.prev[index] - } else if (index === this.head) { - this.head = this.next[index] - } else { - this.next[this.prev[index]] = this.next[index] - this.prev[this.next[index]] = this.prev[index] - } - this.size-- - this.free.push(index) - } - } - } - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return deleted - } - - clear() { - for (const index of this.rindexes({ allowStale: true })) { - const v = this.valList[index] - if (this.isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')) - } else { - const k = this.keyList[index] - this.dispose(v, k, 'delete') - if (this.disposeAfter) { - this.disposed.push([v, k, 'delete']) - } - } - } - - this.keyMap.clear() - this.valList.fill(null) - this.keyList.fill(null) - if (this.ttls) { - this.ttls.fill(0) - this.starts.fill(0) - } - if (this.sizes) { - this.sizes.fill(0) - } - 
this.head = 0 - this.tail = 0 - this.initialFill = 1 - this.free.length = 0 - this.calculatedSize = 0 - this.size = 0 - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - } - - get reset() { - deprecatedMethod('reset', 'clear') - return this.clear - } - - get length() { - deprecatedProperty('length', 'size') - return this.size - } - - static get AbortController() { - return AC - } - static get AbortSignal() { - return AS - } -} - -export default LRUCache diff --git a/node_modules/init-package-json/node_modules/lru-cache/package.json b/node_modules/init-package-json/node_modules/lru-cache/package.json deleted file mode 100644 index 9684991727e7a..0000000000000 --- a/node_modules/init-package-json/node_modules/lru-cache/package.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "name": "lru-cache", - "description": "A cache object that deletes the least-recently-used items.", - "version": "7.18.3", - "author": "Isaac Z. Schlueter ", - "keywords": [ - "mru", - "lru", - "cache" - ], - "sideEffects": false, - "scripts": { - "build": "npm run prepare", - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "prepare": "node ./scripts/transpile-to-esm.js", - "size": "size-limit", - "test": "tap", - "snap": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "format": "prettier --write .", - "typedoc": "typedoc ./index.d.ts" - }, - "type": "commonjs", - "main": "./index.js", - "module": "./index.mjs", - "types": "./index.d.ts", - "exports": { - ".": { - "import": { - "types": "./index.d.ts", - "default": "./index.mjs" - }, - "require": { - "types": "./index.d.ts", - "default": "./index.js" - } - }, - "./package.json": "./package.json" - }, - "repository": "git://github.com/isaacs/node-lru-cache.git", - "devDependencies": { - "@size-limit/preset-small-lib": "^7.0.8", - "@types/node": "^17.0.31", - "@types/tap": "^15.0.6", - "benchmark": "^2.1.4", - "c8": "^7.11.2", - "clock-mock": "^1.0.6", - "eslint-config-prettier": "^8.5.0", - "prettier": "^2.6.2", - "size-limit": "^7.0.8", - "tap": "^16.3.4", - "ts-node": "^10.7.0", - "tslib": "^2.4.0", - "typedoc": "^0.23.24", - "typescript": "^4.6.4" - }, - "license": "ISC", - "files": [ - "index.js", - "index.mjs", - "index.d.ts" - ], - "engines": { - "node": ">=12" - }, - "prettier": { - "semi": false, - "printWidth": 70, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - }, - "tap": { - "nyc-arg": [ - "--include=index.js" - ], - "node-arg": [ - "--expose-gc", - "--require", - "ts-node/register" - ], - "ts": false - }, - "size-limit": [ - { - "path": "./index.js" - } - ] -} diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/LICENSE b/node_modules/init-package-json/node_modules/normalize-package-data/LICENSE new file mode 100644 index 0000000000000..19d1364a8ac08 --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/LICENSE @@ -0,0 +1,15 @@ +This package contains code originally written by Isaac Z. Schlueter. +Used with permission. + +Copyright (c) Meryn Stol ("Author") +All rights reserved. + +The BSD License + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. 
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js new file mode 100644 index 0000000000000..631966b5f29af --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/extract_description.js @@ -0,0 +1,24 @@ +module.exports = extractDescription + +// Extracts description from contents of a readme file in markdown format +function extractDescription (d) { + if (!d) { + return + } + if (d === 'ERROR: No README data found!') { + return + } + // the first block of text before the first heading + // that isn't the first line heading + d = d.trim().split('\n') + let s = 0 + while (d[s] && d[s].trim().match(/^(#|$)/)) { + s++ + } + const l = d.length + let e = s + 1 + while (e < l && d[e].trim()) { + e++ + } + return d.slice(s, e).join(' ').trim() +} diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js new file mode 100644 index 0000000000000..bb78231d83ca9 --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/fixer.js @@ -0,0 +1,475 @@ +var isValidSemver = require('semver/functions/valid') +var cleanSemver = require('semver/functions/clean') +var validateLicense = require('validate-npm-package-license') +var hostedGitInfo = require('hosted-git-info') +var isBuiltinModule = require('is-core-module') +var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies'] +var extractDescription = require('./extract_description') +var url = require('url') +var typos = require('./typos.json') + +var isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.')) + +module.exports = { + // default warning function + warn: function () {}, + + fixRepositoryField: function (data) { + if (data.repositories) { + this.warn('repositories') + data.repository = data.repositories[0] + } + if (!data.repository) { + return this.warn('missingRepository') + } + if (typeof data.repository === 'string') { + data.repository = { + type: 'git', + url: data.repository, + } + } + var r = data.repository.url || '' + if (r) { + var hosted = hostedGitInfo.fromUrl(r) + if (hosted) { + r = data.repository.url + = hosted.getDefaultRepresentation() === 'shortcut' ? 
hosted.https() : hosted.toString() + } + } + + if (r.match(/github.com\/[^/]+\/[^/]+\.git\.git$/)) { + this.warn('brokenGitUrl', r) + } + }, + + fixTypos: function (data) { + Object.keys(typos.topLevel).forEach(function (d) { + if (Object.prototype.hasOwnProperty.call(data, d)) { + this.warn('typo', d, typos.topLevel[d]) + } + }, this) + }, + + fixScriptsField: function (data) { + if (!data.scripts) { + return + } + if (typeof data.scripts !== 'object') { + this.warn('nonObjectScripts') + delete data.scripts + return + } + Object.keys(data.scripts).forEach(function (k) { + if (typeof data.scripts[k] !== 'string') { + this.warn('nonStringScript') + delete data.scripts[k] + } else if (typos.script[k] && !data.scripts[typos.script[k]]) { + this.warn('typo', k, typos.script[k], 'scripts') + } + }, this) + }, + + fixFilesField: function (data) { + var files = data.files + if (files && !Array.isArray(files)) { + this.warn('nonArrayFiles') + delete data.files + } else if (data.files) { + data.files = data.files.filter(function (file) { + if (!file || typeof file !== 'string') { + this.warn('invalidFilename', file) + return false + } else { + return true + } + }, this) + } + }, + + fixBinField: function (data) { + if (!data.bin) { + return + } + if (typeof data.bin === 'string') { + var b = {} + var match + if (match = data.name.match(/^@[^/]+[/](.*)$/)) { + b[match[1]] = data.bin + } else { + b[data.name] = data.bin + } + data.bin = b + } + }, + + fixManField: function (data) { + if (!data.man) { + return + } + if (typeof data.man === 'string') { + data.man = [data.man] + } + }, + fixBundleDependenciesField: function (data) { + var bdd = 'bundledDependencies' + var bd = 'bundleDependencies' + if (data[bdd] && !data[bd]) { + data[bd] = data[bdd] + delete data[bdd] + } + if (data[bd] && !Array.isArray(data[bd])) { + this.warn('nonArrayBundleDependencies') + delete data[bd] + } else if (data[bd]) { + data[bd] = data[bd].filter(function (filtered) { + if (!filtered || typeof filtered !== 'string') { + this.warn('nonStringBundleDependency', filtered) + return false + } else { + if (!data.dependencies) { + data.dependencies = {} + } + if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) { + this.warn('nonDependencyBundleDependency', filtered) + data.dependencies[filtered] = '*' + } + return true + } + }, this) + } + }, + + fixDependencies: function (data, strict) { + objectifyDeps(data, this.warn) + addOptionalDepsToDeps(data, this.warn) + this.fixBundleDependenciesField(data) + + ;['dependencies', 'devDependencies'].forEach(function (deps) { + if (!(deps in data)) { + return + } + if (!data[deps] || typeof data[deps] !== 'object') { + this.warn('nonObjectDependencies', deps) + delete data[deps] + return + } + Object.keys(data[deps]).forEach(function (d) { + var r = data[deps][d] + if (typeof r !== 'string') { + this.warn('nonStringDependency', d, JSON.stringify(r)) + delete data[deps][d] + } + var hosted = hostedGitInfo.fromUrl(data[deps][d]) + if (hosted) { + data[deps][d] = hosted.toString() + } + }, this) + }, this) + }, + + fixModulesField: function (data) { + if (data.modules) { + this.warn('deprecatedModules') + delete data.modules + } + }, + + fixKeywordsField: function (data) { + if (typeof data.keywords === 'string') { + data.keywords = data.keywords.split(/,\s+/) + } + if (data.keywords && !Array.isArray(data.keywords)) { + delete data.keywords + this.warn('nonArrayKeywords') + } else if (data.keywords) { + data.keywords = data.keywords.filter(function (kw) { + if (typeof 
kw !== 'string' || !kw) { + this.warn('nonStringKeyword') + return false + } else { + return true + } + }, this) + } + }, + + fixVersionField: function (data, strict) { + // allow "loose" semver 1.0 versions in non-strict mode + // enforce strict semver 2.0 compliance in strict mode + var loose = !strict + if (!data.version) { + data.version = '' + return true + } + if (!isValidSemver(data.version, loose)) { + throw new Error('Invalid version: "' + data.version + '"') + } + data.version = cleanSemver(data.version, loose) + return true + }, + + fixPeople: function (data) { + modifyPeople(data, unParsePerson) + modifyPeople(data, parsePerson) + }, + + fixNameField: function (data, options) { + if (typeof options === 'boolean') { + options = { strict: options } + } else if (typeof options === 'undefined') { + options = {} + } + var strict = options.strict + if (!data.name && !strict) { + data.name = '' + return + } + if (typeof data.name !== 'string') { + throw new Error('name field must be a string.') + } + if (!strict) { + data.name = data.name.trim() + } + ensureValidName(data.name, strict, options.allowLegacyCase) + if (isBuiltinModule(data.name)) { + this.warn('conflictingName', data.name) + } + }, + + fixDescriptionField: function (data) { + if (data.description && typeof data.description !== 'string') { + this.warn('nonStringDescription') + delete data.description + } + if (data.readme && !data.description) { + data.description = extractDescription(data.readme) + } + if (data.description === undefined) { + delete data.description + } + if (!data.description) { + this.warn('missingDescription') + } + }, + + fixReadmeField: function (data) { + if (!data.readme) { + this.warn('missingReadme') + data.readme = 'ERROR: No README data found!' + } + }, + + fixBugsField: function (data) { + if (!data.bugs && data.repository && data.repository.url) { + var hosted = hostedGitInfo.fromUrl(data.repository.url) + if (hosted && hosted.bugs()) { + data.bugs = { url: hosted.bugs() } + } + } else if (data.bugs) { + if (typeof data.bugs === 'string') { + if (isEmail(data.bugs)) { + data.bugs = { email: data.bugs } + /* eslint-disable-next-line node/no-deprecated-api */ + } else if (url.parse(data.bugs).protocol) { + data.bugs = { url: data.bugs } + } else { + this.warn('nonEmailUrlBugsString') + } + } else { + bugsTypos(data.bugs, this.warn) + var oldBugs = data.bugs + data.bugs = {} + if (oldBugs.url) { + /* eslint-disable-next-line node/no-deprecated-api */ + if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) { + data.bugs.url = oldBugs.url + } else { + this.warn('nonUrlBugsUrlField') + } + } + if (oldBugs.email) { + if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) { + data.bugs.email = oldBugs.email + } else { + this.warn('nonEmailBugsEmailField') + } + } + } + if (!data.bugs.email && !data.bugs.url) { + delete data.bugs + this.warn('emptyNormalizedBugs') + } + } + }, + + fixHomepageField: function (data) { + if (!data.homepage && data.repository && data.repository.url) { + var hosted = hostedGitInfo.fromUrl(data.repository.url) + if (hosted && hosted.docs()) { + data.homepage = hosted.docs() + } + } + if (!data.homepage) { + return + } + + if (typeof data.homepage !== 'string') { + this.warn('nonUrlHomepage') + return delete data.homepage + } + /* eslint-disable-next-line node/no-deprecated-api */ + if (!url.parse(data.homepage).protocol) { + data.homepage = 'http://' + data.homepage + } + }, + + fixLicenseField: function (data) { + const license = 
data.license || data.licence + if (!license) { + return this.warn('missingLicense') + } + if ( + typeof (license) !== 'string' || + license.length < 1 || + license.trim() === '' + ) { + return this.warn('invalidLicense') + } + if (!validateLicense(license).validForNewPackages) { + return this.warn('invalidLicense') + } + }, +} + +function isValidScopedPackageName (spec) { + if (spec.charAt(0) !== '@') { + return false + } + + var rest = spec.slice(1).split('/') + if (rest.length !== 2) { + return false + } + + return rest[0] && rest[1] && + rest[0] === encodeURIComponent(rest[0]) && + rest[1] === encodeURIComponent(rest[1]) +} + +function isCorrectlyEncodedName (spec) { + return !spec.match(/[/@\s+%:]/) && + spec === encodeURIComponent(spec) +} + +function ensureValidName (name, strict, allowLegacyCase) { + if (name.charAt(0) === '.' || + !(isValidScopedPackageName(name) || isCorrectlyEncodedName(name)) || + (strict && (!allowLegacyCase) && name !== name.toLowerCase()) || + name.toLowerCase() === 'node_modules' || + name.toLowerCase() === 'favicon.ico') { + throw new Error('Invalid name: ' + JSON.stringify(name)) + } +} + +function modifyPeople (data, fn) { + if (data.author) { + data.author = fn(data.author) + }['maintainers', 'contributors'].forEach(function (set) { + if (!Array.isArray(data[set])) { + return + } + data[set] = data[set].map(fn) + }) + return data +} + +function unParsePerson (person) { + if (typeof person === 'string') { + return person + } + var name = person.name || '' + var u = person.url || person.web + var wrappedUrl = u ? (' (' + u + ')') : '' + var e = person.email || person.mail + var wrappedEmail = e ? (' <' + e + '>') : '' + return name + wrappedEmail + wrappedUrl +} + +function parsePerson (person) { + if (typeof person !== 'string') { + return person + } + var matchedName = person.match(/^([^(<]+)/) + var matchedUrl = person.match(/\(([^()]+)\)/) + var matchedEmail = person.match(/<([^<>]+)>/) + var obj = {} + if (matchedName && matchedName[0].trim()) { + obj.name = matchedName[0].trim() + } + if (matchedEmail) { + obj.email = matchedEmail[1] + } + if (matchedUrl) { + obj.url = matchedUrl[1] + } + return obj +} + +function addOptionalDepsToDeps (data, warn) { + var o = data.optionalDependencies + if (!o) { + return + } + var d = data.dependencies || {} + Object.keys(o).forEach(function (k) { + d[k] = o[k] + }) + data.dependencies = d +} + +function depObjectify (deps, type, warn) { + if (!deps) { + return {} + } + if (typeof deps === 'string') { + deps = deps.trim().split(/[\n\r\s\t ,]+/) + } + if (!Array.isArray(deps)) { + return deps + } + warn('deprecatedArrayDependencies', type) + var o = {} + deps.filter(function (d) { + return typeof d === 'string' + }).forEach(function (d) { + d = d.trim().split(/(:?[@\s><=])/) + var dn = d.shift() + var dv = d.join('') + dv = dv.trim() + dv = dv.replace(/^@/, '') + o[dn] = dv + }) + return o +} + +function objectifyDeps (data, warn) { + depTypes.forEach(function (type) { + if (!data[type]) { + return + } + data[type] = depObjectify(data[type], type, warn) + }) +} + +function bugsTypos (bugs, warn) { + if (!bugs) { + return + } + Object.keys(bugs).forEach(function (k) { + if (typos.bugs[k]) { + warn('typo', k, typos.bugs[k], 'bugs') + bugs[typos.bugs[k]] = bugs[k] + delete bugs[k] + } + }) +} diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js new file mode 100644 index 
0000000000000..3be9c86539952 --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/make_warning.js @@ -0,0 +1,22 @@ +var util = require('util') +var messages = require('./warning_messages.json') + +module.exports = function () { + var args = Array.prototype.slice.call(arguments, 0) + var warningName = args.shift() + if (warningName === 'typo') { + return makeTypoWarning.apply(null, args) + } else { + var msgTemplate = messages[warningName] ? messages[warningName] : warningName + ": '%s'" + args.unshift(msgTemplate) + return util.format.apply(null, args) + } +} + +function makeTypoWarning (providedName, probableName, field) { + if (field) { + providedName = field + "['" + providedName + "']" + probableName = field + "['" + probableName + "']" + } + return util.format(messages.typo, providedName, probableName) +} diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js new file mode 100644 index 0000000000000..bf71d2c1e2235 --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/normalize.js @@ -0,0 +1,48 @@ +module.exports = normalize + +var fixer = require('./fixer') +normalize.fixer = fixer + +var makeWarning = require('./make_warning') + +var fieldsToFix = ['name', 'version', 'description', 'repository', 'modules', 'scripts', + 'files', 'bin', 'man', 'bugs', 'keywords', 'readme', 'homepage', 'license'] +var otherThingsToFix = ['dependencies', 'people', 'typos'] + +var thingsToFix = fieldsToFix.map(function (fieldName) { + return ucFirst(fieldName) + 'Field' +}) +// two ways to do this in CoffeeScript on only one line, sub-70 chars: +// thingsToFix = fieldsToFix.map (name) -> ucFirst(name) + "Field" +// thingsToFix = (ucFirst(name) + "Field" for name in fieldsToFix) +thingsToFix = thingsToFix.concat(otherThingsToFix) + +function normalize (data, warn, strict) { + if (warn === true) { + warn = null + strict = true + } + if (!strict) { + strict = false + } + if (!warn || data.private) { + warn = function (msg) { /* noop */ } + } + + if (data.scripts && + data.scripts.install === 'node-gyp rebuild' && + !data.scripts.preinstall) { + data.gypfile = true + } + fixer.warn = function () { + warn(makeWarning.apply(null, arguments)) + } + thingsToFix.forEach(function (thingName) { + fixer['fix' + ucFirst(thingName)](data, strict) + }) + data._id = data.name + '@' + data.version +} + +function ucFirst (string) { + return string.charAt(0).toUpperCase() + string.slice(1) +} diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js b/node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js new file mode 100644 index 0000000000000..5fc888e5450cd --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/safe_format.js @@ -0,0 +1,11 @@ +var util = require('util') + +module.exports = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.forEach(function (arg) { + if (!arg) { + throw new TypeError('Bad arguments.') + } + }) + return util.format.apply(null, arguments) +} diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json b/node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json new file mode 100644 index 0000000000000..7f9dd283b30ff --- /dev/null +++ 
b/node_modules/init-package-json/node_modules/normalize-package-data/lib/typos.json @@ -0,0 +1,25 @@ +{ + "topLevel": { + "dependancies": "dependencies" + ,"dependecies": "dependencies" + ,"depdenencies": "dependencies" + ,"devEependencies": "devDependencies" + ,"depends": "dependencies" + ,"dev-dependencies": "devDependencies" + ,"devDependences": "devDependencies" + ,"devDepenencies": "devDependencies" + ,"devdependencies": "devDependencies" + ,"repostitory": "repository" + ,"repo": "repository" + ,"prefereGlobal": "preferGlobal" + ,"hompage": "homepage" + ,"hampage": "homepage" + ,"autohr": "author" + ,"autor": "author" + ,"contributers": "contributors" + ,"publicationConfig": "publishConfig" + ,"script": "scripts" + }, + "bugs": { "web": "url", "name": "url" }, + "script": { "server": "start", "tests": "test" } +} diff --git a/node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json b/node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json new file mode 100644 index 0000000000000..4890f506ed965 --- /dev/null +++ b/node_modules/init-package-json/node_modules/normalize-package-data/lib/warning_messages.json @@ -0,0 +1,30 @@ +{ + "repositories": "'repositories' (plural) Not supported. Please pick one as the 'repository' field" + ,"missingRepository": "No repository field." + ,"brokenGitUrl": "Probably broken git url: %s" + ,"nonObjectScripts": "scripts must be an object" + ,"nonStringScript": "script values must be string commands" + ,"nonArrayFiles": "Invalid 'files' member" + ,"invalidFilename": "Invalid filename in 'files' list: %s" + ,"nonArrayBundleDependencies": "Invalid 'bundleDependencies' list. Must be array of package names" + ,"nonStringBundleDependency": "Invalid bundleDependencies member: %s" + ,"nonDependencyBundleDependency": "Non-dependency in bundleDependencies: %s" + ,"nonObjectDependencies": "%s field must be an object" + ,"nonStringDependency": "Invalid dependency: %s %s" + ,"deprecatedArrayDependencies": "specifying %s as array is deprecated" + ,"deprecatedModules": "modules field is deprecated" + ,"nonArrayKeywords": "keywords should be an array of strings" + ,"nonStringKeyword": "keywords should be an array of strings" + ,"conflictingName": "%s is also the name of a node core module." + ,"nonStringDescription": "'description' field should be a string" + ,"missingDescription": "No description" + ,"missingReadme": "No README data" + ,"missingLicense": "No license field." + ,"nonEmailUrlBugsString": "Bug string field must be url, email, or {email,url}" + ,"nonUrlBugsUrlField": "bugs.url field must be a string url. Deleted." + ,"nonEmailBugsEmailField": "bugs.email field must be a string email. Deleted." + ,"emptyNormalizedBugs": "Normalized value of bugs field is an empty object. Deleted." + ,"nonUrlHomepage": "homepage field must be a string url. Deleted." + ,"invalidLicense": "license should be a valid SPDX license expression" + ,"typo": "%s should probably be %s." 
+} diff --git a/node_modules/init-package-json/node_modules/npm-package-arg/package.json b/node_modules/init-package-json/node_modules/normalize-package-data/package.json similarity index 55% rename from node_modules/init-package-json/node_modules/npm-package-arg/package.json rename to node_modules/init-package-json/node_modules/normalize-package-data/package.json index bb9e71b258a93..48d2371d4a66b 100644 --- a/node_modules/init-package-json/node_modules/npm-package-arg/package.json +++ b/node_modules/init-package-json/node_modules/normalize-package-data/package.json @@ -1,59 +1,62 @@ { - "name": "npm-package-arg", - "version": "10.1.0", - "description": "Parse the things that can be arguments to `npm install`", - "main": "./lib/npa.js", - "directories": { - "test": "test" - }, - "files": [ - "bin/", - "lib/" - ], - "dependencies": { - "hosted-git-info": "^6.0.0", - "proc-log": "^3.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^5.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.10.0", - "tap": "^16.0.1" + "name": "normalize-package-data", + "version": "6.0.0", + "author": "GitHub Inc.", + "description": "Normalizes data that can be found in package.json files.", + "license": "BSD-2-Clause", + "repository": { + "type": "git", + "url": "https://github.com/npm/normalize-package-data.git" }, + "main": "lib/normalize.js", "scripts": { "test": "tap", - "snap": "tap", "npmclilint": "npmcli-lint", "lint": "eslint \"**/*.js\"", "lintfix": "npm run lint -- --fix", "posttest": "npm run lint", "postsnap": "npm run lintfix --", "postlint": "template-oss-check", + "snap": "tap", "template-oss-apply": "template-oss-apply --force" }, - "repository": { - "type": "git", - "url": "https://github.com/npm/npm-package-arg.git" + "dependencies": { + "hosted-git-info": "^7.0.0", + "is-core-module": "^2.8.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" }, - "author": "GitHub Inc.", - "license": "ISC", - "bugs": { - "url": "https://github.com/npm/npm-package-arg/issues" + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.18.0", + "tap": "^16.0.1" }, - "homepage": "https://github.com/npm/npm-package-arg", + "files": [ + "bin/", + "lib/" + ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.18.0", + "publish": "true", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] }, "tap": { - "branches": 97, + "branches": 86, + "functions": 92, + "lines": 86, + "statements": 86, "nyc-arg": [ "--exclude", "tap-snapshots/**" ] - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.10.0" } } diff --git a/node_modules/init-package-json/node_modules/npm-package-arg/LICENSE b/node_modules/init-package-json/node_modules/npm-package-arg/LICENSE deleted file mode 100644 index 19cec97b18468..0000000000000 --- a/node_modules/init-package-json/node_modules/npm-package-arg/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js b/node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js deleted file mode 100644 index 36bd18cd9f9a6..0000000000000 --- a/node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js +++ /dev/null @@ -1,431 +0,0 @@ -'use strict' -module.exports = npa -module.exports.resolve = resolve -module.exports.toPurl = toPurl -module.exports.Result = Result - -const url = require('url') -const HostedGit = require('hosted-git-info') -const semver = require('semver') -const path = global.FAKE_WINDOWS ? require('path').win32 : require('path') -const validatePackageName = require('validate-npm-package-name') -const { homedir } = require('os') -const log = require('proc-log') - -const isWindows = process.platform === 'win32' || global.FAKE_WINDOWS -const hasSlashes = isWindows ? /\\|[/]/ : /[/]/ -const isURL = /^(?:git[+])?[a-z]+:/i -const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i -const isFilename = /[.](?:tgz|tar.gz|tar)$/i - -function npa (arg, where) { - let name - let spec - if (typeof arg === 'object') { - if (arg instanceof Result && (!where || where === arg.where)) { - return arg - } else if (arg.name && arg.rawSpec) { - return npa.resolve(arg.name, arg.rawSpec, where || arg.where) - } else { - return npa(arg.raw, where || arg.where) - } - } - const nameEndsAt = arg[0] === '@' ? arg.slice(1).indexOf('@') + 1 : arg.indexOf('@') - const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg - if (isURL.test(arg)) { - spec = arg - } else if (isGit.test(arg)) { - spec = `git+ssh://${arg}` - } else if (namePart[0] !== '@' && (hasSlashes.test(namePart) || isFilename.test(namePart))) { - spec = arg - } else if (nameEndsAt > 0) { - name = namePart - spec = arg.slice(nameEndsAt + 1) || '*' - } else { - const valid = validatePackageName(arg) - if (valid.validForOldPackages) { - name = arg - spec = '*' - } else { - spec = arg - } - } - return resolve(name, spec, where, arg) -} - -const isFilespec = isWindows ? 
/^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/ : /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/ - -function resolve (name, spec, where, arg) { - const res = new Result({ - raw: arg, - name: name, - rawSpec: spec, - fromArgument: arg != null, - }) - - if (name) { - res.setName(name) - } - - if (spec && (isFilespec.test(spec) || /^file:/i.test(spec))) { - return fromFile(res, where) - } else if (spec && /^npm:/i.test(spec)) { - return fromAlias(res, where) - } - - const hosted = HostedGit.fromUrl(spec, { - noGitPlus: true, - noCommittish: true, - }) - if (hosted) { - return fromHostedGit(res, hosted) - } else if (spec && isURL.test(spec)) { - return fromURL(res) - } else if (spec && (hasSlashes.test(spec) || isFilename.test(spec))) { - return fromFile(res, where) - } else { - return fromRegistry(res) - } -} - -const defaultRegistry = 'https://registry.npmjs.org' - -function toPurl (arg, reg = defaultRegistry) { - const res = npa(arg) - - if (res.type !== 'version') { - throw invalidPurlType(res.type, res.raw) - } - - // URI-encode leading @ of scoped packages - let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec - if (reg !== defaultRegistry) { - purl += '?repository_url=' + reg - } - - return purl -} - -function invalidPackageName (name, valid, raw) { - // eslint-disable-next-line max-len - const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`) - err.code = 'EINVALIDPACKAGENAME' - return err -} - -function invalidTagName (name, raw) { - // eslint-disable-next-line max-len - const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`) - err.code = 'EINVALIDTAGNAME' - return err -} - -function invalidPurlType (type, raw) { - // eslint-disable-next-line max-len - const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`) - err.code = 'EINVALIDPURLTYPE' - return err -} - -function Result (opts) { - this.type = opts.type - this.registry = opts.registry - this.where = opts.where - if (opts.raw == null) { - this.raw = opts.name ? opts.name + '@' + opts.rawSpec : opts.rawSpec - } else { - this.raw = opts.raw - } - - this.name = undefined - this.escapedName = undefined - this.scope = undefined - this.rawSpec = opts.rawSpec || '' - this.saveSpec = opts.saveSpec - this.fetchSpec = opts.fetchSpec - if (opts.name) { - this.setName(opts.name) - } - this.gitRange = opts.gitRange - this.gitCommittish = opts.gitCommittish - this.gitSubdir = opts.gitSubdir - this.hosted = opts.hosted -} - -Result.prototype.setName = function (name) { - const valid = validatePackageName(name) - if (!valid.validForOldPackages) { - throw invalidPackageName(name, valid, this.raw) - } - - this.name = name - this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined - // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar - this.escapedName = name.replace('/', '%2f') - return this -} - -Result.prototype.toString = function () { - const full = [] - if (this.name != null && this.name !== '') { - full.push(this.name) - } - const spec = this.saveSpec || this.fetchSpec || this.rawSpec - if (spec != null && spec !== '') { - full.push(spec) - } - return full.length ? 
full.join('@') : this.raw -} - -Result.prototype.toJSON = function () { - const result = Object.assign({}, this) - delete result.hosted - return result -} - -function setGitCommittish (res, committish) { - if (!committish) { - res.gitCommittish = null - return res - } - - // for each :: separated item: - for (const part of committish.split('::')) { - // if the item has no : the n it is a commit-ish - if (!part.includes(':')) { - if (res.gitRange) { - throw new Error('cannot override existing semver range with a committish') - } - if (res.gitCommittish) { - throw new Error('cannot override existing committish with a second committish') - } - res.gitCommittish = part - continue - } - // split on name:value - const [name, value] = part.split(':') - // if name is semver do semver lookup of ref or tag - if (name === 'semver') { - if (res.gitCommittish) { - throw new Error('cannot override existing committish with a semver range') - } - if (res.gitRange) { - throw new Error('cannot override existing semver range with a second semver range') - } - res.gitRange = decodeURIComponent(value) - continue - } - if (name === 'path') { - if (res.gitSubdir) { - throw new Error('cannot override existing path with a second path') - } - res.gitSubdir = `/${value}` - continue - } - log.warn('npm-package-arg', `ignoring unknown key "${name}"`) - } - - return res -} - -function fromFile (res, where) { - if (!where) { - where = process.cwd() - } - res.type = isFilename.test(res.rawSpec) ? 'file' : 'directory' - res.where = where - - // always put the '/' on where when resolving urls, or else - // file:foo from /path/to/bar goes to /path/to/foo, when we want - // it to be /path/to/bar/foo - - let specUrl - let resolvedUrl - const prefix = (!/^file:/.test(res.rawSpec) ? 'file:' : '') - const rawWithPrefix = prefix + res.rawSpec - let rawNoPrefix = rawWithPrefix.replace(/^file:/, '') - try { - resolvedUrl = new url.URL(rawWithPrefix, `file://${path.resolve(where)}/`) - specUrl = new url.URL(rawWithPrefix) - } catch (originalError) { - const er = new Error('Invalid file: URL, must comply with RFC 8909') - throw Object.assign(er, { - raw: res.rawSpec, - spec: res, - where, - originalError, - }) - } - - // environment switch for testing - if (process.env.NPM_PACKAGE_ARG_8909_STRICT !== '1') { - // XXX backwards compatibility lack of compliance with 8909 - // Remove when we want a breaking change to come into RFC compliance. - if (resolvedUrl.host && resolvedUrl.host !== 'localhost') { - const rawSpec = res.rawSpec.replace(/^file:\/\//, 'file:///') - resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`) - specUrl = new url.URL(rawSpec) - rawNoPrefix = rawSpec.replace(/^file:/, '') - } - // turn file:/../foo into file:../foo - // for 1, 2 or 3 leading slashes since we attempted - // in the previous step to make it a file protocol url with a leading slash - if (/^\/{1,3}\.\.?(\/|$)/.test(rawNoPrefix)) { - const rawSpec = res.rawSpec.replace(/^file:\/{1,3}/, 'file:') - resolvedUrl = new url.URL(rawSpec, `file://${path.resolve(where)}/`) - specUrl = new url.URL(rawSpec) - rawNoPrefix = rawSpec.replace(/^file:/, '') - } - // XXX end 8909 violation backwards compatibility section - } - - // file:foo - relative url to ./foo - // file:/foo - absolute path /foo - // file:///foo - absolute path to /foo, no authority host - // file://localhost/foo - absolute path to /foo, on localhost - // file://foo - absolute path to / on foo host (error!) 
- if (resolvedUrl.host && resolvedUrl.host !== 'localhost') { - const msg = `Invalid file: URL, must be absolute if // present` - throw Object.assign(new Error(msg), { - raw: res.rawSpec, - parsed: resolvedUrl, - }) - } - - // turn /C:/blah into just C:/blah on windows - let specPath = decodeURIComponent(specUrl.pathname) - let resolvedPath = decodeURIComponent(resolvedUrl.pathname) - if (isWindows) { - specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1') - resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1') - } - - // replace ~ with homedir, but keep the ~ in the saveSpec - // otherwise, make it relative to where param - if (/^\/~(\/|$)/.test(specPath)) { - res.saveSpec = `file:${specPath.substr(1)}` - resolvedPath = path.resolve(homedir(), specPath.substr(3)) - } else if (!path.isAbsolute(rawNoPrefix)) { - res.saveSpec = `file:${path.relative(where, resolvedPath)}` - } else { - res.saveSpec = `file:${path.resolve(resolvedPath)}` - } - - res.fetchSpec = path.resolve(where, resolvedPath) - return res -} - -function fromHostedGit (res, hosted) { - res.type = 'git' - res.hosted = hosted - res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false }) - res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString() - return setGitCommittish(res, hosted.committish) -} - -function unsupportedURLType (protocol, spec) { - const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`) - err.code = 'EUNSUPPORTEDPROTOCOL' - return err -} - -function matchGitScp (spec) { - // git ssh specifiers are overloaded to also use scp-style git - // specifiers, so we have to parse those out and treat them special. - // They are NOT true URIs, so we can't hand them to `url.parse`. - // - // This regex looks for things that look like: - // git+ssh://git@my.custom.git.com:username/project.git#deadbeef - // - // ...and various combinations. The username in the beginning is *required*. - const matched = spec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i) - return matched && !matched[1].match(/:[0-9]+\/?.*$/i) && { - fetchSpec: matched[1], - gitCommittish: matched[2] == null ? null : matched[2], - } -} - -function fromURL (res) { - // eslint-disable-next-line node/no-deprecated-api - const urlparse = url.parse(res.rawSpec) - res.saveSpec = res.rawSpec - // check the protocol, and then see if it's git or not - switch (urlparse.protocol) { - case 'git:': - case 'git+http:': - case 'git+https:': - case 'git+rsync:': - case 'git+ftp:': - case 'git+file:': - case 'git+ssh:': { - res.type = 'git' - const match = urlparse.protocol === 'git+ssh:' ? matchGitScp(res.rawSpec) - : null - if (match) { - setGitCommittish(res, match.gitCommittish) - res.fetchSpec = match.fetchSpec - } else { - setGitCommittish(res, urlparse.hash != null ? 
urlparse.hash.slice(1) : '') - urlparse.protocol = urlparse.protocol.replace(/^git[+]/, '') - if (urlparse.protocol === 'file:' && /^git\+file:\/\/[a-z]:/i.test(res.rawSpec)) { - // keep the drive letter : on windows file paths - urlparse.host += ':' - urlparse.hostname += ':' - } - delete urlparse.hash - res.fetchSpec = url.format(urlparse) - } - break - } - case 'http:': - case 'https:': - res.type = 'remote' - res.fetchSpec = res.saveSpec - break - - default: - throw unsupportedURLType(urlparse.protocol, res.rawSpec) - } - - return res -} - -function fromAlias (res, where) { - const subSpec = npa(res.rawSpec.substr(4), where) - if (subSpec.type === 'alias') { - throw new Error('nested aliases not supported') - } - - if (!subSpec.registry) { - throw new Error('aliases only work for registry deps') - } - - res.subSpec = subSpec - res.registry = true - res.type = 'alias' - res.saveSpec = null - res.fetchSpec = null - return res -} - -function fromRegistry (res) { - res.registry = true - const spec = res.rawSpec.trim() - // no save spec for registry components as we save based on the fetched - // version, not on the argument so this can't compute that. - res.saveSpec = null - res.fetchSpec = spec - const version = semver.valid(spec, true) - const range = semver.validRange(spec, true) - if (version) { - res.type = 'version' - } else if (range) { - res.type = 'range' - } else { - if (encodeURIComponent(spec) !== spec) { - throw invalidTagName(spec, res.raw) - } - res.type = 'tag' - } - return res -} diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/LICENSE b/node_modules/init-package-json/node_modules/read-package-json/LICENSE similarity index 93% rename from node_modules/init-package-json/node_modules/hosted-git-info/LICENSE rename to node_modules/init-package-json/node_modules/read-package-json/LICENSE index 45055763dc838..052085c436514 100644 --- a/node_modules/init-package-json/node_modules/hosted-git-info/LICENSE +++ b/node_modules/init-package-json/node_modules/read-package-json/LICENSE @@ -1,4 +1,6 @@ -Copyright (c) 2015, Rebecca Turner +The ISC License + +Copyright (c) Isaac Z. Schlueter Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above diff --git a/node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js b/node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js new file mode 100644 index 0000000000000..d35f09ebd208f --- /dev/null +++ b/node_modules/init-package-json/node_modules/read-package-json/lib/read-json.js @@ -0,0 +1,589 @@ +var fs = require('fs') + +var path = require('path') + +var { glob } = require('glob') +var normalizeData = require('normalize-package-data') +var safeJSON = require('json-parse-even-better-errors') +var util = require('util') +var normalizePackageBin = require('npm-normalize-package-bin') + +module.exports = readJson + +// put more stuff on here to customize. 
+readJson.extraSet = [ + bundleDependencies, + gypfile, + serverjs, + scriptpath, + authors, + readme, + mans, + bins, + githead, + fillTypes, +] + +var typoWarned = {} +var cache = {} + +function readJson (file, log_, strict_, cb_) { + var log, strict, cb + for (var i = 1; i < arguments.length - 1; i++) { + if (typeof arguments[i] === 'boolean') { + strict = arguments[i] + } else if (typeof arguments[i] === 'function') { + log = arguments[i] + } + } + + if (!log) { + log = function () {} + } + cb = arguments[arguments.length - 1] + + readJson_(file, log, strict, cb) +} + +function readJson_ (file, log, strict, cb) { + fs.readFile(file, 'utf8', function (er, d) { + parseJson(file, er, d, log, strict, cb) + }) +} + +function stripBOM (content) { + // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM) + // because the buffer-to-string conversion in `fs.readFileSync()` + // translates it to FEFF, the UTF-16 BOM. + if (content.charCodeAt(0) === 0xFEFF) { + content = content.slice(1) + } + return content +} + +function jsonClone (obj) { + if (obj == null) { + return obj + } else if (Array.isArray(obj)) { + var newarr = new Array(obj.length) + for (var ii in obj) { + newarr[ii] = jsonClone(obj[ii]) + } + return newarr + } else if (typeof obj === 'object') { + var newobj = {} + for (var kk in obj) { + newobj[kk] = jsonClone(obj[kk]) + } + return newobj + } else { + return obj + } +} + +function parseJson (file, er, d, log, strict, cb) { + if (er && er.code === 'ENOENT') { + return fs.stat(path.dirname(file), function (err, stat) { + if (!err && stat && !stat.isDirectory()) { + // ENOTDIR isn't used on Windows, but npm expects it. + er = Object.create(er) + er.code = 'ENOTDIR' + return cb(er) + } else { + return indexjs(file, er, log, strict, cb) + } + }) + } + if (er) { + return cb(er) + } + + if (cache[d]) { + return cb(null, jsonClone(cache[d])) + } + + var data + + try { + data = safeJSON(stripBOM(d)) + for (var key in data) { + if (/^_/.test(key)) { + delete data[key] + } + } + } catch (jsonErr) { + data = parseIndex(d) + if (!data) { + return cb(parseError(jsonErr, file)) + } + } + extrasCached(file, d, data, log, strict, cb) +} + +function extrasCached (file, d, data, log, strict, cb) { + extras(file, data, log, strict, function (err, extrasData) { + if (!err) { + cache[d] = jsonClone(extrasData) + } + cb(err, extrasData) + }) +} + +function indexjs (file, er, log, strict, cb) { + if (path.basename(file) === 'index.js') { + return cb(er) + } + + var index = path.resolve(path.dirname(file), 'index.js') + fs.readFile(index, 'utf8', function (er2, d) { + if (er2) { + return cb(er) + } + + if (cache[d]) { + return cb(null, cache[d]) + } + + var data = parseIndex(d) + if (!data) { + return cb(er) + } + + extrasCached(file, d, data, log, strict, cb) + }) +} + +readJson.extras = extras +function extras (file, data, log_, strict_, cb_) { + var log, strict, cb + for (var i = 2; i < arguments.length - 1; i++) { + if (typeof arguments[i] === 'boolean') { + strict = arguments[i] + } else if (typeof arguments[i] === 'function') { + log = arguments[i] + } + } + + if (!log) { + log = function () {} + } + cb = arguments[i] + + var set = readJson.extraSet + var n = set.length + var errState = null + set.forEach(function (fn) { + fn(file, data, then) + }) + + function then (er) { + if (errState) { + return + } + if (er) { + return cb(errState = er) + } + if (--n > 0) { + return + } + final(file, data, log, strict, cb) + } +} + +function scriptpath (file, data, cb) { + if (!data.scripts) { + 
return cb(null, data) + } + var k = Object.keys(data.scripts) + k.forEach(scriptpath_, data.scripts) + cb(null, data) +} + +function scriptpath_ (key) { + var s = this[key] + // This is never allowed, and only causes problems + if (typeof s !== 'string') { + return delete this[key] + } + + var spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/ + if (s.match(spre)) { + this[key] = this[key].replace(spre, '') + } +} + +function gypfile (file, data, cb) { + var dir = path.dirname(file) + var s = data.scripts || {} + if (s.install || s.preinstall) { + return cb(null, data) + } + + if (data.gypfile === false) { + return cb(null, data) + } + glob('*.gyp', { cwd: dir }) + .then(files => gypfile_(file, data, files, cb)) + .catch(er => cb(er)) +} + +function gypfile_ (file, data, files, cb) { + if (!files.length) { + return cb(null, data) + } + var s = data.scripts || {} + s.install = 'node-gyp rebuild' + data.scripts = s + data.gypfile = true + return cb(null, data) +} + +function serverjs (file, data, cb) { + var dir = path.dirname(file) + var s = data.scripts || {} + if (s.start) { + return cb(null, data) + } + fs.access(path.join(dir, 'server.js'), (err) => { + if (!err) { + s.start = 'node server.js' + data.scripts = s + } + return cb(null, data) + }) +} + +function authors (file, data, cb) { + if (data.contributors) { + return cb(null, data) + } + var af = path.resolve(path.dirname(file), 'AUTHORS') + fs.readFile(af, 'utf8', function (er, ad) { + // ignore error. just checking it. + if (er) { + return cb(null, data) + } + authors_(file, data, ad, cb) + }) +} + +function authors_ (file, data, ad, cb) { + ad = ad.split(/\r?\n/g).map(function (line) { + return line.replace(/^\s*#.*$/, '').trim() + }).filter(function (line) { + return line + }) + data.contributors = ad + return cb(null, data) +} + +function readme (file, data, cb) { + if (data.readme) { + return cb(null, data) + } + var dir = path.dirname(file) + var globOpts = { cwd: dir, nocase: true, mark: true } + glob('{README,README.*}', globOpts) + .then(files => { + // don't accept directories. + files = files.filter(function (filtered) { + return !filtered.match(/\/$/) + }) + if (!files.length) { + return cb() + } + var fn = preferMarkdownReadme(files) + var rm = path.resolve(dir, fn) + return readme_(file, data, rm, cb) + }) + .catch(er => cb(er)) +} + +function preferMarkdownReadme (files) { + var fallback = 0 + var re = /\.m?a?r?k?d?o?w?n?$/i + for (var i = 0; i < files.length; i++) { + if (files[i].match(re)) { + return files[i] + } else if (files[i].match(/README$/)) { + fallback = i + } + } + // prefer README.md, followed by README; otherwise, return + // the first filename (which could be README) + return files[fallback] +} + +function readme_ (file, data, rm, cb) { + var rmfn = path.basename(rm) + fs.readFile(rm, 'utf8', function (er, rmData) { + // maybe not readable, or something. 
+ if (er) { + return cb() + } + data.readme = rmData + data.readmeFilename = rmfn + return cb(er, data) + }) +} + +function mans (file, data, cb) { + let cwd = data.directories && data.directories.man + if (data.man || !cwd) { + return cb(null, data) + } + const dirname = path.dirname(file) + cwd = path.resolve(path.dirname(file), cwd) + glob('**/*.[0-9]', { cwd }) + .then(mansGlob => { + data.man = mansGlob.map(man => + path.relative(dirname, path.join(cwd, man)).split(path.sep).join('/') + ) + return cb(null, data) + }) + .catch(er => cb(er)) +} + +function bins (file, data, cb) { + data = normalizePackageBin(data) + + var m = data.directories && data.directories.bin + if (data.bin || !m) { + return cb(null, data) + } + + m = path.resolve(path.dirname(file), path.join('.', path.join('/', m))) + glob('**', { cwd: m }) + .then(binsGlob => bins_(file, data, binsGlob, cb)) + .catch(er => cb(er)) +} + +function bins_ (file, data, binsGlob, cb) { + var m = (data.directories && data.directories.bin) || '.' + data.bin = binsGlob.reduce(function (acc, mf) { + if (mf && mf.charAt(0) !== '.') { + var f = path.basename(mf) + acc[f] = path.join(m, mf) + } + return acc + }, {}) + return cb(null, normalizePackageBin(data)) +} + +function bundleDependencies (file, data, cb) { + var bd = 'bundleDependencies' + var bdd = 'bundledDependencies' + // normalize key name + if (data[bdd] !== undefined) { + if (data[bd] === undefined) { + data[bd] = data[bdd] + } + delete data[bdd] + } + if (data[bd] === false) { + delete data[bd] + } else if (data[bd] === true) { + data[bd] = Object.keys(data.dependencies || {}) + } else if (data[bd] !== undefined && !Array.isArray(data[bd])) { + delete data[bd] + } + return cb(null, data) +} + +function githead (file, data, cb) { + if (data.gitHead) { + return cb(null, data) + } + var dir = path.dirname(file) + var head = path.resolve(dir, '.git/HEAD') + fs.readFile(head, 'utf8', function (er, headData) { + if (er) { + var parent = path.dirname(dir) + if (parent === dir) { + return cb(null, data) + } + return githead(dir, data, cb) + } + githead_(data, dir, headData, cb) + }) +} + +function githead_ (data, dir, head, cb) { + if (!head.match(/^ref: /)) { + data.gitHead = head.trim() + return cb(null, data) + } + var headRef = head.replace(/^ref: /, '').trim() + var headFile = path.resolve(dir, '.git', headRef) + fs.readFile(headFile, 'utf8', function (er, headData) { + if (er || !headData) { + var packFile = path.resolve(dir, '.git/packed-refs') + return fs.readFile(packFile, 'utf8', function (readFileErr, refs) { + if (readFileErr || !refs) { + return cb(null, data) + } + refs = refs.split('\n') + for (var i = 0; i < refs.length; i++) { + var match = refs[i].match(/^([0-9a-f]{40}) (.+)$/) + if (match && match[2].trim() === headRef) { + data.gitHead = match[1] + break + } + } + return cb(null, data) + }) + } + headData = headData.replace(/^ref: /, '').trim() + data.gitHead = headData + return cb(null, data) + }) +} + +/** + * Warn if the bin references don't point to anything. This might be better in + * normalize-package-data if it had access to the file path. 
+ */ +function checkBinReferences_ (file, data, warn, cb) { + if (!(data.bin instanceof Object)) { + return cb() + } + + var keys = Object.keys(data.bin) + var keysLeft = keys.length + if (!keysLeft) { + return cb() + } + + function handleExists (relName, result) { + keysLeft-- + if (!result) { + warn('No bin file found at ' + relName) + } + if (!keysLeft) { + cb() + } + } + + keys.forEach(function (key) { + var dirName = path.dirname(file) + var relName = data.bin[key] + /* istanbul ignore if - impossible, bins have been normalized */ + if (typeof relName !== 'string') { + var msg = 'Bin filename for ' + key + + ' is not a string: ' + util.inspect(relName) + warn(msg) + delete data.bin[key] + handleExists(relName, true) + return + } + var binPath = path.resolve(dirName, relName) + fs.stat(binPath, (err) => handleExists(relName, !err)) + }) +} + +function final (file, data, log, strict, cb) { + var pId = makePackageId(data) + + function warn (msg) { + if (typoWarned[pId]) { + return + } + if (log) { + log('package.json', pId, msg) + } + } + + try { + normalizeData(data, warn, strict) + } catch (error) { + return cb(error) + } + + checkBinReferences_(file, data, warn, function () { + typoWarned[pId] = true + cb(null, data) + }) +} + +function fillTypes (file, data, cb) { + var index = data.main || 'index.js' + + if (typeof index !== 'string') { + return cb(new TypeError('The "main" attribute must be of type string.')) + } + + // TODO exports is much more complicated than this in verbose format + // We need to support for instance + + // "exports": { + // ".": [ + // { + // "default": "./lib/npm.js" + // }, + // "./lib/npm.js" + // ], + // "./package.json": "./package.json" + // }, + // as well as conditional exports + + // if (data.exports && typeof data.exports === 'string') { + // index = data.exports + // } + + // if (data.exports && data.exports['.']) { + // index = data.exports['.'] + // if (typeof index !== 'string') { + // } + // } + + var extless = + path.join(path.dirname(index), path.basename(index, path.extname(index))) + var dts = `./${extless}.d.ts` + var dtsPath = path.join(path.dirname(file), dts) + var hasDTSFields = 'types' in data || 'typings' in data + if (!hasDTSFields && fs.existsSync(dtsPath)) { + data.types = dts.split(path.sep).join('/') + } + + cb(null, data) +} + +function makePackageId (data) { + var name = cleanString(data.name) + var ver = cleanString(data.version) + return name + '@' + ver +} + +function cleanString (str) { + return (!str || typeof (str) !== 'string') ? '' : str.trim() +} + +// /**package { "name": "foo", "version": "1.2.3", ... 
} **/ +function parseIndex (data) { + data = data.split(/^\/\*\*package(?:\s|$)/m) + + if (data.length < 2) { + return null + } + data = data[1] + data = data.split(/\*\*\/$/m) + + if (data.length < 2) { + return null + } + data = data[0] + data = data.replace(/^\s*\*/mg, '') + + try { + return safeJSON(data) + } catch (er) { + return null + } +} + +function parseError (ex, file) { + var e = new Error('Failed to parse json\n' + ex.message) + e.code = 'EJSONPARSE' + e.path = file + return e +} diff --git a/node_modules/init-package-json/node_modules/read-package-json/package.json b/node_modules/init-package-json/node_modules/read-package-json/package.json new file mode 100644 index 0000000000000..01061f2bc2792 --- /dev/null +++ b/node_modules/init-package-json/node_modules/read-package-json/package.json @@ -0,0 +1,65 @@ +{ + "name": "read-package-json", + "version": "7.0.0", + "author": "GitHub Inc.", + "description": "The thing npm uses to read package.json files with semantics and defaults and validation", + "repository": { + "type": "git", + "url": "https://github.com/npm/read-package-json.git" + }, + "main": "lib/read-json.js", + "scripts": { + "prerelease": "npm t", + "postrelease": "npm publish && git push --follow-tags", + "release": "standard-version -s", + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint \"**/*.js\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force" + }, + "dependencies": { + "glob": "^10.2.2", + "json-parse-even-better-errors": "^3.0.0", + "normalize-package-data": "^6.0.0", + "npm-normalize-package-bin": "^3.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.18.0", + "tap": "^16.0.1" + }, + "license": "ISC", + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "tap": { + "branches": 73, + "functions": 77, + "lines": 77, + "statements": 77, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.18.0", + "publish": "true", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] + } +} diff --git a/node_modules/init-package-json/package.json b/node_modules/init-package-json/package.json index e2cb1fe25ebba..a164169a74df3 100644 --- a/node_modules/init-package-json/package.json +++ b/node_modules/init-package-json/package.json @@ -1,6 +1,6 @@ { "name": "init-package-json", - "version": "5.0.0", + "version": "6.0.0", "main": "lib/init-package-json.js", "scripts": { "test": "tap", @@ -19,22 +19,22 @@ "license": "ISC", "description": "A node module to get your node module started", "dependencies": { - "npm-package-arg": "^10.0.0", + "npm-package-arg": "^11.0.0", "promzard": "^1.0.0", "read": "^2.0.0", - "read-package-json": "^6.0.0", + "read-package-json": "^7.0.0", "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4", "validate-npm-package-name": "^5.0.0" }, "devDependencies": { - "@npmcli/config": "^6.0.0", + "@npmcli/config": "^7.0.0", "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.3", + "@npmcli/template-oss": "4.18.0", "tap": "^16.0.1" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "tap": { "statements": 95, @@ -63,6 +63,13 @@ ], "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.3" + "version": "4.18.0", + "publish": true, + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] } } diff --git a/package-lock.json b/package-lock.json index 0d868b1c1b358..5335acdd5ce59 100644 --- a/package-lock.json +++ b/package-lock.json @@ -108,7 +108,7 @@ "graceful-fs": "^4.2.11", "hosted-git-info": "^7.0.0", "ini": "^4.1.1", - "init-package-json": "^5.0.0", + "init-package-json": "^6.0.0", "is-cidr": "^4.0.2", "json-parse-even-better-errors": "^3.0.0", "libnpmaccess": "^7.0.2", @@ -6897,57 +6897,51 @@ } }, "node_modules/init-package-json": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-5.0.0.tgz", - "integrity": "sha512-kBhlSheBfYmq3e0L1ii+VKe3zBTLL5lDCDWR+f9dLmEGSB3MqLlMlsolubSsyI88Bg6EA+BIMlomAnQ1SwgQBw==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-6.0.0.tgz", + "integrity": "sha512-AmXD+Aht5iZGo/y1KUtZSUQ1SltesXHxQuc7qeNz0eUGx/8WgkHeeQLSFdM8l9YpmnnamGIbAxVdAs2xoLRKRQ==", "inBundle": true, "dependencies": { - "npm-package-arg": "^10.0.0", + "npm-package-arg": "^11.0.0", "promzard": "^1.0.0", "read": "^2.0.0", - "read-package-json": "^6.0.0", + "read-package-json": "^7.0.0", "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4", "validate-npm-package-name": "^5.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" } }, - "node_modules/init-package-json/node_modules/hosted-git-info": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz", - "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==", + "node_modules/init-package-json/node_modules/normalize-package-data": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.0.tgz", + "integrity": "sha512-UL7ELRVxYBHBgYEtZCXjxuD5vPxnmvMGq0jp/dGPKKrN7tfsBh2IY7TlJ15WWwdjRWD3RJbnsygUurTK3xkPkg==", "inBundle": true, "dependencies": { - "lru-cache": "^7.5.1" + "hosted-git-info": 
"^7.0.0", + "is-core-module": "^2.8.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/init-package-json/node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "inBundle": true, - "engines": { - "node": ">=12" + "node": "^16.14.0 || >=18.0.0" } }, - "node_modules/init-package-json/node_modules/npm-package-arg": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz", - "integrity": "sha512-uFyyCEmgBfZTtrKk/5xDfHp6+MdrqGotX/VoOyEEl3mBwiEE5FlBaePanazJSVMPT7vKepcjYBY2ztg9A3yPIA==", + "node_modules/init-package-json/node_modules/read-package-json": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-7.0.0.tgz", + "integrity": "sha512-uL4Z10OKV4p6vbdvIXB+OzhInYtIozl/VxUBPgNkBuUi2DeRonnuspmaVAMcrkmfjKGNmRndyQAbE7/AmzGwFg==", "inBundle": true, "dependencies": { - "hosted-git-info": "^6.0.0", - "proc-log": "^3.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^5.0.0" + "glob": "^10.2.2", + "json-parse-even-better-errors": "^3.0.0", + "normalize-package-data": "^6.0.0", + "npm-normalize-package-bin": "^3.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" } }, "node_modules/internal-slot": { @@ -9910,7 +9904,6 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-5.0.0.tgz", "integrity": "sha512-h9iPVIfrVZ9wVYQnxFgtw1ugSvGEMOlyPWWtm8BMJhnwyEL/FLbYbTY3V3PpjI/BUK67n9PEWDu6eHzu1fB15Q==", - "inBundle": true, "dependencies": { "hosted-git-info": "^6.0.0", "is-core-module": "^2.8.1", @@ -9925,7 +9918,6 @@ "version": "6.1.1", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-6.1.1.tgz", "integrity": "sha512-r0EI+HBMcXadMrugk0GCQ+6BQV39PiWAZVfq7oIckeGiN7sjRGyQxPdft3nQekFTCQbYxLBH+/axZMeH8UX6+w==", - "inBundle": true, "dependencies": { "lru-cache": "^7.5.1" }, @@ -9937,7 +9929,6 @@ "version": "7.18.3", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", - "inBundle": true, "engines": { "node": ">=12" } @@ -11277,7 +11268,6 @@ "version": "6.0.4", "resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-6.0.4.tgz", "integrity": "sha512-AEtWXYfopBj2z5N5PbkAOeNHRPUg5q+Nen7QLxV8M2zJq1ym6/lCz3fYNTCXe19puu2d06jfHhrP7v/S2PtMMw==", - "inBundle": true, "dependencies": { "glob": "^10.2.2", "json-parse-even-better-errors": "^3.0.0", diff --git a/package.json b/package.json index f13feb344bbfa..06672c07f8fdb 100644 --- a/package.json +++ b/package.json @@ -73,7 +73,7 @@ "graceful-fs": "^4.2.11", "hosted-git-info": "^7.0.0", "ini": "^4.1.1", - "init-package-json": "^5.0.0", + "init-package-json": "^6.0.0", "is-cidr": "^4.0.2", "json-parse-even-better-errors": "^3.0.0", "libnpmaccess": "^7.0.2",