This commit is contained in:
2025-08-18 23:06:34 +08:00
parent 0bc04fb659
commit ed18af0cad
1926 changed files with 275098 additions and 0 deletions

View File

@@ -0,0 +1,143 @@
// add and remove dependency specs to/from pkg manifest
const { log } = require('proc-log')
const localeCompare = require('@isaacs/string-locale-compare')('en')
// Merge each requested dependency spec into the pkg manifest, first clearing
// out any dependency types that cannot coexist with the one being saved, and
// keeping bundleDependencies sorted and unique when requested.
const add = ({ pkg, add, saveBundle, saveType }) => {
  for (const { name, rawSpec } of add) {
    // if the user does not give us a type, we infer which type(s)
    // to keep based on the same order of priority we do when
    // building the tree as defined in the _loadDeps method of
    // the node class.
    const addSaveType = saveType || inferSaveType(pkg, name)
    switch (addSaveType) {
      case 'prod':
        // a production dependency can only exist as production (rpj ensures it
        // doesn't coexist w/ optional)
        deleteSubKey(pkg, 'devDependencies', name, 'dependencies')
        deleteSubKey(pkg, 'peerDependencies', name, 'dependencies')
        break
      case 'dev':
        // a dev dependency may co-exist as peer, or optional, but not production
        deleteSubKey(pkg, 'dependencies', name, 'devDependencies')
        break
      case 'optional':
        // an optional dependency may co-exist as dev (rpj ensures it doesn't
        // coexist w/ prod)
        deleteSubKey(pkg, 'peerDependencies', name, 'optionalDependencies')
        break
      default:
        // peer or peerOptional is all that's left
        // a peer dependency may coexist as dev
        deleteSubKey(pkg, 'dependencies', name, 'peerDependencies')
        deleteSubKey(pkg, 'optionalDependencies', name, 'peerDependencies')
    }
    const depType = saveTypeMap.get(addSaveType)
    pkg[depType] = pkg[depType] || {}
    // a bare '*' never overwrites an existing, more specific spec
    if (rawSpec !== '*' || pkg[depType][name] === undefined) {
      pkg[depType][name] = rawSpec
    }
    if (addSaveType === 'optional') {
      // Affordance for previous npm versions that require this behaviour
      pkg.dependencies = pkg.dependencies || {}
      pkg.dependencies[name] = pkg.optionalDependencies[name]
    }
    if (addSaveType === 'peer' || addSaveType === 'peerOptional') {
      const pdm = pkg.peerDependenciesMeta || {}
      if (addSaveType === 'peer' && pdm[name] && pdm[name].optional) {
        pdm[name].optional = false
      } else if (addSaveType === 'peerOptional') {
        pdm[name] = pdm[name] || {}
        pdm[name].optional = true
        pkg.peerDependenciesMeta = pdm
      }
      // peerDeps are often also a devDep, so that they can be tested when
      // using package managers that don't auto-install peer deps
      if (pkg.devDependencies && pkg.devDependencies[name] !== undefined) {
        pkg.devDependencies[name] = pkg.peerDependencies[name]
      }
    }
    if (saveBundle && addSaveType !== 'peer' && addSaveType !== 'peerOptional') {
      // keep it sorted, keep it unique
      const bundled = new Set(pkg.bundleDependencies || [])
      bundled.add(name)
      pkg.bundleDependencies = [...bundled].sort(localeCompare)
    }
  }
  return pkg
}
// Canonical source of both the map between saveType and where it correlates to
// in the package, and the names of all our dependencies attributes.
// NOTE: insertion order matters — inferSaveType walks the keys in this order.
const saveTypeMap = new Map(Object.entries({
  dev: 'devDependencies',
  optional: 'optionalDependencies',
  prod: 'dependencies',
  peerOptional: 'peerDependencies',
  peer: 'peerDependencies',
}))
// Finds where the package is already listed in the manifest and infers the
// saveType from that; falls back to 'prod' when it appears nowhere.
const inferSaveType = (pkg, name) => {
  for (const [type, field] of saveTypeMap.entries()) {
    if (!hasSubKey(pkg, field, name)) {
      continue
    }
    // a peerDependency without optional:true metadata is a plain peer dep,
    // even though 'peerOptional' maps to the same manifest field
    const meta = hasSubKey(pkg, 'peerDependenciesMeta', name) &&
      pkg.peerDependenciesMeta[name].optional
    if (type === 'peerOptional' && !meta) {
      return 'peer'
    }
    return type
  }
  return 'prod'
}
// True only when pkg[depType] exists AND carries `name` as an own property
// (inherited keys like `toString` must not count).
const hasSubKey = (pkg, depType, name) => {
  const deps = pkg[depType]
  return deps && Object.prototype.hasOwnProperty.call(deps, name)
}
// Removes pkg[depType][name], warning when the removal is a replacement by
// another dep type, and prunes any objects emptied by the removal
// (including peerDependenciesMeta).
const deleteSubKey = (pkg, depType, name, replacedBy) => {
  if (!hasSubKey(pkg, depType, name)) {
    return
  }
  if (replacedBy) {
    log.warn('idealTree', `Removing ${depType}.${name} in favor of ${replacedBy}.${name}`)
  }
  delete pkg[depType][name]
  // clean up peerDepsMeta if we are removing something from peerDependencies
  if (depType === 'peerDependencies' && pkg.peerDependenciesMeta) {
    delete pkg.peerDependenciesMeta[name]
    if (Object.keys(pkg.peerDependenciesMeta).length === 0) {
      delete pkg.peerDependenciesMeta
    }
  }
  if (Object.keys(pkg[depType]).length === 0) {
    delete pkg[depType]
  }
}
// Remove every named dep from all dependency fields of the manifest, and
// drop the names from bundleDependencies as well (deleting the field if
// nothing remains).
const rm = (pkg, rm) => {
  for (const depType of new Set(saveTypeMap.values())) {
    for (const name of rm) {
      deleteSubKey(pkg, depType, name)
    }
  }
  if (pkg.bundleDependencies) {
    const keep = pkg.bundleDependencies.filter(name => !rm.includes(name))
    if (keep.length) {
      pkg.bundleDependencies = keep
    } else {
      delete pkg.bundleDependencies
    }
  }
  return pkg
}
module.exports = { add, rm, saveTypeMap, hasSubKey }

File diff suppressed because it is too large. (Load Diff)

View File

@@ -0,0 +1,280 @@
// The arborist manages three trees:
// - actual
// - virtual
// - ideal
//
// The actual tree is what's present on disk in the node_modules tree
// and elsewhere that links may extend.
//
// The virtual tree is loaded from metadata (package.json and lock files).
//
// The ideal tree is what we WANT that actual tree to become. This starts
// with the virtual tree, and then applies the options requesting
// add/remove/update actions.
//
// To reify a tree, we calculate a diff between the ideal and actual trees,
// and then turn the actual tree into the ideal tree by taking the actions
// required. At the end of the reification process, the actualTree is
// updated to reflect the changes.
//
// Each tree has an Inventory at the root. Shrinkwrap is tracked by Arborist
// instance. It always refers to the actual tree, but is updated (and written
// to disk) on reification.
// Each of the mixin "classes" adds functionality, but are not dependent on
// constructor call order. So, we just load them in an array, and build up
// the base class, so that the overall voltron class is easier to test and
// cover, and separation of concerns can be maintained.
const { resolve } = require('node:path')
const { homedir } = require('node:os')
const { depth } = require('treeverse')
const mapWorkspaces = require('@npmcli/map-workspaces')
const { log, time } = require('proc-log')
const { saveTypeMap } = require('../add-rm-pkg-deps.js')
const AuditReport = require('../audit-report.js')
const relpath = require('../relpath.js')
const PackumentCache = require('../packument-cache.js')
// Mixins are applied in array order; none of them depend on constructor
// call order, so the final class is just the fold of all of them.
const mixins = [
  require('../tracker.js'),
  require('./build-ideal-tree.js'),
  require('./load-actual.js'),
  require('./load-virtual.js'),
  require('./rebuild.js'),
  require('./reify.js'),
  require('./isolated-reifier.js'),
]
// global-registry symbol so mixins and this class share the workspace loader
const _setWorkspaces = Symbol.for('setWorkspaces')
// build the base class by folding every mixin over EventEmitter
const Base = mixins.reduce((a, b) => b(a), require('node:events'))
// Normalize a lockfileVersion config value:
// the numbers 1, 2, or 3 pass through; undefined/null become null
// (meaning "no explicit preference"); anything else is rejected.
// Note: numeric strings like '2' are deliberately NOT accepted.
const lockfileVersion = lfv => {
  if ([1, 2, 3].includes(lfv)) {
    return lfv
  }
  if (lfv == null) {
    return null
  }
  throw new TypeError(`Invalid lockfileVersion config: ${lfv}`)
}
// The public Arborist class: normalizes options in the constructor and adds
// the workspace-set helpers plus the audit/dedupe entry points on top of the
// mixin-built Base (tracker, build-ideal-tree, load-actual, load-virtual,
// rebuild, reify, isolated-reifier).
class Arborist extends Base {
  constructor (options = {}) {
    const timeEnd = time.start('arborist:ctor')
    super(options)
    // Normalized options object; note the `...options` spread comes first so
    // every explicit entry below overrides whatever the caller passed raw.
    this.options = {
      nodeVersion: process.version,
      ...options,
      Arborist: this.constructor,
      binLinks: 'binLinks' in options ? !!options.binLinks : true,
      cache: options.cache || `${homedir()}/.npm/_cacache`,
      dryRun: !!options.dryRun,
      formatPackageLock: 'formatPackageLock' in options ? !!options.formatPackageLock : true,
      force: !!options.force,
      global: !!options.global,
      ignoreScripts: !!options.ignoreScripts,
      // global installs are always shallow; otherwise default to hoisted
      installStrategy: options.global ? 'shallow' : (options.installStrategy ? options.installStrategy : 'hoisted'),
      lockfileVersion: lockfileVersion(options.lockfileVersion),
      packageLockOnly: !!options.packageLockOnly,
      packumentCache: options.packumentCache || new PackumentCache(),
      path: options.path || '.',
      rebuildBundle: 'rebuildBundle' in options ? !!options.rebuildBundle : true,
      replaceRegistryHost: options.replaceRegistryHost,
      savePrefix: 'savePrefix' in options ? options.savePrefix : '^',
      scriptShell: options.scriptShell,
      workspaces: options.workspaces || [],
      workspacesEnabled: options.workspacesEnabled !== false,
    }
    // TODO we only ever look at this.options.replaceRegistryHost, not
    // this.replaceRegistryHost. Defaulting needs to be written back to
    // this.options to work properly
    this.replaceRegistryHost = this.options.replaceRegistryHost =
      (!this.options.replaceRegistryHost || this.options.replaceRegistryHost === 'npmjs') ?
        'registry.npmjs.org' : this.options.replaceRegistryHost
    if (options.saveType && !saveTypeMap.get(options.saveType)) {
      throw new Error(`Invalid saveType ${options.saveType}`)
    }
    this.cache = resolve(this.options.cache)
    this.diff = null
    this.path = resolve(this.options.path)
    timeEnd()
  }

  // TODO: We should change these to static functions instead
  // of methods for the next major version

  // Get the actual nodes corresponding to a root node's child workspaces,
  // given a list of workspace names. Names not found in the workspace map or
  // the inventory are warned about and skipped, never thrown.
  workspaceNodes (tree, workspaces) {
    const wsMap = tree.workspaces
    if (!wsMap) {
      log.warn('workspaces', 'filter set, but no workspaces present')
      return []
    }
    const nodes = []
    for (const name of workspaces) {
      const path = wsMap.get(name)
      if (!path) {
        log.warn('workspaces', `${name} in filter set, but not in workspaces`)
        continue
      }
      const loc = relpath(tree.realpath, path)
      const node = tree.inventory.get(loc)
      if (!node) {
        log.warn('workspaces', `${name} in filter set, but no workspace folder present`)
        continue
      }
      nodes.push(node)
    }
    return nodes
  }

  // returns a set of workspace nodes and all their deps
  // (transitive closure over edgesOut, pulling in link targets and any
  // extraneous children found along the way)
  // TODO why is includeWorkspaceRoot a param?
  // TODO why is workspaces a param?
  workspaceDependencySet (tree, workspaces, includeWorkspaceRoot) {
    const wsNodes = this.workspaceNodes(tree, workspaces)
    if (includeWorkspaceRoot) {
      // also seed the set with the root's own non-workspace deps
      for (const edge of tree.edgesOut.values()) {
        if (edge.type !== 'workspace' && edge.to) {
          wsNodes.push(edge.to)
        }
      }
    }
    const wsDepSet = new Set(wsNodes)
    const extraneous = new Set()
    // NOTE: iterating a Set while adding to it visits the new entries too,
    // which is what makes this a transitive walk
    for (const node of wsDepSet) {
      for (const edge of node.edgesOut.values()) {
        const dep = edge.to
        if (dep) {
          wsDepSet.add(dep)
          if (dep.isLink) {
            wsDepSet.add(dep.target)
          }
        }
      }
      for (const child of node.children.values()) {
        if (child.extraneous) {
          extraneous.add(child)
        }
      }
    }
    for (const extra of extraneous) {
      wsDepSet.add(extra)
    }
    return wsDepSet
  }

  // returns a set of root dependencies, excluding dependencies that are
  // exclusively workspace dependencies
  excludeWorkspacesDependencySet (tree) {
    const rootDepSet = new Set()
    depth({
      tree,
      visit: node => {
        for (const { to } of node.edgesOut.values()) {
          if (!to || to.isWorkspace) {
            continue
          }
          // keep the dep only if something already in the root set (or the
          // root itself) depends on it
          for (const edgeIn of to.edgesIn.values()) {
            if (edgeIn.from.isRoot || rootDepSet.has(edgeIn.from)) {
              rootDepSet.add(to)
            }
          }
        }
        return node
      },
      filter: node => node,
      getChildren: (node, tree) =>
        [...tree.edgesOut.values()].map(edge => edge.to),
    })
    return rootDepSet
  }

  // Load the workspace map for a node's package and attach it when non-empty.
  // Shared with the loader mixins via the global _setWorkspaces symbol.
  async [_setWorkspaces] (node) {
    const workspaces = await mapWorkspaces({
      cwd: node.path,
      pkg: node.package,
    })
    if (node && workspaces.size) {
      node.workspaces = workspaces
    }
    return node
  }

  // Run an audit over the virtual tree (or a freshly built ideal tree when
  // packageLock === false), optionally reifying fixes when options.fix is set.
  // Throws EAUDITGLOBAL in global mode. Returns the AuditReport, or the
  // reify() result when fixing.
  async audit (options = {}) {
    this.addTracker('audit')
    if (this.options.global) {
      throw Object.assign(
        new Error('`npm audit` does not support testing globals'),
        { code: 'EAUDITGLOBAL' }
      )
    }
    // allow the user to set options on the ctor as well.
    // XXX: deprecate separate method options objects.
    options = { ...this.options, ...options }
    const timeEnd = time.start('audit')
    let tree
    if (options.packageLock === false) {
      // build ideal tree
      await this.loadActual(options)
      await this.buildIdealTree()
      tree = this.idealTree
    } else {
      tree = await this.loadVirtual()
    }
    // narrow the report to the requested workspaces, or exclude
    // workspace-only deps entirely when workspaces are disabled
    if (this.options.workspaces.length) {
      options.filterSet = this.workspaceDependencySet(
        tree,
        this.options.workspaces,
        this.options.includeWorkspaceRoot
      )
    }
    if (!options.workspacesEnabled) {
      options.filterSet =
        this.excludeWorkspacesDependencySet(tree)
    }
    this.auditReport = await AuditReport.load(tree, options)
    const ret = options.fix ? this.reify(options) : this.auditReport
    timeEnd()
    this.finishTracker('audit')
    return ret
  }

  // Reify with preferDedupe set, targeting every package name that appears
  // more than once in the tree's inventory.
  async dedupe (options = {}) {
    // allow the user to set options on the ctor as well.
    // XXX: deprecate separate method options objects.
    options = { ...this.options, ...options }
    const tree = await this.loadVirtual().catch(() => this.loadActual())
    const names = []
    for (const name of tree.inventory.query('name')) {
      if (tree.inventory.query('name', name).size > 1) {
        names.push(name)
      }
    }
    return this.reify({
      ...options,
      preferDedupe: true,
      update: { names },
    })
  }
}
module.exports = Arborist

View File

@@ -0,0 +1,453 @@
// private symbols for this mixin's internal steps
const _makeIdealGraph = Symbol('makeIdealGraph')
// shared via the global symbol registry so reify.js can invoke it
const _createIsolatedTree = Symbol.for('createIsolatedTree')
const _createBundledTree = Symbol('createBundledTree')
const { mkdirSync } = require('node:fs')
const pacote = require('pacote')
const { join } = require('node:path')
const { depth } = require('treeverse')
const crypto = require('node:crypto')
// cache complicated function results
// NOTE: the result object is cached *before* fn runs and is mutated in place
// by fn; this lets re-entrant/circular lookups of the same key observe the
// same (possibly still-filling) object instead of recursing forever.
const memoize = (fn) => {
  const cache = new Map()
  return async function (arg) {
    if (cache.has(arg)) {
      return cache.get(arg)
    }
    const placeholder = {}
    cache.set(arg, placeholder)
    await fn(placeholder, arg)
    return placeholder
  }
}
module.exports = cls => class IsolatedReifier extends cls {
/**
* Create an ideal graph.
*
* An implementation of npm RFC-0042
* https://github.com/npm/rfcs/blob/main/accepted/0042-isolated-mode.md
*
* This entire file should be considered technical debt that will be resolved
* with an Arborist refactor or rewrite. Embedded logic in Nodes and Links,
* and the incremental state of building trees and reifying contains too many
* assumptions to do a linked mode properly.
*
* Instead, this approach takes a tree built from build-ideal-tree, and
* returns a new tree-like structure without the embedded logic of Node and
* Link classes.
*
* Since the RFC requires leaving the package-lock in place, this approach
* temporarily replaces the tree state for a couple of steps of reifying.
*
**/
async [_makeIdealGraph] (options) {
/* Make sure that the ideal tree is build as the rest of
* the algorithm depends on it.
*/
const bitOpt = {
...options,
complete: false,
}
await this.buildIdealTree(bitOpt)
const idealTree = this.idealTree
this.rootNode = {}
const root = this.rootNode
this.counter = 0
// memoize to cache generating proxy Nodes
this.externalProxyMemo = memoize(this.externalProxy.bind(this))
this.workspaceProxyMemo = memoize(this.workspaceProxy.bind(this))
root.external = []
root.isProjectRoot = true
root.localLocation = idealTree.location
root.localPath = idealTree.path
root.workspaces = await Promise.all(
Array.from(idealTree.fsChildren.values(), this.workspaceProxyMemo))
const processed = new Set()
const queue = [idealTree, ...idealTree.fsChildren]
while (queue.length !== 0) {
const next = queue.pop()
if (processed.has(next.location)) {
continue
}
processed.add(next.location)
next.edgesOut.forEach(e => {
if (!e.to || (next.package.bundleDependencies || next.package.bundledDependencies || []).includes(e.to.name)) {
return
}
queue.push(e.to)
})
if (!next.isProjectRoot && !next.isWorkspace) {
root.external.push(await this.externalProxyMemo(next))
}
}
await this.assignCommonProperties(idealTree, root)
this.idealGraph = root
}
async workspaceProxy (result, node) {
result.localLocation = node.location
result.localPath = node.path
result.isWorkspace = true
result.resolved = node.resolved
await this.assignCommonProperties(node, result)
}
async externalProxy (result, node) {
await this.assignCommonProperties(node, result)
if (node.hasShrinkwrap) {
const dir = join(
node.root.path,
'node_modules',
'.store',
`${node.name}@${node.version}`
)
mkdirSync(dir, { recursive: true })
// TODO this approach feels wrong
// and shouldn't be necessary for shrinkwraps
await pacote.extract(node.resolved, dir, {
...this.options,
resolved: node.resolved,
integrity: node.integrity,
})
const Arborist = this.constructor
const arb = new Arborist({ ...this.options, path: dir })
await arb[_makeIdealGraph]({ dev: false })
this.rootNode.external.push(...arb.idealGraph.external)
arb.idealGraph.external.forEach(e => {
e.root = this.rootNode
e.id = `${node.id}=>${e.id}`
})
result.localDependencies = []
result.externalDependencies = arb.idealGraph.externalDependencies
result.externalOptionalDependencies = arb.idealGraph.externalOptionalDependencies
result.dependencies = [
...result.externalDependencies,
...result.localDependencies,
...result.externalOptionalDependencies,
]
}
result.optional = node.optional
result.resolved = node.resolved
result.version = node.version
}
async assignCommonProperties (node, result) {
function validEdgesOut (node) {
return [...node.edgesOut.values()].filter(e => e.to && e.to.target && !(node.package.bundledDepenedencies || node.package.bundleDependencies || []).includes(e.to.name))
}
const edges = validEdgesOut(node)
const optionalDeps = edges.filter(e => e.optional).map(e => e.to.target)
const nonOptionalDeps = edges.filter(e => !e.optional).map(e => e.to.target)
result.localDependencies = await Promise.all(nonOptionalDeps.filter(n => n.isWorkspace).map(this.workspaceProxyMemo))
result.externalDependencies = await Promise.all(nonOptionalDeps.filter(n => !n.isWorkspace).map(this.externalProxyMemo))
result.externalOptionalDependencies = await Promise.all(optionalDeps.map(this.externalProxyMemo))
result.dependencies = [
...result.externalDependencies,
...result.localDependencies,
...result.externalOptionalDependencies,
]
result.root = this.rootNode
result.id = this.counter++
result.name = node.name
result.package = { ...node.package }
result.package.bundleDependencies = undefined
result.hasInstallScript = node.hasInstallScript
}
async [_createBundledTree] () {
// TODO: make sure that idealTree object exists
const idealTree = this.idealTree
// TODO: test workspaces having bundled deps
const queue = []
for (const [, edge] of idealTree.edgesOut) {
if (edge.to && (idealTree.package.bundleDependencies || idealTree.package.bundledDependencies || []).includes(edge.to.name)) {
queue.push({ from: idealTree, to: edge.to })
}
}
for (const child of idealTree.fsChildren) {
for (const [, edge] of child.edgesOut) {
if (edge.to && (child.package.bundleDependencies || child.package.bundledDependencies || []).includes(edge.to.name)) {
queue.push({ from: child, to: edge.to })
}
}
}
const processed = new Set()
const nodes = new Map()
const edges = []
while (queue.length !== 0) {
const nextEdge = queue.pop()
const key = `${nextEdge.from.location}=>${nextEdge.to.location}`
// should be impossible, unless bundled is duped
/* istanbul ignore next */
if (processed.has(key)) {
continue
}
processed.add(key)
const from = nextEdge.from
if (!from.isRoot && !from.isWorkspace) {
nodes.set(from.location, { location: from.location, resolved: from.resolved, name: from.name, optional: from.optional, pkg: { ...from.package, bundleDependencies: undefined } })
}
const to = nextEdge.to
nodes.set(to.location, { location: to.location, resolved: to.resolved, name: to.name, optional: to.optional, pkg: { ...to.package, bundleDependencies: undefined } })
edges.push({ from: from.isRoot ? 'root' : from.location, to: to.location })
to.edgesOut.forEach(e => {
// an edge out should always have a to
/* istanbul ignore else */
if (e.to) {
queue.push({ from: e.from, to: e.to })
}
})
}
return { edges, nodes }
}
async [_createIsolatedTree] () {
await this[_makeIdealGraph](this.options)
const proxiedIdealTree = this.idealGraph
const bundledTree = await this[_createBundledTree]()
const treeHash = (startNode) => {
// generate short hash based on the dependency tree
// starting at this node
const deps = []
const branch = []
depth({
tree: startNode,
getChildren: node => node.dependencies,
filter: node => node,
visit: node => {
branch.push(`${node.name}@${node.version}`)
deps.push(`${branch.join('->')}::${node.resolved}`)
},
leave: () => {
branch.pop()
},
})
deps.sort()
return crypto.createHash('shake256', { outputLength: 16 })
.update(deps.join(','))
.digest('base64')
// Node v14 doesn't support base64url
.replace(/\+/g, '-')
.replace(/\//g, '_')
.replace(/=+$/m, '')
}
const getKey = (idealTreeNode) => {
return `${idealTreeNode.name}@${idealTreeNode.version}-${treeHash(idealTreeNode)}`
}
const root = {
fsChildren: [],
integrity: null,
inventory: new Map(),
isLink: false,
isRoot: true,
binPaths: [],
edgesIn: new Set(),
edgesOut: new Map(),
hasShrinkwrap: false,
parent: null,
// TODO: we should probably not reference this.idealTree
resolved: this.idealTree.resolved,
isTop: true,
path: proxiedIdealTree.root.localPath,
realpath: proxiedIdealTree.root.localPath,
package: proxiedIdealTree.root.package,
meta: { loadedFromDisk: false },
global: false,
isProjectRoot: true,
children: [],
}
// root.inventory.set('', t)
// root.meta = this.idealTree.meta
// TODO We should mock better the inventory object because it is used by audit-report.js ... maybe
root.inventory.query = () => {
return []
}
const processed = new Set()
proxiedIdealTree.workspaces.forEach(c => {
const workspace = {
edgesIn: new Set(),
edgesOut: new Map(),
children: [],
hasInstallScript: c.hasInstallScript,
binPaths: [],
package: c.package,
location: c.localLocation,
path: c.localPath,
realpath: c.localPath,
resolved: c.resolved,
}
root.fsChildren.push(workspace)
root.inventory.set(workspace.location, workspace)
})
const generateChild = (node, location, pkg, inStore) => {
const newChild = {
global: false,
globalTop: false,
isProjectRoot: false,
isTop: false,
location,
name: node.name,
optional: node.optional,
top: { path: proxiedIdealTree.root.localPath },
children: [],
edgesIn: new Set(),
edgesOut: new Map(),
binPaths: [],
fsChildren: [],
/* istanbul ignore next -- emulate Node */
getBundler () {
return null
},
hasShrinkwrap: false,
inDepBundle: false,
integrity: null,
isLink: false,
isRoot: false,
isInStore: inStore,
path: join(proxiedIdealTree.root.localPath, location),
realpath: join(proxiedIdealTree.root.localPath, location),
resolved: node.resolved,
version: pkg.version,
package: pkg,
}
newChild.target = newChild
root.children.push(newChild)
root.inventory.set(newChild.location, newChild)
}
proxiedIdealTree.external.forEach(c => {
const key = getKey(c)
if (processed.has(key)) {
return
}
processed.add(key)
const location = join('node_modules', '.store', key, 'node_modules', c.name)
generateChild(c, location, c.package, true)
})
bundledTree.nodes.forEach(node => {
generateChild(node, node.location, node.pkg, false)
})
bundledTree.edges.forEach(e => {
const from = e.from === 'root' ? root : root.inventory.get(e.from)
const to = root.inventory.get(e.to)
// Maybe optional should be propagated from the original edge
const edge = { optional: false, from, to }
from.edgesOut.set(to.name, edge)
to.edgesIn.add(edge)
})
const memo = new Set()
function processEdges (node, externalEdge) {
externalEdge = !!externalEdge
const key = getKey(node)
if (memo.has(key)) {
return
}
memo.add(key)
let from, nmFolder
if (externalEdge) {
const fromLocation = join('node_modules', '.store', key, 'node_modules', node.name)
from = root.children.find(c => c.location === fromLocation)
nmFolder = join('node_modules', '.store', key, 'node_modules')
} else {
from = node.isProjectRoot ? root : root.fsChildren.find(c => c.location === node.localLocation)
nmFolder = join(node.localLocation, 'node_modules')
}
const processDeps = (dep, optional, external) => {
optional = !!optional
external = !!external
const location = join(nmFolder, dep.name)
const binNames = dep.package.bin && Object.keys(dep.package.bin) || []
const toKey = getKey(dep)
let target
if (external) {
const toLocation = join('node_modules', '.store', toKey, 'node_modules', dep.name)
target = root.children.find(c => c.location === toLocation)
} else {
target = root.fsChildren.find(c => c.location === dep.localLocation)
}
// TODO: we should no-op is an edge has already been created with the same fromKey and toKey
binNames.forEach(bn => {
target.binPaths.push(join(from.realpath, 'node_modules', '.bin', bn))
})
const link = {
global: false,
globalTop: false,
isProjectRoot: false,
edgesIn: new Set(),
edgesOut: new Map(),
binPaths: [],
isTop: false,
optional,
location: location,
path: join(dep.root.localPath, nmFolder, dep.name),
realpath: target.path,
name: toKey,
resolved: dep.resolved,
top: { path: dep.root.localPath },
children: [],
fsChildren: [],
isLink: true,
isStoreLink: true,
isRoot: false,
package: { _id: 'abc', bundleDependencies: undefined, deprecated: undefined, bin: target.package.bin, scripts: dep.package.scripts },
target,
}
const newEdge1 = { optional, from, to: link }
from.edgesOut.set(dep.name, newEdge1)
link.edgesIn.add(newEdge1)
const newEdge2 = { optional: false, from: link, to: target }
link.edgesOut.set(dep.name, newEdge2)
target.edgesIn.add(newEdge2)
root.children.push(link)
}
for (const dep of node.localDependencies) {
processEdges(dep, false)
// nonOptional, local
processDeps(dep, false, false)
}
for (const dep of node.externalDependencies) {
processEdges(dep, true)
// nonOptional, external
processDeps(dep, false, true)
}
for (const dep of node.externalOptionalDependencies) {
processEdges(dep, true)
// optional, external
processDeps(dep, true, true)
}
}
processEdges(proxiedIdealTree, false)
for (const node of proxiedIdealTree.workspaces) {
processEdges(node, false)
}
root.children.forEach(c => c.parent = root)
root.children.forEach(c => c.root = root)
root.root = root
root.target = root
return root
}
}

View File

@@ -0,0 +1,438 @@
// mix-in implementing the loadActual method
const { relative, dirname, resolve, join, normalize } = require('node:path')
const rpj = require('read-package-json-fast')
const { readdirScoped } = require('@npmcli/fs')
const { walkUp } = require('walk-up-path')
const ancestorPath = require('common-ancestor-path')
const treeCheck = require('../tree-check.js')
const Shrinkwrap = require('../shrinkwrap.js')
const calcDepFlags = require('../calc-dep-flags.js')
const Node = require('../node.js')
const Link = require('../link.js')
const realpath = require('../realpath.js')
// public symbols
// shared with other mixins/classes via the global symbol registry
const _changePath = Symbol.for('_changePath')
const _setWorkspaces = Symbol.for('setWorkspaces')
// per-instance caches for realpath and stat results
const _rpcache = Symbol.for('realpathCache')
const _stcache = Symbol.for('statCache')
module.exports = cls => class ActualLoader extends cls {
#actualTree
// ensure when walking the tree that we don't call loadTree on the same
// actual node more than one time.
#actualTreeLoaded = new Set()
#actualTreePromise
// cache of nodes when loading the actualTree, so that we avoid loaded the
// same node multiple times when symlinks attack.
#cache = new Map()
#filter
// cache of link targets for setting fsParent links
// We don't do fsParent as a magic getter/setter, because it'd be too costly
// to keep up to date along the walk.
// And, we know that it can ONLY be relevant when the node is a target of a
// link, otherwise it'd be in a node_modules folder, so take advantage of
// that to limit the scans later.
#topNodes = new Set()
#transplantFilter
constructor (options) {
super(options)
// the tree of nodes on disk
this.actualTree = options.actualTree
// caches for cached realpath calls
const cwd = process.cwd()
// assume that the cwd is real enough for our purposes
this[_rpcache] = new Map([[cwd, cwd]])
this[_stcache] = new Map()
}
// public method
// TODO remove options param in next semver major
async loadActual (options = {}) {
// In the past this.actualTree was set as a promise that eventually
// resolved, and overwrite this.actualTree with the resolved value. This
// was a problem because virtually no other code expects this.actualTree to
// be a promise. Instead we only set it once resolved, and also return it
// from the promise so that it is what's returned from this function when
// awaited.
if (this.actualTree) {
return this.actualTree
}
if (!this.#actualTreePromise) {
// allow the user to set options on the ctor as well.
// XXX: deprecate separate method options objects.
options = { ...this.options, ...options }
this.#actualTreePromise = this.#loadActual(options)
.then(tree => {
// reset all deps to extraneous prior to recalc
if (!options.root) {
for (const node of tree.inventory.values()) {
node.extraneous = true
}
}
// only reset root flags if we're not re-rooting,
// otherwise leave as-is
calcDepFlags(tree, !options.root)
this.actualTree = treeCheck(tree)
return this.actualTree
})
}
return this.#actualTreePromise
}
// return the promise so that we don't ever have more than one going at the
// same time. This is so that buildIdealTree can default to the actualTree
// if no shrinkwrap present, but reify() can still call buildIdealTree and
// loadActual in parallel safely.
async #loadActual (options) {
// mostly realpath to throw if the root doesn't exist
const {
global,
filter = () => true,
root = null,
transplantFilter = () => true,
ignoreMissing = false,
forceActual = false,
} = options
this.#filter = filter
this.#transplantFilter = transplantFilter
if (global) {
const real = await realpath(this.path, this[_rpcache], this[_stcache])
const params = {
path: this.path,
realpath: real,
pkg: {},
global,
loadOverrides: true,
}
if (this.path === real) {
this.#actualTree = this.#newNode(params)
} else {
this.#actualTree = await this.#newLink(params)
}
} else {
// not in global mode, hidden lockfile is allowed, load root pkg too
this.#actualTree = await this.#loadFSNode({
path: this.path,
real: await realpath(this.path, this[_rpcache], this[_stcache]),
loadOverrides: true,
})
this.#actualTree.assertRootOverrides()
// if forceActual is set, don't even try the hidden lockfile
if (!forceActual) {
// Note: hidden lockfile will be rejected if it's not the latest thing
// in the folder, or if any of the entries in the hidden lockfile are
// missing.
const meta = await Shrinkwrap.load({
path: this.#actualTree.path,
hiddenLockfile: true,
resolveOptions: this.options,
})
if (meta.loadedFromDisk) {
this.#actualTree.meta = meta
// have to load on a new Arborist object, so we don't assign
// the virtualTree on this one! Also, the weird reference is because
// we can't easily get a ref to Arborist in this module, without
// creating a circular reference, since this class is a mixin used
// to build up the Arborist class itself.
await new this.constructor({ ...this.options }).loadVirtual({
root: this.#actualTree,
})
await this[_setWorkspaces](this.#actualTree)
this.#transplant(root)
return this.#actualTree
}
}
const meta = await Shrinkwrap.load({
path: this.#actualTree.path,
lockfileVersion: this.options.lockfileVersion,
resolveOptions: this.options,
})
this.#actualTree.meta = meta
}
await this.#loadFSTree(this.#actualTree)
await this[_setWorkspaces](this.#actualTree)
// if there are workspace targets without Link nodes created, load
// the targets, so that we know what they are.
if (this.#actualTree.workspaces && this.#actualTree.workspaces.size) {
const promises = []
for (const path of this.#actualTree.workspaces.values()) {
if (!this.#cache.has(path)) {
// workspace overrides use the root overrides
const p = this.#loadFSNode({ path, root: this.#actualTree, useRootOverrides: true })
.then(node => this.#loadFSTree(node))
promises.push(p)
}
}
await Promise.all(promises)
}
if (!ignoreMissing) {
await this.#findMissingEdges()
}
// try to find a node that is the parent in a fs tree sense, but not a
// node_modules tree sense, of any link targets. this allows us to
// resolve deps that node will find, but a legacy npm view of the
// world would not have noticed.
for (const path of this.#topNodes) {
const node = this.#cache.get(path)
if (node && !node.parent && !node.fsParent) {
for (const p of walkUp(dirname(path))) {
if (this.#cache.has(p)) {
node.fsParent = this.#cache.get(p)
break
}
}
}
}
this.#transplant(root)
if (global) {
// need to depend on the children, or else all of them
// will end up being flagged as extraneous, since the
// global root isn't a "real" project
const tree = this.#actualTree
const actualRoot = tree.isLink ? tree.target : tree
const { dependencies = {} } = actualRoot.package
for (const [name, kid] of actualRoot.children.entries()) {
const def = kid.isLink ? `file:${kid.realpath.replace(/#/g, '%23')}` : '*'
dependencies[name] = dependencies[name] || def
}
actualRoot.package = { ...actualRoot.package, dependencies }
}
return this.#actualTree
}
#transplant (root) {
if (!root || root === this.#actualTree) {
return
}
this.#actualTree[_changePath](root.path)
for (const node of this.#actualTree.children.values()) {
if (!this.#transplantFilter(node)) {
node.root = null
}
}
root.replace(this.#actualTree)
for (const node of this.#actualTree.fsChildren) {
node.root = this.#transplantFilter(node) ? root : null
}
this.#actualTree = root
}
// Load a single node from the filesystem at `path`. If `real` (the
// resolved realpath) is not supplied, it is computed here; a realpath
// failure yields a dummy error Node instead of throwing. Results are
// cached by path, and a non-dummy cache hit is simply re-parented and
// returned. When the path and realpath differ, a Link is created in
// place of a plain Node.
async #loadFSNode ({ path, parent, real, root, loadOverrides, useRootOverrides }) {
  if (!real) {
    try {
      real = await realpath(path, this[_rpcache], this[_stcache])
    } catch (error) {
      // if realpath fails, just provide a dummy error node
      return new Node({
        error,
        path,
        realpath: path,
        parent,
        root,
        loadOverrides,
      })
    }
  }
  const cached = this.#cache.get(path)
  let node
  // missing edges get a dummy node, assign the parent and return it
  if (cached && !cached.dummy) {
    cached.parent = parent
    return cached
  } else {
    const params = {
      installLinks: this.installLinks,
      legacyPeerDeps: this.legacyPeerDeps,
      path,
      realpath: real,
      parent,
      root,
      loadOverrides,
    }
    try {
      const pkg = await rpj(join(real, 'package.json'))
      params.pkg = pkg
      // workspace nodes pick up their override rules from the root
      if (useRootOverrides && root.overrides) {
        params.overrides = root.overrides.getNodeRule({ name: pkg.name, version: pkg.version })
      }
    } catch (err) {
      params.error = err
    }
    // soldier on if read-package-json raises an error, passing it to the
    // Node which will attach it to its errors array (Link passes it along to
    // its target node)
    if (normalize(path) === real) {
      node = this.#newNode(params)
    } else {
      node = await this.#newLink(params)
    }
  }
  this.#cache.set(path, node)
  return node
}
#newNode (options) {
// check it for an fsParent if it's a tree top. there's a decent chance
// it'll get parented later, making the fsParent scan a no-op, but better
// safe than sorry, since it's cheap.
const { parent, realpath } = options
if (!parent) {
this.#topNodes.add(realpath)
}
return new Node(options)
}
async #newLink (options) {
const { realpath } = options
this.#topNodes.add(realpath)
const target = this.#cache.get(realpath)
const link = new Link({ ...options, target })
if (!target) {
// Link set its target itself in this case
this.#cache.set(realpath, link.target)
// if a link target points at a node outside of the root tree's
// node_modules hierarchy, then load that node as well.
await this.#loadFSTree(link.target)
}
return link
}
async #loadFSTree (node) {
const did = this.#actualTreeLoaded
if (!node.isLink && !did.has(node.target.realpath)) {
did.add(node.target.realpath)
await this.#loadFSChildren(node.target)
return Promise.all(
[...node.target.children.entries()]
.filter(([, kid]) => !did.has(kid.realpath))
.map(([, kid]) => this.#loadFSTree(kid))
)
}
}
// create child nodes for all the entries in node_modules
// and attach them to the node as a parent
async #loadFSChildren (node) {
const nm = resolve(node.realpath, 'node_modules')
try {
const kids = await readdirScoped(nm).then(paths => paths.map(p => p.replace(/\\/g, '/')))
return Promise.all(
// ignore . dirs and retired scoped package folders
kids.filter(kid => !/^(@[^/]+\/)?\./.test(kid))
.filter(kid => this.#filter(node, kid))
.map(kid => this.#loadFSNode({
parent: node,
path: resolve(nm, kid),
})))
} catch {
// error in the readdir is not fatal, just means no kids
}
}
async #findMissingEdges () {
  // try to resolve any missing edges by walking up the directory tree,
  // checking for the package in each node_modules folder. stop at the
  // root directory.
  // The tricky move here is that we load a "dummy" node for the folder
  // containing the node_modules folder, so that it can be assigned as
  // the fsParent. It's a bad idea to *actually* load that full node,
  // because people sometimes develop in ~/projects/node_modules/...
  // so we'd end up loading a massive tree with lots of unrelated junk.
  //
  // nmContents memoizes node_modules directory listings, so each folder
  // is read from disk at most once across all nodes and edges.
  const nmContents = new Map()
  const tree = this.#actualTree
  for (const node of tree.inventory.values()) {
    const ancestor = ancestorPath(node.realpath, this.path)
    const depPromises = []
    for (const [name, edge] of node.edgesOut.entries()) {
      // an edge needs work if it's missing, or if it points at a dummy
      // node, or at a node that isn't actually this node's child
      const notMissing = !edge.missing &&
        !(edge.to && (edge.to.dummy || edge.to.parent !== node))
      if (notMissing) {
        continue
      }
      // start the walk from the dirname, because we would have found
      // the dep in the loadFSTree step already if it was local.
      for (const p of walkUp(dirname(node.realpath))) {
        // only walk as far as the nearest ancestor
        // this keeps us from going into completely unrelated
        // places when a project is just missing something, but
        // allows for finding the transitive deps of link targets.
        // ie, if it has to go up and back out to get to the path
        // from the nearest common ancestor, we've gone too far.
        if (ancestor && /^\.\.(?:[\\/]|$)/.test(relative(ancestor, p))) {
          break
        }
        let entries
        if (!nmContents.has(p)) {
          // note: sequential await keeps the memo filled before the next
          // edge consults it; readdir errors just mean no entries
          entries = await readdirScoped(p + '/node_modules')
            .catch(() => []).then(paths => paths.map(p => p.replace(/\\/g, '/')))
          nmContents.set(p, entries)
        } else {
          entries = nmContents.get(p)
        }
        if (!entries.includes(name)) {
          continue
        }
        let d
        if (!this.#cache.has(p)) {
          // dummy placeholder for the folder that owns node_modules
          d = new Node({ path: p, root: node.root, dummy: true })
          this.#cache.set(p, d)
        } else {
          d = this.#cache.get(p)
        }
        if (d.dummy) {
          // it's a placeholder, so likely would not have loaded this dep,
          // unless another dep in the tree also needs it.
          const depPath = normalize(`${p}/node_modules/${name}`)
          const cached = this.#cache.get(depPath)
          if (!cached || cached.dummy) {
            depPromises.push(this.#loadFSNode({
              path: depPath,
              root: node.root,
              parent: d,
            }).then(node => this.#loadFSTree(node)))
          }
        }
        break
      }
    }
    await Promise.all(depPromises)
  }
}
}

View File

@@ -0,0 +1,303 @@
// mixin providing the loadVirtual method
const mapWorkspaces = require('@npmcli/map-workspaces')
const { resolve } = require('node:path')
const nameFromFolder = require('@npmcli/name-from-folder')
const consistentResolve = require('../consistent-resolve.js')
const Shrinkwrap = require('../shrinkwrap.js')
const Node = require('../node.js')
const Link = require('../link.js')
const relpath = require('../relpath.js')
const calcDepFlags = require('../calc-dep-flags.js')
const rpj = require('read-package-json-fast')
const treeCheck = require('../tree-check.js')
const flagsSuspect = Symbol.for('flagsSuspect')
const setWorkspaces = Symbol.for('setWorkspaces')
// Mixin class providing loadVirtual: build an in-memory tree from a
// shrinkwrap/package-lock file without touching node_modules on disk.
module.exports = cls => class VirtualLoader extends cls {
  #rootOptionProvided

  constructor (options) {
    super(options)
    // the virtual tree we load from a shrinkwrap
    this.virtualTree = options.virtualTree
    this[flagsSuspect] = false
  }

  // public method
  async loadVirtual (options = {}) {
    if (this.virtualTree) {
      return this.virtualTree
    }
    // allow the user to set reify options on the ctor as well.
    // XXX: deprecate separate reify() options object.
    options = { ...this.options, ...options }
    if (options.root && options.root.meta) {
      await this.#loadFromShrinkwrap(options.root.meta, options.root)
      return treeCheck(this.virtualTree)
    }
    const s = await Shrinkwrap.load({
      path: this.path,
      lockfileVersion: this.options.lockfileVersion,
      resolveOptions: this.options,
    })
    if (!s.loadedFromDisk && !options.root) {
      const er = new Error('loadVirtual requires existing shrinkwrap file')
      throw Object.assign(er, { code: 'ENOLOCK' })
    }
    // when building the ideal tree, we pass in a root node to this function
    // otherwise, load it from the root package json or the lockfile
    const {
      root = await this.#loadRoot(s),
    } = options
    this.#rootOptionProvided = options.root
    await this.#loadFromShrinkwrap(s, root)
    root.assertRootOverrides()
    return treeCheck(this.virtualTree)
  }

  // build the root node from the on-disk package.json, falling back to
  // the root entry of the lockfile data when package.json can't be read
  async #loadRoot (s) {
    const pj = this.path + '/package.json'
    const pkg = await rpj(pj).catch(() => s.data.packages['']) || {}
    return this[setWorkspaces](this.#loadNode('', pkg, true))
  }

  async #loadFromShrinkwrap (s, root) {
    if (!this.#rootOptionProvided) {
      // root is never any of these things, but might be a brand new
      // baby Node object that never had its dep flags calculated.
      root.extraneous = false
      root.dev = false
      root.optional = false
      root.devOptional = false
      root.peer = false
    } else {
      this[flagsSuspect] = true
    }
    this.#checkRootEdges(s, root)
    root.meta = s
    this.virtualTree = root
    const { links, nodes } = this.#resolveNodes(s, root)
    await this.#resolveLinks(links, nodes)
    // bundle assignment is only needed for v1 (and older) lockfiles
    if (!(s.originalLockfileVersion >= 2)) {
      this.#assignBundles(nodes)
    }
    if (this[flagsSuspect]) {
      // reset all dep flags
      // can't use inventory here, because virtualTree might not be root
      for (const node of nodes.values()) {
        if (node.isRoot || node === this.#rootOptionProvided) {
          continue
        }
        node.extraneous = true
        node.dev = true
        node.optional = true
        node.devOptional = true
        node.peer = true
      }
      calcDepFlags(this.virtualTree, !this.#rootOptionProvided)
    }
    return root
  }

  // check the lockfile deps, and see if they match. if they do not
  // then we have to reset dep flags at the end. for example, if the
  // user manually edits their package.json file, then we need to know
  // that the idealTree is no longer entirely trustworthy.
  #checkRootEdges (s, root) {
    // loaded virtually from tree, no chance of being out of sync
    // ancient lockfiles are critically damaged by this process,
    // so we need to just hope for the best in those cases.
    if (!s.loadedFromDisk || s.ancientLockfile) {
      return
    }
    const lock = s.get('')
    const prod = lock.dependencies || {}
    const dev = lock.devDependencies || {}
    const optional = lock.optionalDependencies || {}
    const peer = lock.peerDependencies || {}
    const peerOptional = {}
    // peers marked optional in peerDependenciesMeta move to peerOptional
    if (lock.peerDependenciesMeta) {
      for (const [name, meta] of Object.entries(lock.peerDependenciesMeta)) {
        if (meta.optional && peer[name] !== undefined) {
          peerOptional[name] = peer[name]
          delete peer[name]
        }
      }
    }
    // optional deps take precedence over the same name in prod
    for (const name of Object.keys(optional)) {
      delete prod[name]
    }
    const lockWS = {}
    const workspaces = mapWorkspaces.virtual({
      cwd: this.path,
      lockfile: s.data,
    })
    for (const [name, path] of workspaces.entries()) {
      lockWS[name] = `file:${path.replace(/#/g, '%23')}`
    }
    // Should rootNames exclude optional?
    const rootNames = new Set(root.edgesOut.keys())
    const lockByType = ({ dev, optional, peer, peerOptional, prod, workspace: lockWS })
    // Find anything in shrinkwrap deps that doesn't match root's type or spec
    for (const type in lockByType) {
      const deps = lockByType[type]
      for (const name in deps) {
        const edge = root.edgesOut.get(name)
        if (!edge || edge.type !== type || edge.spec !== deps[name]) {
          return this[flagsSuspect] = true
        }
        rootNames.delete(name)
      }
    }
    // Something was in root that's not accounted for in shrinkwrap
    if (rootNames.size) {
      return this[flagsSuspect] = true
    }
  }

  // separate out link metadatas, and create Node objects for nodes
  #resolveNodes (s, root) {
    const links = new Map()
    const nodes = new Map([['', root]])
    for (const [location, meta] of Object.entries(s.data.packages)) {
      // skip the root because we already got it
      if (!location) {
        continue
      }
      if (meta.link) {
        links.set(location, meta)
      } else {
        nodes.set(location, this.#loadNode(location, meta))
      }
    }
    return { links, nodes }
  }

  // links is the set of metadata, and nodes is the map of non-Link nodes
  // Set the targets to nodes in the set, if we have them (we might not)
  async #resolveLinks (links, nodes) {
    for (const [location, meta] of links.entries()) {
      const targetPath = resolve(this.path, meta.resolved)
      const targetLoc = relpath(this.path, targetPath)
      const target = nodes.get(targetLoc)
      const link = this.#loadLink(location, targetLoc, target, meta)
      nodes.set(location, link)
      nodes.set(targetLoc, link.target)
      // we always need to read the package.json for link targets
      // outside node_modules because they can be changed by the local user
      if (!link.target.parent) {
        const pj = link.realpath + '/package.json'
        const pkg = await rpj(pj).catch(() => null)
        if (pkg) {
          link.target.package = pkg
        }
      }
    }
  }

  // reconstruct bundleDependencies arrays on parents from the inBundle
  // markers found in v1 lockfile metadata entries
  #assignBundles (nodes) {
    for (const [location, node] of nodes) {
      // Skip assignment of parentage for the root package
      if (!location || node.isLink && !node.target.location) {
        continue
      }
      const { name, parent, package: { inBundle } } = node
      if (!parent) {
        continue
      }
      // read inBundle from package because 'package' here is
      // actually a v2 lockfile metadata entry.
      // If the *parent* is also bundled, though, or if the parent has
      // no dependency on it, then we assume that it's being pulled in
      // just by virtue of its parent or a transitive dep being bundled.
      const { package: ppkg } = parent
      const { inBundle: parentBundled } = ppkg
      if (inBundle && !parentBundled && parent.edgesOut.has(node.name)) {
        if (!ppkg.bundleDependencies) {
          ppkg.bundleDependencies = [name]
        } else {
          ppkg.bundleDependencies.push(name)
        }
      }
    }
  }

  // create a Node from a lockfile "packages" entry at `location`
  #loadNode (location, sw, loadOverrides) {
    const p = this.virtualTree ? this.virtualTree.realpath : this.path
    const path = resolve(p, location)
    // shrinkwrap doesn't include package name unless necessary
    if (!sw.name) {
      sw.name = nameFromFolder(path)
    }
    const dev = sw.dev
    const optional = sw.optional
    const devOptional = dev || optional || sw.devOptional
    const peer = sw.peer
    const node = new Node({
      installLinks: this.installLinks,
      legacyPeerDeps: this.legacyPeerDeps,
      root: this.virtualTree,
      path,
      realpath: path,
      integrity: sw.integrity,
      resolved: consistentResolve(sw.resolved, this.path, path),
      pkg: sw,
      hasShrinkwrap: sw.hasShrinkwrap,
      dev,
      optional,
      devOptional,
      peer,
      loadOverrides,
    })
    // cast to boolean because they're undefined in the lock file when false
    node.extraneous = !!sw.extraneous
    node.devOptional = !!(sw.devOptional || sw.dev || sw.optional)
    node.peer = !!sw.peer
    node.optional = !!sw.optional
    node.dev = !!sw.dev
    return node
  }

  // create a Link for a lockfile entry marked link:true; the target may
  // be undefined, in which case Link constructs its own target node
  #loadLink (location, targetLoc, target) {
    const path = resolve(this.path, location)
    const link = new Link({
      installLinks: this.installLinks,
      legacyPeerDeps: this.legacyPeerDeps,
      path,
      realpath: resolve(this.path, targetLoc),
      target,
      pkg: target && target.package,
    })
    // a link node mirrors its target's dep flags
    link.extraneous = target.extraneous
    link.devOptional = target.devOptional
    link.peer = target.peer
    link.optional = target.optional
    link.dev = target.dev
    return link
  }
}

View File

@@ -0,0 +1,400 @@
// Arborist.rebuild({path = this.path}) will do all the binlinks and
// bundle building needed. Called by reify, and by `npm rebuild`.
const localeCompare = require('@isaacs/string-locale-compare')('en')
const { depth: dfwalk } = require('treeverse')
const promiseAllRejectLate = require('promise-all-reject-late')
const rpj = require('read-package-json-fast')
const binLinks = require('bin-links')
const runScript = require('@npmcli/run-script')
const { callLimit: promiseCallLimit } = require('promise-call-limit')
const { resolve } = require('node:path')
const { isNodeGypPackage, defaultGypInstallScript } = require('@npmcli/node-gyp')
const { log, time } = require('proc-log')
// map a truthy value to '1' and a falsy one to '', the convention used
// for the npm_package_* environment variables passed to scripts
const boolEnv = (value) => (value ? '1' : '')
// order nodes by tree depth first, then by path (locale-aware), so the
// build queues are processed in a stable, predictable order
const sortNodes = (a, b) => {
  const depthDiff = a.depth - b.depth
  return depthDiff || localeCompare(a.path, b.path)
}
const _checkBins = Symbol.for('checkBins')
// defined by reify mixin
const _handleOptionalFailure = Symbol.for('handleOptionalFailure')
const _trashList = Symbol.for('trashList')
// Mixin class providing rebuild: run lifecycle scripts (preinstall,
// install, postinstall, prepare) and create bin links for a set of nodes.
module.exports = cls => class Builder extends cls {
  #doHandleOptionalFailure
  #oldMeta = null
  #queues

  constructor (options) {
    super(options)
    this.scriptsRun = new Set()
    this.#resetQueues()
  }

  async rebuild ({ nodes, handleOptionalFailure = false } = {}) {
    // nothing to do if we're not building anything!
    if (this.options.ignoreScripts && !this.options.binLinks) {
      return
    }
    // when building for the first time, as part of reify, we ignore
    // failures in optional nodes, and just delete them. however, when
    // running JUST a rebuild, we treat optional failures as real fails
    this.#doHandleOptionalFailure = handleOptionalFailure
    if (!nodes) {
      nodes = await this.#loadDefaultNodes()
    }
    // separates links nodes so that it can run
    // prepare scripts and link bins in the expected order
    const timeEnd = time.start('build')
    const {
      depNodes,
      linkNodes,
    } = this.#retrieveNodesByType(nodes)
    // build regular deps
    await this.#build(depNodes, {})
    // build link deps
    if (linkNodes.size) {
      this.#resetQueues()
      await this.#build(linkNodes, { type: 'links' })
    }
    timeEnd()
  }

  // if we don't have a set of nodes, then just rebuild
  // the actual tree on disk.
  async #loadDefaultNodes () {
    let nodes
    const tree = await this.loadActual()
    let filterSet
    if (!this.options.workspacesEnabled) {
      // workspaces disabled: only the root project and its own deps
      filterSet = this.excludeWorkspacesDependencySet(tree)
      nodes = tree.inventory.filter(node =>
        filterSet.has(node) || node.isProjectRoot
      )
    } else if (this.options.workspaces.length) {
      // limit to the requested workspaces (and optionally the root)
      filterSet = this.workspaceDependencySet(
        tree,
        this.options.workspaces,
        this.options.includeWorkspaceRoot
      )
      nodes = tree.inventory.filter(node => filterSet.has(node))
    } else {
      nodes = tree.inventory.values()
    }
    return nodes
  }

  // split the incoming nodes into regular deps and link nodes
  #retrieveNodesByType (nodes) {
    const depNodes = new Set()
    const linkNodes = new Set()
    const storeNodes = new Set()
    for (const node of nodes) {
      if (node.isStoreLink) {
        storeNodes.add(node)
      } else if (node.isLink) {
        linkNodes.add(node)
      } else {
        depNodes.add(node)
      }
    }
    // Make sure that store linked nodes are processed last.
    // We can't process store links separately or else lifecycle scripts on
    // standard nodes might not have bin links yet.
    for (const node of storeNodes) {
      depNodes.add(node)
    }
    // deduplicates link nodes and their targets, avoids
    // calling lifecycle scripts twice when running `npm rebuild`
    // ref: https://github.com/npm/cli/issues/2905
    //
    // we avoid doing so if global=true since `bin-links` relies
    // on having the target nodes available in global mode.
    if (!this.options.global) {
      for (const node of linkNodes) {
        depNodes.delete(node.target)
      }
    }
    return {
      depNodes,
      linkNodes,
    }
  }

  // clear the per-event queues filled by #buildQueues
  #resetQueues () {
    this.#queues = {
      preinstall: [],
      install: [],
      postinstall: [],
      prepare: [],
      bin: [],
    }
  }

  // run the full build sequence (scripts + bin links) for a set of nodes
  async #build (nodes, { type = 'deps' }) {
    const timeEnd = time.start(`build:${type}`)
    await this.#buildQueues(nodes)
    if (!this.options.ignoreScripts) {
      await this.#runScripts('preinstall')
    }
    // links should run prepare scripts and only link bins after that
    if (type === 'links') {
      await this.#runScripts('prepare')
    }
    if (this.options.binLinks) {
      await this.#linkAllBins()
    }
    if (!this.options.ignoreScripts) {
      await this.#runScripts('install')
      await this.#runScripts('postinstall')
    }
    timeEnd()
  }

  // determine which nodes have scripts/bins to handle, and fill the
  // per-event queues in a stable sorted order
  async #buildQueues (nodes) {
    const timeEnd = time.start('build:queue')
    const set = new Set()
    const promises = []
    for (const node of nodes) {
      promises.push(this.#addToBuildSet(node, set))
      // if it has bundle deps, add those too, if rebuildBundle
      if (this.options.rebuildBundle !== false) {
        const bd = node.package.bundleDependencies
        if (bd && bd.length) {
          dfwalk({
            tree: node,
            leave: node => promises.push(this.#addToBuildSet(node, set)),
            getChildren: node => [...node.children.values()],
            filter: node => node.inBundle,
          })
        }
      }
    }
    await promiseAllRejectLate(promises)
    // now sort into the queues for the 4 things we have to do
    // run in the same predictable order that buildIdealTree uses
    // there's no particular reason for doing it in this order rather
    // than another, but sorting *somehow* makes it consistent.
    const queue = [...set].sort(sortNodes)
    for (const node of queue) {
      const { package: { bin, scripts = {} } } = node.target
      const { preinstall, install, postinstall, prepare } = scripts
      const tests = { bin, preinstall, install, postinstall, prepare }
      for (const [key, has] of Object.entries(tests)) {
        if (has) {
          this.#queues[key].push(node)
        }
      }
    }
    timeEnd()
  }

  async [_checkBins] (node) {
    // if the node is a global top, and we're not in force mode, then
    // any existing bins need to either be missing, or a symlink into
    // the node path. Otherwise a package can have a preinstall script
    // that unlinks something, to allow them to silently overwrite system
    // binaries, which is unsafe and insecure.
    if (!node.globalTop || this.options.force) {
      return
    }
    const { path, package: pkg } = node
    await binLinks.checkBins({ pkg, path, top: true, global: true })
  }

  // decide whether `node` belongs in the build set, reading package.json
  // from disk when the in-memory metadata may be missing script info
  async #addToBuildSet (node, set, refreshed = false) {
    if (set.has(node)) {
      return
    }
    if (this.#oldMeta === null) {
      const { root: { meta } } = node
      this.#oldMeta = meta && meta.loadedFromDisk &&
        !(meta.originalLockfileVersion >= 2)
    }
    const { package: pkg, hasInstallScript } = node.target
    const { gypfile, bin, scripts = {} } = pkg
    const { preinstall, install, postinstall, prepare } = scripts
    const anyScript = preinstall || install || postinstall || prepare
    if (!refreshed && !anyScript && (hasInstallScript || this.#oldMeta)) {
      // we either have an old metadata (and thus might have scripts)
      // or we have an indication that there's install scripts (but
      // don't yet know what they are) so we have to load the package.json
      // from disk to see what the deal is. Failure here just means
      // no scripts to add, probably borked package.json.
      // add to the set then remove while we're reading the pj, so we
      // don't accidentally hit it multiple times.
      set.add(node)
      const pkg = await rpj(node.path + '/package.json').catch(() => ({}))
      set.delete(node)
      const { scripts = {} } = pkg
      node.package.scripts = scripts
      return this.#addToBuildSet(node, set, true)
    }
    // Rebuild node-gyp dependencies lacking an install or preinstall script
    // note that 'scripts' might be missing entirely, and the package may
    // set gypfile:false to avoid this automatic detection.
    const isGyp = gypfile !== false &&
      !install &&
      !preinstall &&
      await isNodeGypPackage(node.path)
    if (bin || preinstall || install || postinstall || prepare || isGyp) {
      if (bin) {
        await this[_checkBins](node)
      }
      if (isGyp) {
        scripts.install = defaultGypInstallScript
        node.package.scripts = scripts
      }
      set.add(node)
    }
  }

  // run the queued lifecycle scripts for one event, with concurrency
  // limited to 1 when scripts run in the foreground
  async #runScripts (event) {
    const queue = this.#queues[event]
    if (!queue.length) {
      return
    }
    const timeEnd = time.start(`build:run:${event}`)
    const stdio = this.options.foregroundScripts ? 'inherit' : 'pipe'
    const limit = this.options.foregroundScripts ? 1 : undefined
    await promiseCallLimit(queue.map(node => async () => {
      const {
        path,
        integrity,
        resolved,
        optional,
        peer,
        dev,
        devOptional,
        package: pkg,
        location,
        isStoreLink,
      } = node.target
      // skip any that we know we'll be deleting
      // or storeLinks
      if (this[_trashList].has(path) || isStoreLink) {
        return
      }
      const timeEndLocation = time.start(`build:run:${event}:${location}`)
      log.info('run', pkg._id, event, location, pkg.scripts[event])
      const env = {
        npm_package_resolved: resolved,
        npm_package_integrity: integrity,
        npm_package_json: resolve(path, 'package.json'),
        npm_package_optional: boolEnv(optional),
        npm_package_dev: boolEnv(dev),
        npm_package_peer: boolEnv(peer),
        npm_package_dev_optional:
          boolEnv(devOptional && !dev && !optional),
      }
      const runOpts = {
        event,
        path,
        pkg,
        stdio,
        env,
        scriptShell: this.options.scriptShell,
      }
      const p = runScript(runOpts).catch(er => {
        const { code, signal } = er
        log.info('run', pkg._id, event, { code, signal })
        throw er
      }).then(({ args, code, signal, stdout, stderr }) => {
        this.scriptsRun.add({
          pkg,
          path,
          event,
          // I do not know why this needs to be on THIS line but refactoring
          // this function would be quite a process
          // eslint-disable-next-line promise/always-return
          cmd: args && args[args.length - 1],
          env,
          code,
          signal,
          stdout,
          stderr,
        })
        log.info('run', pkg._id, event, { code, signal })
      })
      await (this.#doHandleOptionalFailure
        ? this[_handleOptionalFailure](node, p)
        : p)
      timeEndLocation()
    }), { limit })
    timeEnd()
  }

  // create bin links for everything in the bin queue
  async #linkAllBins () {
    const queue = this.#queues.bin
    if (!queue.length) {
      return
    }
    const timeEnd = time.start('build:link')
    const promises = []
    // sort the queue by node path, so that the module-local collision
    // detector in bin-links will always resolve the same way.
    for (const node of queue.sort(sortNodes)) {
      // TODO these run before they're awaited
      promises.push(this.#createBinLinks(node))
    }
    await promiseAllRejectLate(promises)
    timeEnd()
  }

  // create bin links for a single node (unless it's scheduled for removal)
  async #createBinLinks (node) {
    if (this[_trashList].has(node.path)) {
      return
    }
    const timeEnd = time.start(`build:link:${node.location}`)
    const p = binLinks({
      pkg: node.package,
      path: node.path,
      top: !!(node.isTop || node.globalTop),
      force: this.options.force,
      global: !!node.globalTop,
    })
    await (this.#doHandleOptionalFailure
      ? this[_handleOptionalFailure](node, p)
      : p)
    timeEnd()
  }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,415 @@
// an object representing the set of vulnerabilities in a tree
/* eslint camelcase: "off" */
const localeCompare = require('@isaacs/string-locale-compare')('en')
const npa = require('npm-package-arg')
const pickManifest = require('npm-pick-manifest')
const Vuln = require('./vuln.js')
const Calculator = require('@npmcli/metavuln-calculator')
const _getReport = Symbol('getReport')
const _fixAvailable = Symbol('fixAvailable')
const _checkTopNode = Symbol('checkTopNode')
const _init = Symbol('init')
const _omit = Symbol('omit')
const { log, time } = require('proc-log')
const fetch = require('npm-registry-fetch')
// A Map of package name -> Vuln describing the vulnerabilities found in
// a tree, fetched from the registry audit endpoints and expanded with
// locally calculated metavulns.
class AuditReport extends Map {
  // convenience: construct and run in one call
  static load (tree, opts) {
    return new AuditReport(tree, opts).run()
  }

  get auditReportVersion () {
    return 2
  }

  // serialize to the v2 audit report shape: vulnerabilities keyed by
  // name (locale-sorted), plus severity and dependency-type counts
  toJSON () {
    const obj = {
      auditReportVersion: this.auditReportVersion,
      vulnerabilities: {},
      metadata: {
        vulnerabilities: {
          info: 0,
          low: 0,
          moderate: 0,
          high: 0,
          critical: 0,
          total: this.size,
        },
        dependencies: {
          prod: 0,
          dev: 0,
          optional: 0,
          peer: 0,
          peerOptional: 0,
          // minus one to exclude the root node itself
          total: this.tree.inventory.size - 1,
        },
      },
    }
    // count nodes per dependency type; a node with none of the flags
    // set counts as prod
    for (const node of this.tree.inventory.values()) {
      const { dependencies } = obj.metadata
      let prod = true
      for (const type of [
        'dev',
        'optional',
        'peer',
        'peerOptional',
      ]) {
        if (node[type]) {
          dependencies[type]++
          prod = false
        }
      }
      if (prod) {
        dependencies.prod++
      }
    }
    // if it doesn't have any topVulns, then it's fixable with audit fix
    // for each topVuln, figure out if it's fixable with audit fix --force,
    // or if we have to just delete the thing, and if the fix --force will
    // require a semver major update.
    const vulnerabilities = []
    for (const [name, vuln] of this.entries()) {
      vulnerabilities.push([name, vuln.toJSON()])
      obj.metadata.vulnerabilities[vuln.severity]++
    }
    obj.vulnerabilities = vulnerabilities
      .sort(([a], [b]) => localeCompare(a, b))
      .reduce((set, [name, vuln]) => {
        set[name] = vuln
        return set
      }, {})
    return obj
  }

  constructor (tree, opts = {}) {
    super()
    const { omit } = opts
    this[_omit] = new Set(omit || [])
    this.topVulns = new Map()
    this.calculator = new Calculator(opts)
    this.error = null
    this.options = opts
    this.tree = tree
    this.filterSet = opts.filterSet
  }

  // fetch the registry report (if auditing is enabled) and populate
  // this Map with Vuln entries; always resolves to `this`
  async run () {
    this.report = await this[_getReport]()
    log.silly('audit report', this.report)
    if (this.report) {
      await this[_init]()
    }
    return this
  }

  isVulnerable (node) {
    const vuln = this.get(node.packageName)
    return !!(vuln && vuln.isVulnerable(node))
  }

  async [_init] () {
    const timeEnd = time.start('auditReport:init')
    const promises = []
    for (const [name, advisories] of Object.entries(this.report)) {
      for (const advisory of advisories) {
        promises.push(this.calculator.calculate(name, advisory))
      }
    }
    // now the advisories are calculated with a set of versions
    // and the packument. turn them into our style of vuln objects
    // which also have the affected nodes, and also create entries
    // for all the metavulns that we find from dependents.
    // note: `advisories` is a Set that grows while we iterate it,
    // which is how newly discovered metavulns get processed too.
    const advisories = new Set(await Promise.all(promises))
    const seen = new Set()
    for (const advisory of advisories) {
      const { name, range } = advisory
      const k = `${name}@${range}`
      const vuln = this.get(name) || new Vuln({ name, advisory })
      if (this.has(name)) {
        vuln.addAdvisory(advisory)
      }
      super.set(name, vuln)
      // don't flag the exact same name/range more than once
      // adding multiple advisories with the same range is fine, but no
      // need to search for nodes we already would have added.
      if (!seen.has(k)) {
        const p = []
        for (const node of this.tree.inventory.query('packageName', name)) {
          if (!shouldAudit(node, this[_omit], this.filterSet)) {
            continue
          }
          // if not vulnerable by this advisory, keep searching
          if (!advisory.testVersion(node.version)) {
            continue
          }
          // we will have loaded the source already if this is a metavuln
          if (advisory.type === 'metavuln') {
            vuln.addVia(this.get(advisory.dependency))
          }
          // already marked this one, no need to do it again
          if (vuln.nodes.has(node)) {
            continue
          }
          // haven't marked this one yet. get its dependents.
          vuln.nodes.add(node)
          for (const { from: dep, spec } of node.edgesIn) {
            if (dep.isTop && !vuln.topNodes.has(dep)) {
              this[_checkTopNode](dep, vuln, spec)
            } else {
              // calculate a metavuln, if necessary
              const calc = this.calculator.calculate(dep.packageName, advisory)
              // eslint-disable-next-line promise/always-return
              p.push(calc.then(meta => {
                // eslint-disable-next-line promise/always-return
                if (meta.testVersion(dep.version, spec)) {
                  advisories.add(meta)
                }
              }))
            }
          }
        }
        await Promise.all(p)
        seen.add(k)
      }
      // make sure we actually got something. if not, remove it
      // this can happen if you are loading from a lockfile created by
      // npm v5, since it lists the current version of all deps,
      // rather than the range that is actually depended upon,
      // or if using --omit with the older audit endpoint.
      if (this.get(name).nodes.size === 0) {
        this.delete(name)
        continue
      }
      // if the vuln is valid, but THIS advisory doesn't apply to any of
      // the nodes it references, then remove it from the advisory list.
      // happens when using omit with old audit endpoint.
      for (const advisory of vuln.advisories) {
        const relevant = [...vuln.nodes]
          .some(n => advisory.testVersion(n.version))
        if (!relevant) {
          vuln.deleteAdvisory(advisory)
        }
      }
    }
    timeEnd()
  }

  [_checkTopNode] (topNode, vuln, spec) {
    vuln.fixAvailable = this[_fixAvailable](topNode, vuln, spec)
    if (vuln.fixAvailable !== true) {
      // now we know the top node is vulnerable, and cannot be
      // upgraded out of the bad place without --force. But, there's
      // no need to add it to the actual vulns list, because nothing
      // depends on root.
      this.topVulns.set(vuln.name, vuln)
      vuln.topNodes.add(topNode)
    }
  }

  // check whether the top node is vulnerable.
  // check whether we can get out of the bad place with --force, and if
  // so, whether that update is SemVer Major
  [_fixAvailable] (topNode, vuln, spec) {
    // this will always be set to at least {name, versions:{}}
    const paku = vuln.packument
    if (!vuln.testSpec(spec)) {
      return true
    }
    // similarly, even if we HAVE a packument, but we're looking for it
    // somewhere other than the registry, and we got something vulnerable,
    // then we're stuck with it.
    const specObj = npa(spec)
    if (!specObj.registry) {
      return false
    }
    if (specObj.subSpec) {
      spec = specObj.subSpec.rawSpec
    }
    // We don't provide fixes for top nodes other than root, but we
    // still check to see if the node is fixable with a different version,
    // and if that is a semver major bump.
    try {
      const {
        _isSemVerMajor: isSemVerMajor,
        version,
        name,
      } = pickManifest(paku, spec, {
        ...this.options,
        before: null,
        avoid: vuln.range,
        avoidStrict: true,
      })
      return { name, version, isSemVerMajor }
    } catch (er) {
      return false
    }
  }

  // entries are added via super.set() in [_init]; the public Map.set
  // is disabled to keep the report internally consistent
  set () {
    throw new Error('do not call AuditReport.set() directly')
  }

  // convert a quick-audit into a bulk advisory listing
  static auditToBulk (report) {
    if (!report.advisories) {
      // tack on the report json where the response body would go
      throw Object.assign(new Error('Invalid advisory report'), {
        body: JSON.stringify(report),
      })
    }
    const bulk = {}
    const { advisories } = report
    for (const advisory of Object.values(advisories)) {
      const {
        id,
        url,
        title,
        severity = 'high',
        vulnerable_versions = '*',
        module_name: name,
      } = advisory
      bulk[name] = bulk[name] || []
      bulk[name].push({ id, url, title, severity, vulnerable_versions })
    }
    return bulk
  }

  async [_getReport] () {
    // if we're not auditing, just return false
    if (this.options.audit === false || this.options.offline === true || this.tree.inventory.size === 1) {
      return null
    }
    const timeEnd = time.start('auditReport:getReport')
    try {
      try {
        // first try the super fast bulk advisory listing
        const body = prepareBulkData(this.tree, this[_omit], this.filterSet)
        log.silly('audit', 'bulk request', body)
        // no sense asking if we don't have anything to audit,
        // we know it'll be empty
        if (!Object.keys(body).length) {
          return null
        }
        const res = await fetch('/-/npm/v1/security/advisories/bulk', {
          ...this.options,
          registry: this.options.auditRegistry || this.options.registry,
          method: 'POST',
          gzip: true,
          body,
        })
        return await res.json()
      } catch (er) {
        log.silly('audit', 'bulk request failed', String(er.body))
        // that failed, try the quick audit endpoint
        const body = prepareData(this.tree, this.options)
        const res = await fetch('/-/npm/v1/security/audits/quick', {
          ...this.options,
          registry: this.options.auditRegistry || this.options.registry,
          method: 'POST',
          gzip: true,
          body,
        })
        return AuditReport.auditToBulk(await res.json())
      }
    } catch (er) {
      // audit failures are not fatal; record the error and carry on
      log.verbose('audit error', er)
      log.silly('audit error', String(er.body))
      this.error = er
      return null
    } finally {
      timeEnd()
    }
  }
}
// return true if we should audit this one
// return true if we should audit this one
const shouldAudit = (node, omit, filterSet) => {
  // nodes without a version, and the root itself, are never audited
  if (!node.version || node.isRoot) {
    return false
  }
  // when a non-empty filter set is in play, only audit nodes inside it
  if (filterSet && filterSet.size !== 0 && !filterSet.has(node)) {
    return false
  }
  // nothing omitted: audit everything else
  if (omit.size === 0) {
    return true
  }
  // otherwise, just ensure we're not omitting this one
  if (node.dev && omit.has('dev')) {
    return false
  }
  if (node.optional && omit.has('optional')) {
    return false
  }
  // devOptional nodes are only skipped when BOTH trees are omitted
  if (node.devOptional && omit.has('dev') && omit.has('optional')) {
    return false
  }
  if (node.peer && omit.has('peer')) {
    return false
  }
  return true
}
// build the bulk-endpoint payload: { packageName: [versions...] },
// including only nodes that pass the shouldAudit filter
const prepareBulkData = (tree, omit, filterSet) => {
  const payload = {}
  for (const name of tree.inventory.query('packageName')) {
    const versions = new Set()
    for (const node of tree.inventory.query('packageName', name)) {
      if (shouldAudit(node, omit, filterSet)) {
        versions.add(node.version)
      }
    }
    if (versions.size !== 0) {
      payload[name] = [...versions]
    }
  }
  return payload
}
// build the quick-audit endpoint payload from the tree's committed
// metadata plus process/environment details.
const prepareData = (tree, opts) => {
  const data = tree.meta.commit()
  // the legacy audit endpoint doesn't support any kind of pre-filtering
  // we just have to get the advisories and skip over them in the report
  // (Object.assign skips undefined sources, like the old spread-of-{})
  const requires = Object.assign(
    {},
    tree.package.devDependencies,
    tree.package.peerDependencies,
    tree.package.optionalDependencies,
    tree.package.dependencies
  )
  return {
    name: data.name,
    version: data.version,
    requires,
    dependencies: data.dependencies,
    metadata: {
      node_version: process.version,
      npm_version: opts.npmVersion,
      platform: process.platform,
      arch: process.arch,
      node_env: process.env.NODE_ENV,
    },
  }
}
module.exports = AuditReport

View File

@@ -0,0 +1,126 @@
const { depth } = require('treeverse')
// walk the tree from the root, computing dev/optional/devOptional/peer
// flags on every reachable node. resetRoot clears the root's flags first.
const calcDepFlags = (tree, resetRoot = true) => {
  if (resetRoot) {
    tree.dev = false
    tree.optional = false
    tree.devOptional = false
    tree.peer = false
  }
  return depth({
    tree,
    visit: node => calcDepFlagsStep(node),
    filter: node => node,
    getChildren: (node, root) =>
      [...root.edgesOut.values()].map(edge => edge.to),
  })
}
// compute/propagate the flags for one node and its outgoing edges
const calcDepFlagsStep = (node) => {
  // This rewalk is necessary to handle cases where devDep and optional
  // or normal dependency graphs overlap deep in the dep graph.
  // Since we're only walking through deps that are not already flagged
  // as non-dev/non-optional, it's typically a very shallow traversal
  node.extraneous = false
  for (const flag of ['extraneous', 'dev', 'peer', 'devOptional', 'optional']) {
    resetParents(node, flag)
  }
  // for links, map their hierarchy appropriately
  if (node.isLink) {
    // node.target can be null; bail out rather than dereference it
    if (node.target == null) {
      return node
    }
    node.target.dev = node.dev
    node.target.optional = node.optional
    node.target.devOptional = node.devOptional
    node.target.peer = node.peer
    return calcDepFlagsStep(node.target)
  }
  for (const { peer, optional, dev, to } of node.edgesOut.values()) {
    // if the dep is missing, then its flags are already maximally unset
    if (!to) {
      continue
    }
    // everything with any kind of edge into it is not extraneous
    to.extraneous = false
    // devOptional is the *overlap* of the dev and optional tree.
    // however, for convenience and to save an extra rewalk, we leave
    // it set when we are in *either* tree, and then omit it from the
    // package-lock if either dev or optional are set.
    const unsetDevOpt = !node.devOptional && !node.dev && !node.optional &&
      !dev && !optional
    // if we are not in the devOpt tree, then we're also not in
    // either the dev or opt trees
    const unsetDev = unsetDevOpt || (!node.dev && !dev)
    const unsetOpt = unsetDevOpt || (!node.optional && !optional)
    const unsetPeer = !node.peer && !peer
    if (unsetPeer) {
      unsetFlag(to, 'peer')
    }
    if (unsetDevOpt) {
      unsetFlag(to, 'devOptional')
    }
    if (unsetDev) {
      unsetFlag(to, 'dev')
    }
    if (unsetOpt) {
      unsetFlag(to, 'optional')
    }
  }
  return node
}
// clear `flag` on the node and on each contiguous run of flagged
// ancestors above it; no-op when the node itself still carries the flag.
const resetParents = (node, flag) => {
  if (node[flag]) {
    return
  }
  let p = node
  while (p && (p === node || p[flag])) {
    p[flag] = false
    p = p.resolveParent
  }
}
// typically a short walk, since it only traverses deps that have the flag set.
const unsetFlag = (node, flag) => {
  if (!node[flag]) {
    return
  }
  node[flag] = false
  depth({
    tree: node,
    visit: n => {
      // clearing a flag also means the node is not extraneous
      n.extraneous = n[flag] = false
      if (n.isLink && n.target) {
        n.target.extraneous = n.target[flag] = false
      }
    },
    getChildren: n => {
      const source = n.isLink && n.target ? n.target : n
      return [...source.edgesOut.values()]
        .filter(edge => edge.to && edge.to[flag] &&
          ((flag !== 'peer' && edge.type === 'peer') || edge.type === 'prod'))
        .map(edge => edge.to)
    },
  })
}
module.exports = calcDepFlags

View File

@@ -0,0 +1,436 @@
// Internal methods used by buildIdealTree.
// Answer the question: "can I put this dep here?"
//
// IMPORTANT: *nothing* in this class should *ever* modify or mutate the tree
// at all. The contract here is strictly limited to read operations. We call
// this in the process of walking through the ideal tree checking many
// different potential placement targets for a given node. If a change is made
// to the tree along the way, that can cause serious problems!
//
// In order to enforce this restriction, in debug mode, canPlaceDep() will
// snapshot the tree at the start of the process, and then at the end, will
// verify that it still matches the snapshot, and throw an error if any changes
// occurred.
//
// The algorithm is roughly like this:
// - check the node itself:
// - if there is no version present, and no conflicting edges from target,
// OK, provided all peers can be placed at or above the target.
// - if the current version matches, KEEP
// - if there is an older version present, which can be replaced, then
// - if satisfying and preferDedupe? KEEP
// - else: REPLACE
// - if there is a newer version present, and preferDedupe, REPLACE
// - if the version present satisfies the edge, KEEP
// - else: CONFLICT
// - if the node is not in conflict, check each of its peers:
// - if the peer can be placed in the target, continue
// - else if the peer can be placed in a parent, and there is no other
// conflicting version shadowing it, continue
// - else CONFLICT
// - If the peers are not in conflict, return the original node's value
//
// An exception to this logic is that if the target is the deepest location
// that a node can be placed, and the conflicting node can be placed deeper,
// then we will return REPLACE rather than CONFLICT, and Arborist will queue
// the replaced node for resolution elsewhere.
const localeCompare = require('@isaacs/string-locale-compare')('en')
const semver = require('semver')
const debug = require('./debug.js')
const peerEntrySets = require('./peer-entry-sets.js')
const deepestNestingTarget = require('./deepest-nesting-target.js')
// sentinel results for placement checks; exposed to callers via the
// static getters on CanPlaceDep below
const CONFLICT = Symbol('CONFLICT')
const OK = Symbol('OK')
const REPLACE = Symbol('REPLACE')
const KEEP = Symbol('KEEP')
// Read-only placement check: answers "can dep go in target?" with one of
// the OK/KEEP/REPLACE/CONFLICT sentinels. See the file-header comment for
// the full algorithm; nothing in here may mutate the tree.
class CanPlaceDep {
  // dep is a dep that we're trying to place. it should already live in
  // a virtual tree where its peer set is loaded as children of the root.
  // target is the actual place where we're trying to place this dep
  // in a node_modules folder.
  // edge is the edge that we're trying to satisfy with this placement.
  // parent is the CanPlaceDep object of the entry node when placing a peer.
  constructor (options) {
    const {
      dep,
      target,
      edge,
      preferDedupe,
      parent = null,
      peerPath = [],
      explicitRequest = false,
    } = options
    // debug-mode-only sanity checks on required inputs, plus a tree
    // snapshot used to verify that this check never mutated anything
    debug(() => {
      if (!dep) {
        throw new Error('no dep provided to CanPlaceDep')
      }
      if (!target) {
        throw new Error('no target provided to CanPlaceDep')
      }
      if (!edge) {
        throw new Error('no edge provided to CanPlaceDep')
      }
      this._treeSnapshot = JSON.stringify([...target.root.inventory.entries()]
        .map(([loc, { packageName, version, resolved }]) => {
          return [loc, packageName, version, resolved]
        }).sort(([a], [b]) => localeCompare(a, b)))
    })
    // the result of whether we can place it or not
    this.canPlace = null
    // if peers conflict, but this one doesn't, then that is useful info
    this.canPlaceSelf = null
    this.dep = dep
    this.target = target
    this.edge = edge
    this.explicitRequest = explicitRequest
    // preventing cycles when we check peer sets
    this.peerPath = peerPath
    // we always prefer to dedupe peers, because they are trying
    // a bit harder to be singletons.
    this.preferDedupe = !!preferDedupe || edge.peer
    this.parent = parent
    this.children = []
    this.isSource = target === this.peerSetSource
    this.name = edge.name
    this.current = target.children.get(this.name)
    this.targetEdge = target.edgesOut.get(this.name)
    this.conflicts = new Map()
    // check if this dep was already subject to a peerDep override while
    // building the peerSet.
    this.edgeOverride = !dep.satisfies(edge)
    this.canPlace = this.checkCanPlace()
    if (!this.canPlaceSelf) {
      this.canPlaceSelf = this.canPlace
    }
    // debug-mode-only check that the tree is unchanged since the snapshot
    debug(() => {
      const treeSnapshot = JSON.stringify([...target.root.inventory.entries()]
        .map(([loc, { packageName, version, resolved }]) => {
          return [loc, packageName, version, resolved]
        }).sort(([a], [b]) => localeCompare(a, b)))
      /* istanbul ignore if */
      if (this._treeSnapshot !== treeSnapshot) {
        throw Object.assign(new Error('tree changed in CanPlaceDep'), {
          expect: this._treeSnapshot,
          actual: treeSnapshot,
        })
      }
    })
  }
  // top-level dispatch: handle broken deps and peer-edge restrictions,
  // then delegate based on whether target already has a node by this name
  checkCanPlace () {
    const { target, targetEdge, current, dep } = this
    // if the dep failed to load, we're going to fail the build or
    // prune it out anyway, so just move forward placing/replacing it.
    if (dep.errors.length) {
      return current ? REPLACE : OK
    }
    // cannot place peers inside their dependents, except for tops
    if (targetEdge && targetEdge.peer && !target.isTop) {
      return CONFLICT
    }
    // skip this test if there's a current node, because we might be able
    // to dedupe against it anyway
    if (!current &&
      targetEdge &&
      !dep.satisfies(targetEdge) &&
      targetEdge !== this.edge) {
      return CONFLICT
    }
    return current ? this.checkCanPlaceCurrent() : this.checkCanPlaceNoCurrent()
  }
  // we know that the target has a dep by this name in its node_modules
  // already. Can return KEEP, REPLACE, or CONFLICT.
  checkCanPlaceCurrent () {
    const { preferDedupe, explicitRequest, current, target, edge, dep } = this
    if (dep.matches(current)) {
      if (current.satisfies(edge) || this.edgeOverride) {
        return explicitRequest ? REPLACE : KEEP
      }
    }
    const { version: curVer } = current
    const { version: newVer } = dep
    const tryReplace = curVer && newVer && semver.gte(newVer, curVer)
    if (tryReplace && dep.canReplace(current)) {
      // It's extremely rare that a replaceable node would be a conflict, if
      // the current one wasn't a conflict, but it is theoretically possible
      // if peer deps are pinned. In that case we treat it like any other
      // conflict, and keep trying.
      const cpp = this.canPlacePeers(REPLACE)
      if (cpp !== CONFLICT) {
        return cpp
      }
    }
    // ok, can't replace the current with new one, but maybe current is ok?
    if (current.satisfies(edge) && (!explicitRequest || preferDedupe)) {
      return KEEP
    }
    // if we prefer deduping, then try replacing newer with older
    if (preferDedupe && !tryReplace && dep.canReplace(current)) {
      const cpp = this.canPlacePeers(REPLACE)
      if (cpp !== CONFLICT) {
        return cpp
      }
    }
    // Check for interesting cases!
    // First, is this the deepest place that this thing can go, and NOT the
    // deepest place where the conflicting dep can go? If so, replace it,
    // and let it re-resolve deeper in the tree.
    const myDeepest = this.deepestNestingTarget
    // ok, i COULD be placed deeper, so leave the current one alone.
    if (target !== myDeepest) {
      return CONFLICT
    }
    // if we are not checking a peerDep, then we MUST place it here, in the
    // target that has a non-peer dep on it.
    if (!edge.peer && target === edge.from) {
      return this.canPlacePeers(REPLACE)
    }
    // if we aren't placing a peer in a set, then we're done here.
    // This is ignored because it SHOULD be redundant, as far as I can tell,
    // with the deepest target and target===edge.from tests. But until we
    // can prove that isn't possible, this condition is here for safety.
    /* istanbul ignore if - allegedly impossible */
    if (!this.parent && !edge.peer) {
      return CONFLICT
    }
    // check the deps in the peer group for each edge into that peer group
    // if ALL of them can be pushed deeper, or if it's ok to replace its
    // members with the contents of the new peer group, then we're good.
    let canReplace = true
    for (const [entryEdge, currentPeers] of peerEntrySets(current)) {
      if (entryEdge === this.edge || entryEdge === this.peerEntryEdge) {
        continue
      }
      // First, see if it's ok to just replace the peerSet entirely.
      // we do this by walking out from the entryEdge, because in a case like
      // this:
      //
      // v -> PEER(a@1||2)
      // a@1 -> PEER(b@1)
      // a@2 -> PEER(b@2)
      // b@1 -> PEER(a@1)
      // b@2 -> PEER(a@2)
      //
      // root
      // +-- v
      // +-- a@2
      // +-- b@2
      //
      // Trying to place a peer group of (a@1, b@1) would fail to note that
      // they can be replaced, if we did it by looping 1 by 1. If we are
      // replacing something, we don't have to check its peer deps, because
      // the peerDeps in the placed peerSet will presumably satisfy.
      const entryNode = entryEdge.to
      const entryRep = dep.parent.children.get(entryNode.name)
      if (entryRep) {
        if (entryRep.canReplace(entryNode, dep.parent.children.keys())) {
          continue
        }
      }
      let canClobber = !entryRep
      if (!entryRep) {
        // walk the current peer set checking that every member the new
        // peer set would replace is satisfied by its replacement
        const peerReplacementWalk = new Set([entryNode])
        OUTER: for (const currentPeer of peerReplacementWalk) {
          for (const edge of currentPeer.edgesOut.values()) {
            if (!edge.peer || !edge.valid) {
              continue
            }
            const rep = dep.parent.children.get(edge.name)
            if (!rep) {
              if (edge.to) {
                peerReplacementWalk.add(edge.to)
              }
              continue
            }
            if (!rep.satisfies(edge)) {
              canClobber = false
              break OUTER
            }
          }
        }
      }
      if (canClobber) {
        continue
      }
      // ok, we can't replace, but maybe we can nest the current set deeper?
      let canNestCurrent = true
      for (const currentPeer of currentPeers) {
        if (!canNestCurrent) {
          break
        }
        // still possible to nest this peerSet
        const curDeep = deepestNestingTarget(entryEdge.from, currentPeer.name)
        if (curDeep === target || target.isDescendantOf(curDeep)) {
          canNestCurrent = false
          canReplace = false
        }
        if (canNestCurrent) {
          continue
        }
      }
    }
    // if we can nest or replace all the current peer groups, we can replace.
    if (canReplace) {
      return this.canPlacePeers(REPLACE)
    }
    return CONFLICT
  }
  // no node by this name in the target. Can return OK or CONFLICT.
  checkCanPlaceNoCurrent () {
    const { target, peerEntryEdge, dep, name } = this
    // check to see what that name resolves to here, and who may depend on
    // being able to reach it by crawling up past the parent. we know
    // that it's not the target's direct child node, and if it was a direct
    // dep of the target, we would have conflicted earlier.
    const current = target !== peerEntryEdge.from && target.resolve(name)
    if (current) {
      for (const edge of current.edgesIn.values()) {
        if (edge.from.isDescendantOf(target) && edge.valid) {
          if (!dep.satisfies(edge)) {
            return CONFLICT
          }
        }
      }
    }
    // no objections, so this is fine as long as peers are ok here.
    return this.canPlacePeers(OK)
  }
  // deepest location this dep could be nested, bounded by the parent
  // peer-placement when this is part of a peer set
  get deepestNestingTarget () {
    const start = this.parent ? this.parent.deepestNestingTarget
      : this.edge.from
    return deepestNestingTarget(start, this.name)
  }
  // all descendant checks that resolved to CONFLICT
  get conflictChildren () {
    return this.allChildren.filter(c => c.canPlace === CONFLICT)
  }
  // flattened list of all descendant CanPlaceDep checks
  get allChildren () {
    const set = new Set(this.children)
    for (const child of set) {
      for (const grandchild of child.children) {
        set.add(grandchild)
      }
    }
    return [...set]
  }
  // the root of this chain of peer-placement checks
  get top () {
    return this.parent ? this.parent.top : this
  }
  // check if peers can go here. returns state or CONFLICT
  canPlacePeers (state) {
    this.canPlaceSelf = state
    if (this._canPlacePeers) {
      return this._canPlacePeers
    }
    // TODO: represent peerPath in ERESOLVE error somehow?
    const peerPath = [...this.peerPath, this.dep]
    let sawConflict = false
    for (const peerEdge of this.dep.edgesOut.values()) {
      if (!peerEdge.peer || !peerEdge.to || peerPath.includes(peerEdge.to)) {
        continue
      }
      const peer = peerEdge.to
      // it may be the case that the *initial* dep can be nested, but a peer
      // of that dep needs to be placed shallower, because the target has
      // a peer dep on the peer as well.
      const target = deepestNestingTarget(this.target, peer.name)
      const cpp = new CanPlaceDep({
        dep: peer,
        target,
        parent: this,
        edge: peerEdge,
        peerPath,
        // always place peers in preferDedupe mode
        preferDedupe: true,
      })
      /* istanbul ignore next */
      debug(() => {
        if (this.children.some(c => c.dep === cpp.dep)) {
          throw new Error('checking same dep repeatedly')
        }
      })
      this.children.push(cpp)
      if (cpp.canPlace === CONFLICT) {
        sawConflict = true
      }
    }
    this._canPlacePeers = sawConflict ? CONFLICT : state
    return this._canPlacePeers
  }
  // what is the node that is causing this peerSet to be placed?
  get peerSetSource () {
    return this.parent ? this.parent.peerSetSource : this.edge.from
  }
  // the edge into the entry node of this peer set
  get peerEntryEdge () {
    return this.top.edge
  }
  static get CONFLICT () {
    return CONFLICT
  }
  static get OK () {
    return OK
  }
  static get REPLACE () {
    return REPLACE
  }
  static get KEEP () {
    return KEEP
  }
  // human-readable form of the result symbol (Symbol#description)
  get description () {
    const { canPlace } = this
    return canPlace && canPlace.description ||
      /* istanbul ignore next - old node affordance */ canPlace
  }
}
module.exports = CanPlaceDep

View File

@@ -0,0 +1,50 @@
// package children are represented with a Map object, but many file systems
// are case-insensitive and unicode-normalizing, so we need to treat
// node.children.get('FOO') and node.children.get('foo') as the same thing.
module.exports = class CIMap extends Map {
#keys = new Map()
constructor (items = []) {
super()
for (const [key, val] of items) {
this.set(key, val)
}
}
#normKey (key) {
if (typeof key !== 'string') {
return key
}
return key.normalize('NFKD').toLowerCase()
}
get (key) {
const normKey = this.#normKey(key)
return this.#keys.has(normKey) ? super.get(this.#keys.get(normKey))
: undefined
}
set (key, val) {
const normKey = this.#normKey(key)
if (this.#keys.has(normKey)) {
super.delete(this.#keys.get(normKey))
}
this.#keys.set(normKey, key)
return super.set(key, val)
}
delete (key) {
const normKey = this.#normKey(key)
if (this.#keys.has(normKey)) {
const prevKey = this.#keys.get(normKey)
this.#keys.delete(normKey)
return super.delete(prevKey)
}
}
has (key) {
const normKey = this.#normKey(key)
return this.#keys.has(normKey) && super.has(this.#keys.get(normKey))
}
}

View File

@@ -0,0 +1,45 @@
// take a path and a resolved value, and turn it into a resolution from
// the given new path. This is used with converting a package.json's
// relative file: path into one suitable for a lockfile, or between
// lockfiles, and for converting hosted git repos to a consistent url type.
const npa = require('npm-package-arg')
const relpath = require('./relpath.js')
// normalize a resolved value for storage: file/dir specs become file:
// URLs (optionally relative to toPath), hosted git repos become a
// consistent git+ URL, and registry specs pass through.
const consistentResolve = (resolved, fromPath, toPath, relPaths = false) => {
  if (!resolved) {
    return null
  }
  try {
    const hostedOpt = { noCommittish: false }
    const parsed = npa(resolved, fromPath)
    const { type, hosted } = parsed
    if (type === 'file' || type === 'directory') {
      // escape literal # so it isn't read as a committish separator
      const cleanFetchSpec = parsed.fetchSpec.replace(/#/g, '%23')
      if (relPaths && toPath) {
        return `file:${relpath(toPath, cleanFetchSpec)}`
      }
      return `file:${cleanFetchSpec}`
    }
    if (hosted) {
      const url = hosted.auth ? hosted.https(hostedOpt) : hosted.sshurl(hostedOpt)
      return `git+${url}`
    }
    if (type === 'git') {
      return parsed.saveSpec
    }
    return parsed.rawSpec === '*' ? parsed.raw : parsed.rawSpec
  } catch (_) {
    // whatever we passed in was not acceptable to npa.
    // leave it 100% untouched.
    return resolved
  }
}
module.exports = consistentResolve

32
package/node_modules/@npmcli/arborist/lib/debug.js generated vendored Normal file
View File

@@ -0,0 +1,32 @@
// certain assertions we should do only when testing arborist itself, because
// they are too expensive or aggressive and would break user programs if we
// miss a situation where they are actually valid.
//
// call like this:
//
// /* istanbul ignore next - debug check */
// debug(() => {
// if (someExpensiveCheck)
// throw new Error('expensive check should have returned false')
// })
// run in debug mode if explicitly requested, running arborist tests,
// or working in the arborist project directory.
// true when debug assertions should run: explicitly enabled via
// ARBORIST_DEBUG=1 / NODE_DEBUG=arborist, or when running arborist's own
// test/snap scripts or working inside the arborist project directory
const debug = process.env.ARBORIST_DEBUG !== '0' && (
  process.env.ARBORIST_DEBUG === '1' ||
  /\barborist\b/.test(process.env.NODE_DEBUG || '') ||
  process.env.npm_package_name === '@npmcli/arborist' &&
  ['test', 'snap'].includes(process.env.npm_lifecycle_event) ||
  process.cwd() === require('node:path').resolve(__dirname, '..')
)
// in debug mode, run the provided function; otherwise do nothing
module.exports = debug ? fn => fn() : () => {}
// wrap the log prefix in red ANSI codes only when stderr is a TTY
const red = process.stderr.isTTY ? msg => `\x1B[31m${msg}\x1B[39m` : m => m
// debug-only logger: prefixes every output line with pid + first argument
module.exports.log = (...msg) => module.exports(() => {
  const { format } = require('node:util')
  const prefix = `\n${process.pid} ${red(format(msg.shift()))} `
  msg = (prefix + format(...msg).trim().split('\n').join(prefix)).trim()
  /* eslint-disable-next-line no-console */
  console.error(msg)
})

View File

@@ -0,0 +1,18 @@
// given a starting node, what is the *deepest* target where name could go?
// This is not on the Node class for the simple reason that we sometimes
// need to check the deepest *potential* target for a Node that is not yet
// added to the tree where we are checking.
const deepestNestingTarget = (start, name) => {
  for (const target of start.ancestry()) {
    // always stop at the project root, a global top, or a parentless node
    if (target.isProjectRoot || !target.resolveParent || target.globalTop) {
      return target
    }
    // a peer dep on this name forces placement shallower
    // (note: this will skip past the first target if edge is peer)
    const targetEdge = target.edgesOut.get(name)
    const blockedByPeer = targetEdge && targetEdge.peer
    if (!blockedByPeer) {
      return target
    }
  }
}
module.exports = deepestNestingTarget

150
package/node_modules/@npmcli/arborist/lib/dep-valid.js generated vendored Normal file
View File

@@ -0,0 +1,150 @@
// Do not rely on package._fields, so that we don't throw
// false failures if a tree is generated by other clients.
// Only relies on child.resolved, which MAY come from
// client-specific package.json meta _fields, but most of
// the time will be pulled out of a lockfile
const semver = require('semver')
const npa = require('npm-package-arg')
const { relative } = require('node:path')
const fromPath = require('./from-path.js')
// check whether `child` satisfies the `requested` spec from `requestor`.
// string specs are resolved first; unresolvable or unsupported specs push
// an error onto requestor.errors and return false.
const depValid = (child, requested, requestor) => {
  // NB: we don't do much to verify 'tag' type requests.
  // Just verify that we got a remote resolution. Presumably, it
  // came from a registry and was tagged at some point.
  if (typeof requested === 'string') {
    try {
      // tarball/dir must have resolved to the same tgz on disk, but for
      // file: deps that depend on other files/dirs, we must resolve the
      // location based on the *requestor* file/dir, not where it ends up.
      // '' is equivalent to '*'
      const where = fromPath(requestor, requestor.edgesOut.get(child.name))
      requested = npa.resolve(child.name, requested || '*', where)
    } catch (er) {
      // Not invalid because the child doesn't match, but because
      // the spec itself is not supported. Nothing would match,
      // so the edge is definitely not valid and never can be.
      er.dependency = child.name
      er.requested = requested
      requestor.errors.push(er)
      return false
    }
  }
  // if the lockfile is super old, or hand-modified,
  // then it's possible to hit this state.
  if (!requested) {
    const er = new Error('Invalid dependency specifier')
    er.dependency = child.name
    er.requested = requested
    requestor.errors.push(er)
    return false
  }
  const { type } = requested
  if (type === 'range' || type === 'version') {
    // a bare '*' range matches anything
    if (type === 'range' && requested.fetchSpec === '*') {
      return true
    }
    // if it's a version or a range other than '*', semver it
    return semver.satisfies(child.version, requested.fetchSpec, true)
  }
  if (type === 'directory') {
    return linkValid(child, requested, requestor)
  }
  if (type === 'file') {
    return tarballValid(child, requested, requestor)
  }
  if (type === 'alias') {
    // check that the alias target is valid
    return depValid(child, requested.subSpec, requestor)
  }
  if (type === 'tag') {
    // if it's a tag, we just verify that it has a tarball resolution
    // presumably, it came from the registry and was tagged at some point
    return child.resolved && npa(child.resolved).type === 'remote'
  }
  if (type === 'remote') {
    // verify that we got it from the desired location
    return child.resolved === requested.fetchSpec
  }
  if (type === 'git') {
    // if it's a git type, verify that they're the same repo
    //
    // if it specifies a definite commit, then it must have the
    // same commit to be considered the same repo
    //
    // if it has a #semver:<range> specifier, verify that the
    // version in the package is in the semver range
    const resRepo = npa(child.resolved || '')
    const resHost = resRepo.hosted
    const reqHost = requested.hosted
    const reqCommit = /^[a-fA-F0-9]{40}$/.test(requested.gitCommittish || '')
    const nc = { noCommittish: !reqCommit }
    if (!resHost) {
      if (resRepo.fetchSpec !== requested.fetchSpec) {
        return false
      }
    } else if (reqHost?.ssh(nc) !== resHost.ssh(nc)) {
      return false
    }
    if (!requested.gitRange) {
      return true
    }
    return semver.satisfies(child.package.version, requested.gitRange, {
      loose: true,
    })
  }
  // unpossible, just being cautious
  const er = new Error('Unsupported dependency type')
  er.dependency = child.name
  er.requested = requested
  requestor.errors.push(er)
  return false
}
// a 'directory' dep is valid when the child is a link to that very folder
const linkValid = (child, requested, requestor) => {
  const isLink = Boolean(child.isLink)
  // if we're installing links and the node is a link, then it's invalid because we want
  // a real node to be there. Except for workspaces. They are always links.
  if (requestor.installLinks && !child.isWorkspace) {
    return !isLink
  }
  // directory must be a link to the specified folder
  if (!isLink) {
    return false
  }
  return relative(child.realpath, requested.fetchSpec) === ''
}
// a 'file' (tarball) dep is valid when it resolved to the same file
const tarballValid = (child, requested) => {
  // a link can never satisfy a tarball spec
  if (child.isLink) {
    return false
  }
  if (child.resolved) {
    // normalize windows path separators before comparing
    const resolved = child.resolved.replace(/\\/g, '/')
    const expect = `file:${requested.fetchSpec.replace(/\\/g, '/')}`
    return resolved === expect
  }
  // if we have a legacy mutated package.json file. we can't be 100%
  // sure that it resolved to the same file, but if it was the same
  // request, that's a pretty good indicator of sameness.
  if (child.package._requested) {
    return child.package._requested.saveSpec === requested.saveSpec
  }
  // ok, we're probably dealing with some legacy cruft here, not much
  // we can do at this point unfortunately.
  return false
}
// an edge is valid if the child satisfies the requested spec, or, when a
// string `accept` spec is present (e.g. from an alias/override), satisfies
// that instead
module.exports = (child, requested, accept, requestor) =>
  depValid(child, requested, requestor) ||
  (typeof accept === 'string' ? depValid(child, accept, requestor) : false)

306
package/node_modules/@npmcli/arborist/lib/diff.js generated vendored Normal file
View File

@@ -0,0 +1,306 @@
// a tree representing the difference between two trees
// A Diff node's parent is not necessarily the parent of
// the node location it refers to, but rather the highest level
// node that needs to be either changed or removed.
// Thus, the root Diff node is the shallowest change required
// for a given branch of the tree being mutated.
const { depth } = require('treeverse')
const { existsSync } = require('node:fs')
const ssri = require('ssri')
// A node in the diff tree between the actual and ideal trees. The action
// for each (actual, ideal) pair is computed by getAction() below.
class Diff {
  constructor ({ actual, ideal, filterSet, shrinkwrapInflated }) {
    this.filterSet = filterSet
    this.shrinkwrapInflated = shrinkwrapInflated
    this.children = []
    this.actual = actual
    this.ideal = ideal
    if (this.ideal) {
      this.resolved = this.ideal.resolved
      this.integrity = this.ideal.integrity
    }
    // one of 'ADD', 'REMOVE', 'CHANGE', or null (leave alone)
    this.action = getAction(this)
    this.parent = null
    // the set of leaf nodes that we rake up to the top level
    this.leaves = []
    // the set of nodes that don't change in this branch of the tree
    this.unchanged = []
    // the set of nodes that will be removed in this branch of the tree
    this.removed = []
  }
  // build the full diff tree for the given actual/ideal trees, optionally
  // restricted to the dependency closure of filterNodes
  static calculate ({
    actual,
    ideal,
    filterNodes = [],
    shrinkwrapInflated = new Set(),
  }) {
    // if there's a filterNode, then:
    // - get the path from the root to the filterNode. The root or
    // root.target should have an edge either to the filterNode or
    // a link to the filterNode. If not, abort. Add the path to the
    // filterSet.
    // - Add set of Nodes depended on by the filterNode to filterSet.
    // - Anything outside of that set should be ignored by getChildren
    const filterSet = new Set()
    const extraneous = new Set()
    for (const filterNode of filterNodes) {
      const { root } = filterNode
      if (root !== ideal && root !== actual) {
        throw new Error('invalid filterNode: outside idealTree/actualTree')
      }
      const rootTarget = root.target
      // the edge from the root into (or linking to) the filter node
      const edge = [...rootTarget.edgesOut.values()].filter(e => {
        return e.to && (e.to === filterNode || e.to.target === filterNode)
      })[0]
      filterSet.add(root)
      filterSet.add(rootTarget)
      filterSet.add(ideal)
      filterSet.add(actual)
      if (edge && edge.to) {
        filterSet.add(edge.to)
        filterSet.add(edge.to.target)
      }
      filterSet.add(filterNode)
      // walk the dependency closure of the filter node in BOTH trees,
      // adding everything reachable to the filter set
      depth({
        tree: filterNode,
        visit: node => filterSet.add(node),
        getChildren: node => {
          node = node.target
          const loc = node.location
          const idealNode = ideal.inventory.get(loc)
          const ideals = !idealNode ? []
            : [...idealNode.edgesOut.values()].filter(e => e.to).map(e => e.to)
          const actualNode = actual.inventory.get(loc)
          const actuals = !actualNode ? []
            : [...actualNode.edgesOut.values()].filter(e => e.to).map(e => e.to)
          // extraneous children of actual nodes get swept up too, so
          // they can be removed
          if (actualNode) {
            for (const child of actualNode.children.values()) {
              if (child.extraneous) {
                extraneous.add(child)
              }
            }
          }
          return ideals.concat(actuals)
        },
      })
    }
    for (const extra of extraneous) {
      filterSet.add(extra)
    }
    // walk both trees together, building the Diff tree from the root
    return depth({
      tree: new Diff({ actual, ideal, filterSet, shrinkwrapInflated }),
      getChildren,
      leave,
    })
  }
}
// decide what to do about one (actual, ideal) node pair:
// 'REMOVE', 'ADD', 'CHANGE', or null for "leave it alone"
const getAction = ({ actual, ideal }) => {
  // not wanted in the ideal tree: remove it
  if (!ideal) {
    return 'REMOVE'
  }
  // bundled meta-deps are copied over to the ideal tree when we visit it,
  // so they'll appear to be missing here. There's no need to handle them
  // in the diff, though, because they'll be replaced at reify time anyway
  // Otherwise, add the missing node.
  if (!actual) {
    if (ideal.inDepBundle) {
      return null
    }
    return 'ADD'
  }
  // always ignore the root node
  if (ideal.isRoot && actual.isRoot) {
    return null
  }
  // if the versions don't match, it's a change no matter what
  if (ideal.version !== actual.version) {
    return 'CHANGE'
  }
  const binsExist = ideal.binPaths.every((path) => existsSync(path))
  // top nodes, links, and git deps won't have integrity, but do have resolved
  // if neither node has integrity, the bins exist, and either (a) neither
  // node has a resolved value or (b) they both do and match, then we can
  // leave this one alone since we already know the versions match due to
  // the condition above. The "neither has resolved" case (a) cannot be
  // treated as a 'mark CHANGE and refetch', because shrinkwraps, bundles,
  // and link deps may lack this information, and we don't want to try to
  // go to the registry for something that isn't there.
  const noIntegrity = !ideal.integrity && !actual.integrity
  const noResolved = !ideal.resolved && !actual.resolved
  const resolvedMatch = ideal.resolved && ideal.resolved === actual.resolved
  if (noIntegrity && binsExist && (resolvedMatch || noResolved)) {
    return null
  }
  // otherwise, verify that it's the same bits
  // note that if ideal has integrity, and resolved doesn't, we treat
  // that as a 'change', so that it gets re-fetched and locked down.
  const sameBits = ideal.integrity && actual.integrity &&
    ssri.parse(ideal.integrity).match(actual.integrity)
  if (!sameBits || !binsExist) {
    return 'CHANGE'
  }
  return null
}
// collect path -> child node for a node, including the children of its
// fsChildren (but not the fsChildren themselves); empty Map for no node
const allChildren = node => {
  if (!node) {
    return new Map()
  }
  // if the node is root, and also a link, then what we really
  // want is to traverse the target's children
  if (node.isRoot && node.isLink) {
    return allChildren(node.target)
  }
  const kids = new Map()
  const parents = [node, ...node.fsChildren]
  for (const parent of parents) {
    for (const kid of parent.children.values()) {
      kids.set(kid.path, kid)
    }
  }
  return kids
}
// functions for the walk options when we traverse the trees
// to create the diff tree
//
// getChildren computes the child Diff nodes for one diff entry by pairing
// up the children of its actual and ideal nodes by path and delegating each
// pair to diffNode.  Leaf diffs (no changed children) are recorded on
// diff.leaves.
const getChildren = diff => {
  const children = []
  const {
    actual,
    ideal,
    unchanged,
    removed,
    filterSet,
    shrinkwrapInflated,
  } = diff

  // Note: we DON'T diff fsChildren themselves, because they are either
  // included in the package contents, or part of some other project, and
  // will never appear in legacy shrinkwraps anyway. but we _do_ include the
  // child nodes of fsChildren, because those are nodes that we are typically
  // responsible for installing.
  const actualKids = allChildren(actual)
  const idealKids = allChildren(ideal)

  if (ideal && ideal.hasShrinkwrap && !shrinkwrapInflated.has(ideal)) {
    // Guaranteed to have a diff.leaves here, because we will always be
    // called with a proper Diff object when ideal has a shrinkwrap
    // that has not been inflated.
    diff.leaves.push(diff)
    return children
  }

  // walk the union of paths present in either tree, diffing pairwise
  const paths = new Set([...actualKids.keys(), ...idealKids.keys()])
  for (const path of paths) {
    const actual = actualKids.get(path)
    const ideal = idealKids.get(path)
    diffNode({
      actual,
      ideal,
      children,
      unchanged,
      removed,
      filterSet,
      shrinkwrapInflated,
    })
  }

  // no changed children means this diff is itself a leaf of the diff tree
  if (diff.leaves && !children.length) {
    diff.leaves.push(diff)
  }
  return children
}
// Diff a single (actual, ideal) node pair: compute the action, push a new
// Diff child for any change, or recurse into the children of an unchanged
// node.  Mutates the `children`, `unchanged`, and `removed` accumulators
// supplied by the caller (getChildren).
const diffNode = ({
  actual,
  ideal,
  children,
  unchanged,
  removed,
  filterSet,
  shrinkwrapInflated,
}) => {
  // when a filter set is in play, skip anything outside of it
  if (filterSet.size && !(filterSet.has(ideal) || filterSet.has(actual))) {
    return
  }

  const action = getAction({ actual, ideal })

  // if it's a match, then get its children
  // otherwise, this is the child diff node
  if (action || (!shrinkwrapInflated.has(ideal) && ideal.hasShrinkwrap)) {
    if (action === 'REMOVE') {
      removed.push(actual)
    }
    children.push(new Diff({ actual, ideal, filterSet, shrinkwrapInflated }))
  } else {
    unchanged.push(ideal)
    // !*! Weird dirty hack warning !*!
    //
    // Bundled deps aren't loaded in the ideal tree, because we don't know
    // what they are going to be without unpacking. Swap them over now if
    // the bundling node isn't changing, so we don't prune them later.
    //
    // It's a little bit dirty to be doing this here, since it means that
    // diffing trees can mutate them, but otherwise we have to walk over
    // all unchanging bundlers and correct the diff later, so it's more
    // efficient to just fix it while we're passing through already.
    //
    // Note that moving over a bundled dep will break the links to other
    // deps under this parent, which may have been transitively bundled.
    // Breaking those links means that we'll no longer see the transitive
    // dependency, meaning that it won't appear as bundled any longer!
    // In order to not end up dropping transitively bundled deps, we have
    // to get the list of nodes to move, then move them all at once, rather
    // than moving them one at a time in the first loop.
    const bd = ideal.package.bundleDependencies
    if (actual && bd && bd.length) {
      const bundledChildren = []
      for (const node of actual.children.values()) {
        if (node.inBundle) {
          bundledChildren.push(node)
        }
      }
      // reparent in a second pass (see the hack warning above)
      for (const node of bundledChildren) {
        node.parent = ideal
      }
    }
    children.push(...getChildren({
      actual,
      ideal,
      unchanged,
      removed,
      filterSet,
      shrinkwrapInflated,
    }))
  }
}
// set the parentage in the leave step so that we aren't attaching
// child nodes only to remove them later.  also bubble up the leaves,
// unchanged, and removed nodes so that we can move them out of staging
// in the reification step.  Returns the (mutated) diff.
const leave = (diff, children) => {
  for (const child of children) {
    child.parent = diff
    diff.leaves.push(...child.leaves)
    diff.unchanged.push(...child.unchanged)
    diff.removed.push(...child.removed)
  }
  diff.children = children
  return diff
}
module.exports = Diff

301
package/node_modules/@npmcli/arborist/lib/edge.js generated vendored Normal file
View File

@@ -0,0 +1,301 @@
// An edge in the dependency graph
// Represents a dependency relationship of some kind
const util = require('node:util')
const npa = require('npm-package-arg')
const depValid = require('./dep-valid.js')
// Lightweight serializable snapshot of an Edge, produced by Edge#toJSON
// and used for util.inspect output.  Optional fields are only present
// when they carry information.
class ArboristEdge {
  constructor (edge) {
    this.name = edge.name
    this.spec = edge.spec
    this.type = edge.type

    const fromLocation = edge.from?.location
    // a `from` location of '' (the root) is meaningful, so only skip it
    // when it is null or undefined
    if (fromLocation != null) {
      this.from = fromLocation
    }

    const toLocation = edge.to?.location
    if (toLocation) {
      this.to = toLocation
    }

    if (edge.error) {
      this.error = edge.error
    }

    if (edge.peerConflicted) {
      this.peerConflicted = true
    }

    const overrideValue = edge.overrides?.value
    if (overrideValue) {
      this.overridden = overrideValue
    }
  }
}
// Models a single dependency relationship from one node to another:
// prod, dev, optional, peer, peerOptional, or workspace.  Resolution of
// the target node and the error state are cached and refreshed by reload().
class Edge {
  #accept
  #error
  #explanation
  #from
  #name
  #spec
  #to
  #type

  // the set of valid edge types
  static types = Object.freeze([
    'prod',
    'dev',
    'optional',
    'peer',
    'peerOptional',
    'workspace',
  ])

  // XXX where is this used?
  static errors = Object.freeze([
    'DETACHED',
    'MISSING',
    'PEER LOCAL',
    'INVALID',
  ])

  // options: { type, name, spec, accept?, from, overrides? }
  // Attaches itself to `from` (replacing any existing edge of the same
  // name) and immediately resolves its target via reload(true).
  constructor (options) {
    const { type, name, spec, accept, from, overrides } = options

    // XXX are all of these error states even possible?
    if (typeof spec !== 'string') {
      throw new TypeError('must provide string spec')
    }
    if (!Edge.types.includes(type)) {
      throw new TypeError(`invalid type: ${type}\n(valid types are: ${Edge.types.join(', ')})`)
    }
    if (type === 'workspace' && npa(spec).type !== 'directory') {
      throw new TypeError('workspace edges must be a symlink')
    }
    if (typeof name !== 'string') {
      throw new TypeError('must provide dependency name')
    }
    if (!from) {
      throw new TypeError('must provide "from" node')
    }
    if (accept !== undefined) {
      if (typeof accept !== 'string') {
        throw new TypeError('accept field must be a string if provided')
      }
      // an empty accept string means "accept anything"
      this.#accept = accept || '*'
    }
    if (overrides !== undefined) {
      this.overrides = overrides
    }

    this.#name = name
    this.#type = type
    this.#spec = spec
    this.#explanation = null
    this.#from = from
    // replace any existing same-named edge out of this node
    from.edgesOut.get(this.#name)?.detach()
    from.addEdgeOut(this)
    // hard reload: resolve the target and clear any cached error state
    this.reload(true)
    this.peerConflicted = false
  }

  // true if the given node would satisfy this edge
  satisfiedBy (node) {
    if (node.name !== this.#name) {
      return false
    }

    // NOTE: this condition means we explicitly do not support overriding
    // bundled or shrinkwrapped dependencies
    if (node.hasShrinkwrap || node.inShrinkwrap || node.inBundle) {
      return depValid(node, this.rawSpec, this.#accept, this.#from)
    }
    return depValid(node, this.spec, this.#accept, this.#from)
  }

  // return the edge data, and an explanation of how that edge came to be here
  // (cached until the next reload or detach)
  explain (seen = []) {
    if (!this.#explanation) {
      const explanation = {
        type: this.#type,
        name: this.#name,
        spec: this.spec,
      }
      // spec differing from rawSpec implies an override was applied
      if (this.rawSpec !== this.spec) {
        explanation.rawSpec = this.rawSpec
        explanation.overridden = true
      }
      if (this.bundled) {
        explanation.bundled = this.bundled
      }
      if (this.error) {
        explanation.error = this.error
      }
      if (this.#from) {
        explanation.from = this.#from.explain(null, seen)
      }
      this.#explanation = explanation
    }
    return this.#explanation
  }

  // true if the dep is listed in the from node's bundleDependencies
  get bundled () {
    return !!this.#from?.package?.bundleDependencies?.includes(this.#name)
  }

  get workspace () {
    return this.#type === 'workspace'
  }

  get prod () {
    return this.#type === 'prod'
  }

  get dev () {
    return this.#type === 'dev'
  }

  // note: peerOptional counts as both optional and peer
  get optional () {
    return this.#type === 'optional' || this.#type === 'peerOptional'
  }

  get peer () {
    return this.#type === 'peer' || this.#type === 'peerOptional'
  }

  get type () {
    return this.#type
  }

  get name () {
    return this.#name
  }

  // the spec as written in the manifest, ignoring overrides
  get rawSpec () {
    return this.#spec
  }

  // the effective spec, with any applicable override applied.  An override
  // value starting with `$` is a reference to a dependency field entry on
  // the root package.
  get spec () {
    if (this.overrides?.value && this.overrides.value !== '*' && this.overrides.name === this.#name) {
      if (this.overrides.value.startsWith('$')) {
        const ref = this.overrides.value.slice(1)
        // we may be a virtual root, if we are we want to resolve reference overrides
        // from the real root, not the virtual one
        const pkg = this.#from.sourceReference
          ? this.#from.sourceReference.root.package
          : this.#from.root.package
        if (pkg.devDependencies?.[ref]) {
          return pkg.devDependencies[ref]
        }
        if (pkg.optionalDependencies?.[ref]) {
          return pkg.optionalDependencies[ref]
        }
        if (pkg.dependencies?.[ref]) {
          return pkg.dependencies[ref]
        }
        if (pkg.peerDependencies?.[ref]) {
          return pkg.peerDependencies[ref]
        }
        throw new Error(`Unable to resolve reference ${this.overrides.value}`)
      }
      return this.overrides.value
    }
    return this.#spec
  }

  get accept () {
    return this.#accept
  }

  get valid () {
    return !this.error
  }

  get missing () {
    return this.error === 'MISSING'
  }

  get invalid () {
    return this.error === 'INVALID'
  }

  get peerLocal () {
    return this.error === 'PEER LOCAL'
  }

  // lazily computed error state.  'OK' is cached internally as a sentinel
  // but exposed to callers as null.
  get error () {
    if (!this.#error) {
      if (!this.#to) {
        if (this.optional) {
          // an unresolved optional dep is not an error
          this.#error = null
        } else {
          this.#error = 'MISSING'
        }
      } else if (this.peer && this.#from === this.#to.parent && !this.#from.isTop) {
        this.#error = 'PEER LOCAL'
      } else if (!this.satisfiedBy(this.#to)) {
        this.#error = 'INVALID'
      } else {
        this.#error = 'OK'
      }
    }
    if (this.#error === 'OK') {
      return null
    }
    return this.#error
  }

  // re-resolve the target node and the applicable override rule.  When the
  // resolution changes, edgesIn sets are kept in sync and the cached error
  // is cleared; a hard reload clears the cached error regardless.
  reload (hard = false) {
    this.#explanation = null
    if (this.#from.overrides) {
      this.overrides = this.#from.overrides.getEdgeRule(this)
    } else {
      delete this.overrides
    }
    const newTo = this.#from.resolve(this.#name)
    if (newTo !== this.#to) {
      if (this.#to) {
        this.#to.edgesIn.delete(this)
      }
      this.#to = newTo
      this.#error = null
      if (this.#to) {
        this.#to.addEdgeIn(this)
      }
    } else if (hard) {
      this.#error = null
    }
  }

  // disconnect this edge from both its nodes
  detach () {
    this.#explanation = null
    if (this.#to) {
      this.#to.edgesIn.delete(this)
    }
    this.#from.edgesOut.delete(this.#name)
    this.#to = null
    this.#error = 'DETACHED'
    this.#from = null
  }

  get from () {
    return this.#from
  }

  get to () {
    return this.#to
  }

  toJSON () {
    return new ArboristEdge(this)
  }

  [util.inspect.custom] () {
    return this.toJSON()
  }
}
module.exports = Edge

30
package/node_modules/@npmcli/arborist/lib/from-path.js generated vendored Normal file
View File

@@ -0,0 +1,30 @@
// file dependencies need their dependencies resolved based on the location
// where the tarball was found, not the location where they end up getting
// installed. directory (ie, symlink) deps also need to be resolved based on
// their targets, but that's what realpath is
const { dirname } = require('node:path')
const npa = require('npm-package-arg')
// Resolve the base directory that a node's dependencies should be
// resolved against.  File deps resolve relative to where the tarball was
// found, not where the package is installed; everything else resolves
// from the node's realpath.
const fromPath = (node, edge) => {
  const overridden = edge &&
    edge.overrides &&
    edge.overrides.name === edge.name &&
    edge.overrides.value
  if (overridden) {
    // fromPath could be called with a node that has a virtual root, if that
    // happens we want to make sure we get the real root node when overrides
    // are in use. this is to allow things like overriding a dependency with a
    // tarball file that's a relative path from the project root
    const rootNode = node.sourceReference
      ? node.sourceReference.root
      : node.root
    return rootNode.realpath
  }

  if (node.resolved) {
    const spec = npa(node.resolved)
    if (spec?.type === 'file') {
      return dirname(spec.fetchSpec)
    }
  }

  return node.realpath
}
module.exports = fromPath

View File

@@ -0,0 +1,43 @@
// Given a set of nodes in a tree, and a filter function to test
// incoming edges to the dep set that should be ignored otherwise.
//
// find the set of deps that are only depended upon by nodes in the set, or
// their dependencies, or edges that are ignored.
//
// Used when figuring out what to prune when replacing a node with a newer
// version, or when an optional dep fails to install.
// Expand `set` to its full dependency closure (following edges accepted by
// edgeFilter), then repeatedly drop any member that something outside the
// set still depends on (again, via edges accepted by edgeFilter).  What
// remains is only reachable from within the set, and so is safe to prune.
const gatherDepSet = (set, edgeFilter) => {
  const deps = new Set(set)

  // grow to the full set of dependencies.  the set grows while we walk it,
  // and Set iteration picks up entries added during the loop.
  for (const node of deps) {
    for (const edge of node.edgesOut.values()) {
      if (edge.to && edgeFilter(edge)) {
        deps.add(edge.to)
      }
    }
  }

  // prune anything with a dependent outside the set, repeating until a
  // full pass makes no changes, or nothing is left.
  let mutated = true
  while (mutated && deps.size > 0) {
    mutated = false
    for (const dep of deps) {
      for (const edge of dep.edgesIn) {
        if (!deps.has(edge.from) && edgeFilter(edge)) {
          mutated = true
          deps.delete(dep)
          break
        }
      }
    }
  }

  return deps
}
module.exports = gatherDepSet

6
package/node_modules/@npmcli/arborist/lib/index.js generated vendored Normal file
View File

@@ -0,0 +1,6 @@
// Package entry point: the main export is the Arborist class itself, with
// the other public classes attached as properties for convenient access.
module.exports = require('./arborist/index.js')
module.exports.Arborist = module.exports
module.exports.Node = require('./node.js')
module.exports.Link = require('./link.js')
module.exports.Edge = require('./edge.js')
module.exports.Shrinkwrap = require('./shrinkwrap.js')

138
package/node_modules/@npmcli/arborist/lib/inventory.js generated vendored Normal file
View File

@@ -0,0 +1,138 @@
// a class to manage an inventory and set of indexes of a set of objects based
// on specific fields.
const { hasOwnProperty } = Object.prototype
const debug = require('./debug.js')
const keys = ['name', 'license', 'funding', 'realpath', 'packageName']
// A Map of location => node for every node in a tree, with secondary
// indexes on a fixed set of fields (name, license, funding, realpath,
// packageName) to support fast lookups via query().
class Inventory extends Map {
  // secondary indexes: Map(indexKey => Map(value => Set(nodes)))
  #index

  constructor () {
    super()
    this.#index = new Map()
    for (const key of keys) {
      this.#index.set(key, new Map())
    }
  }

  // XXX where is this used?
  get primaryKey () {
    return 'location'
  }

  // XXX where is this used?
  get indexes () {
    return [...keys]
  }

  // yield each stored node for which fn(node) is truthy
  * filter (fn) {
    for (const node of this.values()) {
      if (fn(node)) {
        yield node
      }
    }
  }

  // Add a node keyed by its location and index it under each index key.
  // Replaces any different node previously stored at the same location.
  // Nodes from a foreign root are rejected (throws in debug builds,
  // silently ignored otherwise).
  add (node) {
    const root = super.get('')
    if (root && node.root !== root && node.root !== root.root) {
      debug(() => {
        throw Object.assign(new Error('adding external node to inventory'), {
          root: root.path,
          node: node.path,
          nodeRoot: node.root.path,
        })
      })
      return
    }

    const current = super.get(node.location)
    if (current) {
      if (current === node) {
        return
      }
      this.delete(current)
    }
    super.set(node.location, node)

    for (const [key, map] of this.#index.entries()) {
      let val
      if (hasOwnProperty.call(node, key)) {
        // if the node has the value, use it even if it's false
        val = node[key]
      } else if (key === 'license' && node.package) {
        // handling for the outdated "licenses" array, just pick the first one
        // also support the alternative spelling "licence"
        if (node.package.license) {
          val = node.package.license
        } else if (node.package.licence) {
          val = node.package.licence
        } else if (Array.isArray(node.package.licenses)) {
          val = node.package.licenses[0]
        } else if (Array.isArray(node.package.licences)) {
          val = node.package.licences[0]
        }
      } else if (node[key]) {
        val = node[key]
      } else {
        val = node.package?.[key]
      }

      if (val && typeof val === 'object') {
        // We currently only use license and funding
        /* istanbul ignore next - not used */
        if (key === 'license') {
          val = val.type
        } else if (key === 'funding') {
          val = val.url
        }
      }

      if (!map.has(val)) {
        map.set(val, new Set())
      }
      map.get(val).add(node)
    }
  }

  // Remove a node and de-index it.  A no-op if this exact node is not the
  // one stored at its location.
  delete (node) {
    if (!this.has(node)) {
      return
    }

    super.delete(node.location)
    for (const [key, map] of this.#index.entries()) {
      let val
      if (node[key] !== undefined) {
        val = node[key]
      } else {
        val = node.package?.[key]
      }
      // NOTE(review): unlike add(), this does not unwrap object-valued
      // license/funding entries before the lookup, so a node indexed under
      // an unwrapped value may not be found here — TODO confirm whether
      // that case occurs in practice.
      const set = map.get(val)
      if (set) {
        set.delete(node)
        if (set.size === 0) {
          // bugfix: drop the now-empty bucket under the same key it was
          // looked up by.  This previously used `node[key]`, which is
          // undefined when val came from node.package, leaving a stale
          // empty Set behind in the index (visible via query(key)).
          map.delete(val)
        }
      }
    }
  }

  // Query a secondary index.  With two arguments, returns the Set of nodes
  // (possibly empty) indexed under `val`; with one, returns an iterator of
  // all values present in that index.
  query (key, val) {
    const map = this.#index.get(key)
    if (arguments.length === 2) {
      if (map.has(val)) {
        return map.get(val)
      }
      return new Set()
    }
    return map.keys()
  }

  // membership check: is this exact node the one stored at its location?
  has (node) {
    return super.get(node.location) === node
  }

  // direct Map.set is disallowed: the key must be derived from the node
  set () {
    throw new Error('direct set() not supported, use inventory.add(node)')
  }
}
module.exports = Inventory

126
package/node_modules/@npmcli/arborist/lib/link.js generated vendored Normal file
View File

@@ -0,0 +1,126 @@
const relpath = require('./relpath.js')
const Node = require('./node.js')
const _loadDeps = Symbol.for('Arborist.Node._loadDeps')
const _target = Symbol.for('_target')
const { dirname } = require('node:path')
// defined by Node class
const _delistFromMeta = Symbol.for('_delistFromMeta')
const _refreshLocation = Symbol.for('_refreshLocation')
// A Node subclass representing a symlinked package.  Dependencies and
// children are resolved on the target node, not on the Link itself.
class Link extends Node {
  // options: { root?, realpath?, target?, parent?, fsParent?, isStoreLink? }
  // Either a realpath or a target with a path must be provided; a target
  // Node is created at the realpath when one is not supplied.
  constructor (options) {
    const { root, realpath, target, parent, fsParent, isStoreLink } = options

    if (!realpath && !(target && target.path)) {
      throw new TypeError('must provide realpath for Link node')
    }

    super({
      ...options,
      realpath: realpath || target.path,
      root: root || (parent ? parent.root
      : fsParent ? fsParent.root
      : target ? target.root
      : null),
    })

    this.isStoreLink = isStoreLink || false

    if (target) {
      this.target = target
    } else if (this.realpath === this.root.path) {
      // a link pointing at the project root: the root is the target
      this.target = this.root
    } else {
      this.target = new Node({
        ...options,
        path: realpath,
        parent: null,
        fsParent: null,
        root: this.root,
      })
    }
  }

  // version comes from the target when one is assigned
  get version () {
    return this.target ? this.target.version : this.package.version || ''
  }

  get target () {
    return this[_target]
  }

  // Re-point the link at a new target node, keeping the metadata registry,
  // package reference, and realpath in sync.
  set target (target) {
    const current = this[_target]
    if (target === current) {
      return
    }

    if (!target) {
      // detaching from the current target
      if (current && current.linksIn) {
        current.linksIn.delete(this)
      }
      if (this.path) {
        this[_delistFromMeta]()
        this[_target] = null
        this.package = {}
        this[_refreshLocation]()
      } else {
        this[_target] = null
      }
      return
    }

    if (!this.path) {
      // temp node pending assignment to a tree
      // we know it's not in the inventory yet, because no path.
      if (target.path) {
        this.realpath = target.path
      } else {
        target.path = target.realpath = this.realpath
      }
      target.root = this.root
      this[_target] = target
      target.linksIn.add(this)
      this.package = target.package
      return
    }

    // have to refresh metadata, because either realpath or package
    // is very likely changing.
    // NOTE(review): this branch does not appear to assign this[_target] or
    // target.linksIn directly — presumably the `target.root` setter takes
    // care of the re-linking; TODO confirm against node.js's root setter.
    this[_delistFromMeta]()
    this.package = target.package
    this.realpath = target.path
    this[_refreshLocation]()

    target.root = this.root
  }

  // a link always resolves to the relative path to its target
  get resolved () {
    // the path/realpath guard is there for the benefit of setting
    // these things in the "wrong" order
    return this.path && this.realpath
      ? `file:${relpath(dirname(this.path), this.realpath).replace(/#/g, '%23')}`
      : null
  }

  // resolved is derived; assigning it is deliberately a no-op
  set resolved (r) {}

  // deps are resolved on the target, not the Link
  // so this is a no-op
  [_loadDeps] () {}

  // links can't have children, only their targets can
  // fix it to an empty list so that we can still call
  // things that iterate over them, just as a no-op
  get children () {
    return new Map()
  }

  set children (c) {}

  get isLink () {
    return true
  }
}
module.exports = Link

1475
package/node_modules/@npmcli/arborist/lib/node.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,38 @@
// when an optional dep fails to install, we need to remove the branch of the
// graph up to the first optionalDependencies, as well as any nodes that are
// only required by other nodes in the set.
//
// This function finds the set of nodes that will need to be removed in that
// case.
//
// Note that this is *only* going to work with trees where calcDepFlags
// has been called, because we rely on the node.optional flag.
const gatherDepSet = require('./gather-dep-set.js')
// Given an optional node, find the full set of nodes that should be
// removed if its install fails: walk up across non-optional edges to the
// optional boundary, then gather everything only reachable from inside
// that region.  Returns an empty Set for a non-optional node.
const optionalSet = node => {
  if (!node.optional) {
    return new Set()
  }

  // start with the node, then walk up the dependency graph until we
  // get to the boundaries that define the optional set. since the
  // node is optional, we know that all paths INTO this area of the
  // graph are optional, but there may be non-optional dependencies
  // WITHIN the area.  Set iteration picks up members added mid-loop.
  const region = new Set([node])
  for (const member of region) {
    for (const inEdge of member.edgesIn) {
      if (!inEdge.optional) {
        region.add(inEdge.from)
      }
    }
  }

  // now that we've hit the boundary, gather the rest of the nodes in
  // the optional section. that's the set of dependencies that are only
  // depended upon by other nodes within the set, or optional dependencies
  // from outside the set.
  return gatherDepSet(region, edge => !edge.optional)
}
module.exports = optionalSet

View File

@@ -0,0 +1,11 @@
// Return the resolved URL for lockfile serialization, or undefined when
// the omitLockfileRegistryResolved option asks for it to be withheld.
function overrideResolves (resolved, opts) {
  const { omitLockfileRegistryResolved = false } = opts
  return omitLockfileRegistryResolved ? undefined : resolved
}
module.exports = { overrideResolves }

View File

@@ -0,0 +1,147 @@
const npa = require('npm-package-arg')
const semver = require('semver')
// A parsed `overrides` configuration from package.json, represented as a
// tree of rules.  Each non-root rule has a key (a name, optionally with a
// version range), a value (the replacement spec), and nested children.
class OverrideSet {
  constructor ({ overrides, key, parent }) {
    this.parent = parent
    this.children = new Map()

    // a plain string override is shorthand for { '.': value }
    if (typeof overrides === 'string') {
      overrides = { '.': overrides }
    }

    // change a literal empty string to * so we can use truthiness checks on
    // the value property later
    if (overrides['.'] === '') {
      overrides['.'] = '*'
    }

    if (parent) {
      const spec = npa(key)
      if (!spec.name) {
        throw new Error(`Override without name: ${key}`)
      }

      this.name = spec.name
      // blank the name so toString() yields only the range portion of the key
      spec.name = ''
      this.key = key
      this.keySpec = spec.toString()
      this.value = overrides['.'] || this.keySpec
    }

    // the '.' entry is this rule's own value; everything else is a child rule
    for (const [key, childOverrides] of Object.entries(overrides)) {
      if (key === '.') {
        continue
      }

      const child = new OverrideSet({
        parent: this,
        key,
        overrides: childOverrides,
      })

      this.children.set(child.key, child)
    }
  }

  // Find the rule that applies to an edge (matched by name and spec), or
  // return this set itself when none match.
  getEdgeRule (edge) {
    for (const rule of this.ruleset.values()) {
      if (rule.name !== edge.name) {
        continue
      }

      // if keySpec is * we found our override
      if (rule.keySpec === '*') {
        return rule
      }

      let spec = npa(`${edge.name}@${edge.spec}`)
      if (spec.type === 'alias') {
        spec = spec.subSpec
      }

      if (spec.type === 'git') {
        if (spec.gitRange && semver.intersects(spec.gitRange, rule.keySpec)) {
          return rule
        }

        continue
      }

      if (spec.type === 'range' || spec.type === 'version') {
        if (semver.intersects(spec.fetchSpec, rule.keySpec)) {
          return rule
        }

        continue
      }

      // if we got this far, the spec type is one of tag, directory or file
      // which means we have no real way to make version comparisons, so we
      // just accept the override
      return rule
    }

    return this
  }

  // Find the rule matching a node's name and version, or return this set
  // itself when none match.
  getNodeRule (node) {
    for (const rule of this.ruleset.values()) {
      if (rule.name !== node.name) {
        continue
      }

      if (semver.satisfies(node.version, rule.keySpec) ||
        semver.satisfies(node.version, rule.value)) {
        return rule
      }
    }

    return this
  }

  // Like getNodeRule, but returns null (instead of this) when no rule
  // matches the node.
  getMatchingRule (node) {
    for (const rule of this.ruleset.values()) {
      if (rule.name !== node.name) {
        continue
      }

      if (semver.satisfies(node.version, rule.keySpec) ||
        semver.satisfies(node.version, rule.value)) {
        return rule
      }
    }

    return null
  }

  // walk from this rule up through its parents to the root set
  * ancestry () {
    for (let ancestor = this; ancestor; ancestor = ancestor.parent) {
      yield ancestor
    }
  }

  get isRoot () {
    return !this.parent
  }

  // The full set of rules applicable at this point in the tree.  Nearer
  // (deeper) definitions are inserted first and therefore win.
  get ruleset () {
    const ruleset = new Map()

    for (const override of this.ancestry()) {
      for (const kid of override.children.values()) {
        if (!ruleset.has(kid.key)) {
          ruleset.set(kid.key, kid)
        }
      }

      if (!override.isRoot && !ruleset.has(override.key)) {
        ruleset.set(override.key, override)
      }
    }

    return ruleset
  }
}
module.exports = OverrideSet

View File

@@ -0,0 +1,77 @@
const { LRUCache } = require('lru-cache')
const { getHeapStatistics } = require('node:v8')
const { log } = require('proc-log')
// This is an in-memory cache that Pacote uses for packuments.
// Packuments are usually cached on disk. This allows for rapid re-requests
// of the same packument to bypass disk reads. The tradeoff here is memory
// usage for disk reads.
class PackumentCache extends LRUCache {
static #heapLimit = Math.floor(getHeapStatistics().heap_size_limit)
#sizeKey
#disposed = new Set()
#log (...args) {
log.silly('packumentCache', ...args)
}
constructor ({
// How much of this.#heapLimit to take up
heapFactor = 0.25,
// How much of this.#maxSize we allow any one packument to take up
// Anything over this is not cached
maxEntryFactor = 0.5,
sizeKey = '_contentLength',
} = {}) {
const maxSize = Math.floor(PackumentCache.#heapLimit * heapFactor)
const maxEntrySize = Math.floor(maxSize * maxEntryFactor)
super({
maxSize,
maxEntrySize,
sizeCalculation: (p) => {
// Don't cache if we dont know the size
// Some versions of pacote set this to `0`, newer versions set it to `null`
if (!p[sizeKey]) {
return maxEntrySize + 1
}
if (p[sizeKey] < 10_000) {
return p[sizeKey] * 2
}
if (p[sizeKey] < 1_000_000) {
return Math.floor(p[sizeKey] * 1.5)
}
// It is less beneficial to store a small amount of super large things
// at the cost of all other packuments.
return maxEntrySize + 1
},
dispose: (v, k) => {
this.#disposed.add(k)
this.#log(k, 'dispose')
},
})
this.#sizeKey = sizeKey
this.#log(`heap:${PackumentCache.#heapLimit} maxSize:${maxSize} maxEntrySize:${maxEntrySize}`)
}
set (k, v, ...args) {
// we use disposed only for a logging signal if we are setting packuments that
// have already been evicted from the cache previously. logging here could help
// us tune this in the future.
const disposed = this.#disposed.has(k)
/* istanbul ignore next - this doesnt happen consistently so hard to test without resorting to unit tests */
if (disposed) {
this.#disposed.delete(k)
}
this.#log(k, 'set', `size:${v[this.#sizeKey]} disposed:${disposed}`)
return super.set(k, v, ...args)
}
has (k, ...args) {
const has = super.has(k, ...args)
this.#log(k, `cache-${has ? 'hit' : 'miss'}`)
return has
}
}
module.exports = PackumentCache

View File

@@ -0,0 +1,77 @@
// Given a node in a tree, return all of the peer dependency sets that
// it is a part of, with the entry (top or non-peer) edges into the sets
// identified.
//
// With this information, we can determine whether it is appropriate to
// replace the entire peer set with another (and remove the old one),
// push the set deeper into the tree, and so on.
//
// Returns a Map of { edge => Set(peerNodes) },
// For a given node, find every peer-dependency set it belongs to, keyed by
// the entry edge (the top or non-peer edge) that pulls the set in.
// Returns Map { entryEdge => Set(peerNodes) }.
const peerEntrySets = node => {
  // this is the union of all peer groups that the node is a part of
  // later, we identify all of the entry edges, and create a set of
  // 1 or more overlapping sets that this node is a part of.
  // (Set iteration picks up members added during the loop.)
  const unionSet = new Set([node])
  for (const node of unionSet) {
    for (const edge of node.edgesOut.values()) {
      if (edge.valid && edge.peer && edge.to) {
        unionSet.add(edge.to)
      }
    }
    for (const edge of node.edgesIn) {
      if (edge.valid && edge.peer) {
        unionSet.add(edge.from)
      }
    }
  }

  const entrySets = new Map()
  for (const peer of unionSet) {
    for (const edge of peer.edgesIn) {
      // if not valid, it doesn't matter anyway. either it's been previously
      // peerConflicted, or it's the thing we're interested in replacing.
      if (!edge.valid) {
        continue
      }
      // this is the entry point into the peer set
      if (!edge.peer || edge.from.isTop) {
        // get the subset of peers brought in by this peer entry edge
        const sub = new Set([peer])
        for (const peer of sub) {
          for (const edge of peer.edgesOut.values()) {
            if (edge.valid && edge.peer && edge.to) {
              sub.add(edge.to)
            }
          }
        }

        // if this subset does not include the node we are focused on,
        // then it is not relevant for our purposes. Example:
        //
        // a -> (b, c, d)
        // b -> PEER(d) b -> d -> e -> f <-> g
        // c -> PEER(f, h) c -> (f <-> g, h -> g)
        // d -> PEER(e) d -> e -> f <-> g
        // e -> PEER(f)
        // f -> PEER(g)
        // g -> PEER(f)
        // h -> PEER(g)
        //
        // The unionSet(e) will include c, but we don't actually care about
        // it. We only expanded to the edge of the peer nodes in order to
        // find the entry edges that caused the inclusion of peer sets
        // including (e), so we want:
        // Map{
        //   Edge(a->b) => Set(b, d, e, f, g)
        //   Edge(a->d) => Set(d, e, f, g)
        // }
        if (sub.has(node)) {
          entrySets.set(edge, sub)
        }
      }
    }
  }

  return entrySets
}
module.exports = peerEntrySets

569
package/node_modules/@npmcli/arborist/lib/place-dep.js generated vendored Normal file
View File

@@ -0,0 +1,569 @@
// Given a dep, a node that depends on it, and the edge representing that
// dependency, place the dep somewhere in the node's tree, and all of its
// peer dependencies.
//
// Handles all of the tree updating needed to place the dep, including
// removing replaced nodes, pruning now-extraneous or invalidated nodes,
// and saves a set of what was placed and what needs re-evaluation as
// a result.
const localeCompare = require('@isaacs/string-locale-compare')('en')
const { log } = require('proc-log')
const { redact } = require('@npmcli/redact')
const deepestNestingTarget = require('./deepest-nesting-target.js')
const CanPlaceDep = require('./can-place-dep.js')
const {
KEEP,
CONFLICT,
} = CanPlaceDep
const debug = require('./debug.js')
const Link = require('./link.js')
const gatherDepSet = require('./gather-dep-set.js')
const peerEntrySets = require('./peer-entry-sets.js')
class PlaceDep {
constructor (options) {
this.auditReport = options.auditReport
this.dep = options.dep
this.edge = options.edge
this.explicitRequest = options.explicitRequest
this.force = options.force
this.installLinks = options.installLinks
this.installStrategy = options.installStrategy
this.legacyPeerDeps = options.legacyPeerDeps
this.parent = options.parent || null
this.preferDedupe = options.preferDedupe
this.strictPeerDeps = options.strictPeerDeps
this.updateNames = options.updateNames
this.canPlace = null
this.canPlaceSelf = null
// XXX this only appears to be used by tests
this.checks = new Map()
this.children = []
this.needEvaluation = new Set()
this.peerConflict = null
this.placed = null
this.target = null
this.current = this.edge.to
this.name = this.edge.name
this.top = this.parent?.top || this
// nothing to do if the edge is fine as it is
if (this.edge.to &&
!this.edge.error &&
!this.explicitRequest &&
!this.updateNames.includes(this.edge.name) &&
!this.auditReport?.isVulnerable(this.edge.to)) {
return
}
// walk up the tree until we hit either a top/root node, or a place
// where the dep is not a peer dep.
const start = this.getStartNode()
for (const target of start.ancestry()) {
// if the current location has a peerDep on it, then we can't place here
// this is pretty rare to hit, since we always prefer deduping peers,
// and the getStartNode will start us out above any peers from the
// thing that depends on it. but we could hit it with something like:
//
// a -> (b@1, c@1)
// +-- c@1
// +-- b -> PEEROPTIONAL(v) (c@2)
// +-- c@2 -> (v)
//
// So we check if we can place v under c@2, that's fine.
// Then we check under b, and can't, because of the optional peer dep.
// but we CAN place it under a, so the correct thing to do is keep
// walking up the tree.
const targetEdge = target.edgesOut.get(this.edge.name)
if (!target.isTop && targetEdge && targetEdge.peer) {
continue
}
const cpd = new CanPlaceDep({
dep: this.dep,
edge: this.edge,
// note: this sets the parent's canPlace as the parent of this
// canPlace, but it does NOT add this canPlace to the parent's
// children. This way, we can know that it's a peer dep, and
// get the top edge easily, while still maintaining the
// tree of checks that factored into the original decision.
parent: this.parent && this.parent.canPlace,
target,
preferDedupe: this.preferDedupe,
explicitRequest: this.explicitRequest,
})
this.checks.set(target, cpd)
// It's possible that a "conflict" is a conflict among the *peers* of
// a given node we're trying to place, but there actually is no current
// node. Eg,
// root -> (a, b)
// a -> PEER(c)
// b -> PEER(d)
// d -> PEER(c@2)
// We place (a), and get a peer of (c) along with it.
// then we try to place (b), and get CONFLICT in the check, because
// of the conflicting peer from (b)->(d)->(c@2). In that case, we
// should treat (b) and (d) as OK, and place them in the last place
// where they did not themselves conflict, and skip c@2 if conflict
// is ok by virtue of being forced or not ours and not strict.
if (cpd.canPlaceSelf !== CONFLICT) {
this.canPlaceSelf = cpd
}
// we found a place this can go, along with all its peer friends.
// we break when we get the first conflict
if (cpd.canPlace !== CONFLICT) {
this.canPlace = cpd
} else {
break
}
// if it's a load failure, just plop it in the first place attempted,
// since we're going to crash the build or prune it out anyway.
// but, this will frequently NOT be a successful canPlace, because
// it'll have no version or other information.
if (this.dep.errors.length) {
break
}
// nest packages like npm v1 and v2
// very disk-inefficient
if (this.installStrategy === 'nested') {
break
}
// when installing globally, or just in global style, we never place
// deps above the first level.
if (this.installStrategy === 'shallow') {
const rp = target.resolveParent
if (rp && rp.isProjectRoot) {
break
}
}
}
// if we can't find a target, that means that the last place checked,
// and all the places before it, had a conflict.
if (!this.canPlace) {
// if not forced, and it's our dep, or strictPeerDeps is set, then
// this is an ERESOLVE error.
if (!this.force && (this.isMine || this.strictPeerDeps)) {
return this.failPeerConflict()
}
// ok! we're gonna allow the conflict, but we should still warn
// if we have a current, then we treat CONFLICT as a KEEP.
// otherwise, we just skip it. Only warn on the one that actually
// could not be placed somewhere.
if (!this.canPlaceSelf) {
this.warnPeerConflict()
return
}
this.canPlace = this.canPlaceSelf
}
// now we have a target, a tree of CanPlaceDep results for the peer group,
// and we are ready to go
/* istanbul ignore next */
if (!this.canPlace) {
debug(() => {
throw new Error('canPlace not set, but trying to place in tree')
})
return
}
const { target } = this.canPlace
log.silly(
'placeDep',
target.location || 'ROOT',
`${this.dep.name}@${this.dep.version}`,
this.canPlace.description,
`for: ${this.edge.from.package._id || this.edge.from.location}`,
`want: ${redact(this.edge.spec || '*')}`
)
const placementType = this.canPlace.canPlace === CONFLICT
? this.canPlace.canPlaceSelf
: this.canPlace.canPlace
// if we're placing in the tree with --force, we can get here even though
// it's a conflict. Treat it as a KEEP, but warn and move on.
if (placementType === KEEP) {
// this was a peerConflicted peer dep
if (this.edge.peer && !this.edge.valid) {
this.warnPeerConflict()
}
// if we get a KEEP in a update scenario, then we MAY have something
// already duplicating this unnecessarily! For example:
// ```
// root (dep: y@1)
// +-- x (dep: y@1.1)
// | +-- y@1.1.0 (replacing with 1.1.2, got KEEP at the root)
// +-- y@1.1.2 (updated already from 1.0.0)
// ```
// Now say we do `reify({update:['y']})`, and the latest version is
// 1.1.2, which we now have in the root. We'll try to place y@1.1.2
// first in x, then in the root, ending with KEEP, because we already
// have it. In that case, we ought to REMOVE the nm/x/nm/y node, because
// it is an unnecessary duplicate.
this.pruneDedupable(target)
return
}
// we were told to place it here in the target, so either it does not
// already exist in the tree, OR it's shadowed.
// handle otherwise unresolvable dependency nesting loops by
// creating a symbolic link
// a1 -> b1 -> a2 -> b2 -> a1 -> ...
// instead of nesting forever, when the loop occurs, create
// a symbolic link to the earlier instance
for (let p = target; p; p = p.resolveParent) {
if (p.matches(this.dep) && !p.isTop) {
this.placed = new Link({ parent: target, target: p })
return
}
}
// XXX if we are replacing SOME of a peer entry group, we will need to
// remove any that are not being replaced and will now be invalid, and
// re-evaluate them deeper into the tree.
const virtualRoot = this.dep.parent
this.placed = new this.dep.constructor({
name: this.dep.name,
pkg: this.dep.package,
resolved: this.dep.resolved,
integrity: this.dep.integrity,
installLinks: this.installLinks,
legacyPeerDeps: this.legacyPeerDeps,
error: this.dep.errors[0],
...(this.dep.overrides ? { overrides: this.dep.overrides } : {}),
...(this.dep.isLink ? { target: this.dep.target, realpath: this.dep.realpath } : {}),
})
this.oldDep = target.children.get(this.name)
if (this.oldDep) {
this.replaceOldDep()
} else {
this.placed.parent = target
}
// if it's a peerConflicted peer dep, warn about it
if (this.edge.peer && !this.placed.satisfies(this.edge)) {
this.warnPeerConflict()
}
// If the edge is not an error, then we're updating something, and
// MAY end up putting a better/identical node further up the tree in
// a way that causes an unnecessary duplication. If so, remove the
// now-unnecessary node.
if (this.edge.valid && this.edge.to && this.edge.to !== this.placed) {
this.pruneDedupable(this.edge.to, false)
}
// in case we just made some duplicates that can be removed,
// prune anything deeper in the tree that can be replaced by this
for (const node of target.root.inventory.query('name', this.name)) {
if (node.isDescendantOf(target) && !node.isTop) {
this.pruneDedupable(node, false)
// only walk the direct children of the ones we kept
if (node.root === target.root) {
for (const kid of node.children.values()) {
this.pruneDedupable(kid, false)
}
}
}
}
// also place its unmet or invalid peer deps at this location
// loop through any peer deps from the thing we just placed, and place
// those ones as well. it's safe to do this with the virtual nodes,
// because we're copying rather than moving them out of the virtual root,
// otherwise they'd be gone and the peer set would change throughout
// this loop.
for (const peerEdge of this.placed.edgesOut.values()) {
if (peerEdge.valid || !peerEdge.peer || peerEdge.peerConflicted) {
continue
}
const peer = virtualRoot.children.get(peerEdge.name)
// Note: if the virtualRoot *doesn't* have the peer, then that means
// it's an optional peer dep. If it's not being properly met (ie,
// peerEdge.valid is false), then this is likely heading for an
// ERESOLVE error, unless it can walk further up the tree.
if (!peer) {
continue
}
// peerConflicted peerEdge, just accept what's there already
if (!peer.satisfies(peerEdge)) {
continue
}
this.children.push(new PlaceDep({
auditReport: this.auditReport,
explicitRequest: this.explicitRequest,
force: this.force,
installLinks: this.installLinks,
installStrategy: this.installStrategy,
legacyPeerDeps: this.legaycPeerDeps,
preferDedupe: this.preferDedupe,
strictPeerDeps: this.strictPeerDeps,
updateNames: this.updateName,
parent: this,
dep: peer,
node: this.placed,
edge: peerEdge,
}))
}
}
// Swap the newly created node (this.placed) in for this.oldDep within the
// same parent, then prune any nodes or peer sets that the replacement has
// made invalid or extraneous.
replaceOldDep () {
  const target = this.oldDep.parent

  // XXX handle replacing an entire peer group?
  // what about cases where we need to push some other peer groups deeper
  // into the tree? all the tree updating should be done here, and track
  // all the things that we add and remove, so that we can know what
  // to re-evaluate.

  // if we're replacing, we should also remove any nodes for edges that
  // are now invalid, and where this (or its deps) is the only dependent,
  // and also recurse on that pruning. Otherwise leaving that dep node
  // around can result in spurious conflicts pushing nodes deeper into
  // the tree than needed in the case of cycles that will be removed
  // later anyway.
  // oldDeps: deps the old node required but the new node does not,
  // along with everything that only they depended on.
  const oldDeps = []
  for (const [name, edge] of this.oldDep.edgesOut.entries()) {
    if (!this.placed.edgesOut.has(name) && edge.to) {
      oldDeps.push(...gatherDepSet([edge.to], e => e.to !== edge.to))
    }
  }

  // gather all peer edgesIn which are at this level, and will not be
  // satisfied by the new dependency. Those are the peer sets that need
  // to be either warned about (if they cannot go deeper), or removed and
  // re-placed (if they can).
  const prunePeerSets = []
  for (const edge of this.oldDep.edgesIn) {
    if (this.placed.satisfies(edge) ||
      !edge.peer ||
      edge.from.parent !== target ||
      edge.peerConflicted) {
      // not a peer dep, not invalid, or not from this level, so it's fine
      // to just let it re-evaluate as a problemEdge later, or let it be
      // satisfied by the new dep being placed.
      continue
    }
    for (const entryEdge of peerEntrySets(edge.from).keys()) {
      // either this one needs to be pruned and re-evaluated, or marked
      // as peerConflicted and warned about. If the entryEdge comes in from
      // the root or a workspace, then we have to leave it alone, and in that
      // case, it will have already warned or crashed by getting to this point
      const entryNode = entryEdge.to
      const deepestTarget = deepestNestingTarget(entryNode)
      if (deepestTarget !== target &&
        !(entryEdge.from.isProjectRoot || entryEdge.from.isWorkspace)) {
        prunePeerSets.push(...gatherDepSet([entryNode], e => {
          return e.to !== entryNode && !e.peerConflicted
        }))
      } else {
        this.warnPeerConflict(edge, this.dep)
      }
    }
  }

  this.placed.replace(this.oldDep)
  this.pruneForReplacement(this.placed, oldDeps)
  for (const dep of prunePeerSets) {
    // anything that depended on a pruned peer-set member must be
    // re-evaluated, since its dep is about to be removed from the tree
    for (const edge of dep.edgesIn) {
      this.needEvaluation.add(edge.from)
    }
    dep.root = null
  }
}
pruneForReplacement (node, oldDeps) {
// gather up all the now-invalid/extraneous edgesOut, as long as they are
// only depended upon by the old node/deps
const invalidDeps = new Set([...node.edgesOut.values()]
.filter(e => e.to && !e.valid).map(e => e.to))
for (const dep of oldDeps) {
const set = gatherDepSet([dep], e => e.to !== dep && e.valid)
for (const dep of set) {
invalidDeps.add(dep)
}
}
// ignore dependency edges from the node being replaced, but
// otherwise filter the set down to just the set with no
// dependencies from outside the set, except the node in question.
const deps = gatherDepSet(invalidDeps, edge =>
edge.from !== node && edge.to !== node && edge.valid)
// now just delete whatever's left, because it's junk
for (const dep of deps) {
dep.root = null
}
}
// prune all the nodes in a branch of the tree that can be safely removed
// This is only the most basic duplication detection; it finds if there
// is another satisfying node further up the tree, and if so, dedupes.
// Even in installStategy is nested, we do this amount of deduplication.
pruneDedupable (node, descend = true) {
if (node.canDedupe(this.preferDedupe)) {
// gather up all deps that have no valid edges in from outside
// the dep set, except for this node we're deduping, so that we
// also prune deps that would be made extraneous.
const deps = gatherDepSet([node], e => e.to !== node && e.valid)
for (const node of deps) {
node.root = null
}
return
}
if (descend) {
// sort these so that they're deterministically ordered
// otherwise, resulting tree shape is dependent on the order
// in which they happened to be resolved.
const nodeSort = (a, b) => localeCompare(a.location, b.location)
const children = [...node.children.values()].sort(nodeSort)
for (const child of children) {
this.pruneDedupable(child)
}
const fsChildren = [...node.fsChildren].sort(nodeSort)
for (const topNode of fsChildren) {
const children = [...topNode.children.values()].sort(nodeSort)
for (const child of children) {
this.pruneDedupable(child)
}
}
}
}
get isMine () {
const { edge } = this.top
const { from: node } = edge
if (node.isWorkspace || node.isProjectRoot) {
return true
}
if (!edge.peer) {
return false
}
// re-entry case. check if any non-peer edges come from the project,
// or any entryEdges on peer groups are from the root.
let hasPeerEdges = false
for (const edge of node.edgesIn) {
if (edge.peer) {
hasPeerEdges = true
continue
}
if (edge.from.isWorkspace || edge.from.isProjectRoot) {
return true
}
}
if (hasPeerEdges) {
for (const edge of peerEntrySets(node).keys()) {
if (edge.from.isWorkspace || edge.from.isProjectRoot) {
return true
}
}
}
return false
}
warnPeerConflict (edge, dep) {
edge = edge || this.edge
dep = dep || this.dep
edge.peerConflicted = true
const expl = this.explainPeerConflict(edge, dep)
log.warn('ERESOLVE', 'overriding peer dependency', expl)
}
failPeerConflict (edge, dep) {
edge = edge || this.top.edge
dep = dep || this.top.dep
const expl = this.explainPeerConflict(edge, dep)
throw Object.assign(new Error('could not resolve'), expl)
}
// Build the ERESOLVE explanation object used by both warnPeerConflict and
// failPeerConflict: the edge being resolved, the dep we tried to place,
// the currently-resolved node, and the conflicting peer.
explainPeerConflict (edge, dep) {
  const { from: node } = edge
  const curNode = node.resolve(edge.name)

  // XXX decorate more with this.canPlace and this.canPlaceSelf,
  // this.checks, this.children, walk over conflicted peers, etc.
  const expl = {
    code: 'ERESOLVE',
    edge: edge.explain(),
    dep: dep.explain(edge),
    force: this.force,
    isMine: this.isMine,
    strictPeerDeps: this.strictPeerDeps,
  }

  if (this.parent) {
    // this is the conflicted peer
    expl.current = curNode && curNode.explain(edge)
    expl.peerConflict = this.current && this.current.explain(this.edge)
  } else {
    expl.current = curNode && curNode.explain()
    if (this.canPlaceSelf && this.canPlaceSelf.canPlaceSelf !== CONFLICT) {
      // failed while checking for a child dep
      // walk the conflict children to find the one that actually clashed
      const cps = this.canPlaceSelf
      for (const peer of cps.conflictChildren) {
        if (peer.current) {
          expl.peerConflict = {
            current: peer.current.explain(),
            peer: peer.dep.explain(peer.edge),
          }
          break
        }
      }
    } else {
      // the node itself could not be placed anywhere
      expl.peerConflict = {
        current: this.current && this.current.explain(),
        peer: this.dep.explain(this.edge),
      }
    }
  }

  return expl
}
getStartNode () {
// if we are a peer, then we MUST be at least as shallow as the peer
// dependent
const from = this.parent?.getStartNode() || this.edge.from
return deepestNestingTarget(from, this.name)
}
// XXX this only appears to be used by tests
get allChildren () {
const set = new Set(this.children)
for (const child of set) {
for (const grandchild of child.children) {
set.add(grandchild)
}
}
return [...set]
}
}
module.exports = PlaceDep

198
package/node_modules/@npmcli/arborist/lib/printable.js generated vendored Normal file
View File

@@ -0,0 +1,198 @@
// helper function to output a clearer visualization
// of the current node and its descendents
const localeCompare = require('@isaacs/string-locale-compare')('en')
const util = require('node:util')
const relpath = require('./relpath.js')
// Snapshot of a real arborist node copying only the interesting fields,
// so util.inspect / JSON output stays readable. `path` is the list of
// already-visited nodes, used (via printableTree) to break cycles.
class ArboristNode {
  constructor (tree, path) {
    this.name = tree.name
    // only record packageName when it differs from the folder name
    if (tree.packageName && tree.packageName !== this.name) {
      this.packageName = tree.packageName
    }
    if (tree.version) {
      this.version = tree.version
    }
    this.location = tree.location
    this.path = tree.path
    // only record realpath when it differs from path
    if (tree.realpath !== this.path) {
      this.realpath = tree.realpath
    }
    if (tree.resolved !== null) {
      this.resolved = tree.resolved
    }
    // boolean flags are only recorded when set
    if (tree.extraneous) {
      this.extraneous = true
    }
    if (tree.dev) {
      this.dev = true
    }
    if (tree.optional) {
      this.optional = true
    }
    // devOptional alone means "both", so skip it when dev/optional shown
    if (tree.devOptional && !tree.dev && !tree.optional) {
      this.devOptional = true
    }
    if (tree.peer) {
      this.peer = true
    }
    if (tree.inBundle) {
      this.bundled = true
    }
    if (tree.inDepBundle) {
      this.bundler = tree.getBundler().location
    }
    if (tree.isProjectRoot) {
      this.isProjectRoot = true
    }
    if (tree.isWorkspace) {
      this.isWorkspace = true
    }
    const bd = tree.package && tree.package.bundleDependencies
    if (bd && bd.length) {
      this.bundleDependencies = bd
    }
    if (tree.inShrinkwrap) {
      this.inShrinkwrap = true
    } else if (tree.hasShrinkwrap) {
      this.hasShrinkwrap = true
    }
    // errors reduced to { code, path } via treeError
    if (tree.error) {
      this.error = treeError(tree.error)
    }
    if (tree.errors && tree.errors.length) {
      this.errors = tree.errors.map(treeError)
    }
    if (tree.overrides) {
      this.overrides = new Map([...tree.overrides.ruleset.values()]
        .map((override) => [override.key, override.value]))
    }
    // edgesOut sorted by name
    if (tree.edgesOut.size) {
      this.edgesOut = new Map([...tree.edgesOut.entries()]
        .sort(([a], [b]) => localeCompare(a, b))
        .map(([name, edge]) => [name, new EdgeOut(edge)]))
    }
    // edgesIn sorted by location
    if (tree.edgesIn.size) {
      this.edgesIn = new Set([...tree.edgesIn]
        .sort((a, b) => localeCompare(a.from.location, b.from.location))
        .map(edge => new EdgeIn(edge)))
    }
    // workspace paths shown relative to the root's realpath
    if (tree.workspaces && tree.workspaces.size) {
      this.workspaces = new Map([...tree.workspaces.entries()]
        .map(([name, path]) => [name, relpath(tree.root.realpath, path)]))
    }
    // fsChildren sorted by path
    if (tree.fsChildren.size) {
      this.fsChildren = new Set([...tree.fsChildren]
        .sort(({ path: a }, { path: b }) => localeCompare(a, b))
        .map(tree => printableTree(tree, path)))
    }
    // children sorted by name
    if (tree.children.size) {
      this.children = new Map([...tree.children.entries()]
        .sort(([a], [b]) => localeCompare(a, b))
        .map(([name, tree]) => [name, printableTree(tree, path)]))
    }
  }
}
// a virtual node additionally records the real node it shadows
class ArboristVirtualNode extends ArboristNode {
  constructor (node, seen) {
    super(node, seen)
    this.sourceReference = printableTree(node.sourceReference, seen)
  }
}
// a link node additionally records the target it points at
class ArboristLink extends ArboristNode {
  constructor (node, seen) {
    super(node, seen)
    this.target = printableTree(node.target, seen)
  }
}
// reduce an error object to just its code and (when present) its path
const treeError = (err) => {
  const out = { code: err.code }
  if (err.path) {
    out.path = err.path
  }
  return out
}
// print out edges without dumping the full node all over again
// this base class will toJSON as a plain old object, but the
// util.inspect() output will be a bit cleaner
class Edge {
  constructor (edge) {
    const { type, name, rawSpec, spec, error, peerConflicted } = edge
    this.type = type
    this.name = name
    // an empty rawSpec means "any version"
    this.spec = rawSpec || '*'
    // when the effective spec differs from what was written, an
    // override is in play — record it
    if (rawSpec !== spec) {
      this.override = spec
    }
    if (error) {
      this.error = error
    }
    if (peerConflicted) {
      this.peerConflicted = peerConflicted
    }
  }
}
// don't care about 'from' for edges out
class EdgeOut extends Edge {
  constructor (edge) {
    super(edge)
    this.to = edge.to && edge.to.location
  }

  [util.inspect.custom] () {
    // build the same single-line summary piece by piece
    let str = `{ ${this.type} ${this.name}@${this.spec}`
    if (this.override) {
      str += ` overridden:${this.override}`
    }
    if (this.to) {
      str += ` -> ${this.to}`
    }
    if (this.error) {
      str += ` ${this.error}`
    }
    if (this.peerConflicted) {
      str += ' peerConflicted'
    }
    return `${str} }`
  }
}
// don't care about 'to' for edges in
class EdgeIn extends Edge {
  constructor (edge) {
    super(edge)
    this.from = edge.from && edge.from.location
  }

  [util.inspect.custom] () {
    // an edge from the root has an empty location, shown as ""
    let str = `{ ${this.from || '""'} ${this.type} ${this.name}@${this.spec}`
    if (this.error) {
      str += ` ${this.error}`
    }
    if (this.peerConflicted) {
      str += ' peerConflicted'
    }
    return `${str} }`
  }
}
// Convert an arborist node into its printable representation, picking the
// wrapper class by node kind and tracking already-seen nodes so cycles
// render as a bare { location } stub instead of recursing forever.
const printableTree = (tree, path = []) => {
  if (!tree) {
    return tree
  }

  let Cls = ArboristNode
  if (tree.isLink) {
    Cls = ArboristLink
  } else if (tree.sourceReference) {
    Cls = ArboristVirtualNode
  }

  // already visited: emit a stub carrying only the location
  if (path.includes(tree)) {
    return Object.assign(Object.create(Cls.prototype), { location: tree.location })
  }

  path.push(tree)
  return new Cls(tree, path)
}
module.exports = printableTree

View File

@@ -0,0 +1,945 @@
'use strict'
const { resolve } = require('node:path')
const { parser, arrayDelimiter } = require('@npmcli/query')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const { log } = require('proc-log')
const { minimatch } = require('minimatch')
const npa = require('npm-package-arg')
const pacote = require('pacote')
const semver = require('semver')
const fetch = require('npm-registry-fetch')
// handle results for parsed query asts, results are stored in a map that has a
// key that points to each ast selector node and stores the resulting array of
// arborist nodes as its value, that is essential to how we handle multiple
// query selectors, e.g: `#a, #b, #c` <- 3 diff ast selector nodes
class Results {
// the ast selector node whose results are currently being accumulated
#currentAstSelector
// nodes the query starts filtering from (the one place this can differ
// from the inventory is :has(), which starts from a single item)
#initialItems
// full arborist inventory of the tree being queried
#inventory
// package name -> Promise of published versions, used by :outdated()
#outdatedCache = new Map()
// cached bulk-advisory response, used by :vuln()
#vulnCache
// combinator fn held until the right-hand selector produces results
#pendingCombinator
// ast selector node -> array of matching arborist nodes
#results = new Map()
// the node used to resolve :root and :scope
#targetNode
// @param {object} opts
// @param {object} opts.rootAstNode - root of the parsed query ast
// @param {object} opts.inventory - arborist inventory to query against
// @param {Array} opts.initialItems - nodes the query starts from
// @param {object} [opts.vulnCache] - advisory cache shared with nested queries
// @param {object} opts.targetNode - node used to resolve :root / :scope
// @param {object} [opts.flatOptions] - registry/fetch options
constructor (opts) {
  this.#currentAstSelector = opts.rootAstNode.nodes[0]
  this.#inventory = opts.inventory
  this.#initialItems = opts.initialItems
  this.#vulnCache = opts.vulnCache
  this.#targetNode = opts.targetNode
  // seed the first selector's result bucket with the starting items
  this.currentResults = this.#initialItems
  // We get this when first called and need to pass it to pacote
  this.flatOptions = opts.flatOptions || {}
  // reset by rootAstNode walker
  this.currentAstNode = opts.rootAstNode
}
// results are stored per ast selector node; these accessors read/write
// the bucket for whichever selector is currently being evaluated
get currentResults () {
  return this.#results.get(this.#currentAstSelector)
}

set currentResults (value) {
  this.#results.set(this.#currentAstSelector, value)
}
// retrieves the initial items to which start the filtering / matching
// for most of the different types of recognized ast nodes, e.g: class (aka
// depType), id, *, etc in different contexts we need to start with the
// current list of filtered results, for example a query for `.workspace`
// actually means the same as `*.workspace` so we want to start with the full
// inventory if that's the first ast node we're reading but if it appears in
// the middle of a query it should respect the previous filtered results,
// combinators are a special case in which we always want to have the
// complete inventory list in order to use the left-hand side ast node as a
// filter combined with the element on its right-hand side
get initialItems () {
const firstParsed =
(this.currentAstNode.parent.nodes[0] === this.currentAstNode) &&
(this.currentAstNode.parent.parent.type === 'root')
if (firstParsed) {
return this.#initialItems
}
if (this.currentAstNode.prev().type === 'combinator') {
return this.#inventory
}
return this.currentResults
}
// combinators need information about previously filtered items along
// with info of the items parsed / retrieved from the selector right
// past the combinator, for this reason combinators are stored and
// only ran as the last part of each selector logic
processPendingCombinator (nextResults) {
if (this.#pendingCombinator) {
const res = this.#pendingCombinator(this.currentResults, nextResults)
this.#pendingCombinator = null
this.currentResults = res
} else {
this.currentResults = nextResults
}
}
// when collecting results to a root astNode, we traverse the list of child
// selector nodes and collect all of their resulting arborist nodes into a
// single/flat Set of items, this ensures we also deduplicate items
collect (rootAstNode) {
return new Set(rootAstNode.nodes.flatMap(n => this.#results.get(n)))
}
// selector types map to the '.type' property of the ast nodes via `${astNode.type}Type`
//
// attribute selector [name=value], etc
attributeType () {
const nextResults = this.initialItems.filter(node =>
attributeMatch(this.currentAstNode, node.package)
)
this.processPendingCombinator(nextResults)
}
// dependency type selector (i.e. .prod, .dev, etc)
// css calls this class, we interpret is as dependency type
classType () {
const depTypeFn = depTypes[String(this.currentAstNode)]
if (!depTypeFn) {
throw Object.assign(
new Error(`\`${String(this.currentAstNode)}\` is not a supported dependency type.`),
{ code: 'EQUERYNODEPTYPE' }
)
}
const nextResults = depTypeFn(this.initialItems)
this.processPendingCombinator(nextResults)
}
// combinators (i.e. '>', ' ', '~')
// just store the combinator function here; it can only run once the
// right-hand side selector has produced its results, at which point
// processPendingCombinator applies it
combinatorType () {
  this.#pendingCombinator = combinators[String(this.currentAstNode)]
}
// name selectors (i.e. #foo)
// css calls this id, we interpret it as name
idType () {
const name = this.currentAstNode.value
const nextResults = this.initialItems.filter(node =>
(name === node.name) || (name === node.package.name)
)
this.processPendingCombinator(nextResults)
}
// pseudo selectors (prefixed with :)
async pseudoType () {
const pseudoFn = `${this.currentAstNode.value.slice(1)}Pseudo`
if (!this[pseudoFn]) {
throw Object.assign(
new Error(`\`${this.currentAstNode.value
}\` is not a supported pseudo selector.`),
{ code: 'EQUERYNOPSEUDO' }
)
}
const nextResults = await this[pseudoFn]()
this.processPendingCombinator(nextResults)
}
// a new selector begins (e.g. each comma-separated part of a query):
// point result storage at it
selectorType () {
  this.#currentAstSelector = this.currentAstNode
  // starts a new array in which resulting items
  // can be stored for each given ast selector
  if (!this.currentResults) {
    this.currentResults = []
  }
}
// `*` matches every item in the current starting set; a pending
// combinator, if any, still applies
universalType () {
  this.processPendingCombinator(this.initialItems)
}
// pseudo selectors map to the 'value' property of the pseudo selectors in the ast nodes
// via selectors via `${value.slice(1)}Pseudo`
// :attr(prop, [name=value]) — walk into nested package properties
// (lookupProperties) and match the attribute selector against whatever
// objects are found at the deepest requested level
attrPseudo () {
  const { lookupProperties, attributeMatcher } = this.currentAstNode

  return this.initialItems.filter(node => {
    let objs = [node.package]
    for (const prop of lookupProperties) {
      // if an isArray symbol is found that means we'll need to iterate
      // over the previous found array to basically make sure we traverse
      // all its indexes testing for possible objects that may eventually
      // hold more keys specified in a selector
      if (prop === arrayDelimiter) {
        objs = objs.flat()
        continue
      }

      // otherwise just maps all currently found objs
      // to the next prop from the lookup properties list,
      // filters out any empty key lookup
      objs = objs.flatMap(obj => obj[prop] || [])

      // in case there's no property found in the lookup
      // just filters that item out
      const noAttr = objs.every(obj => !obj)
      if (noAttr) {
        return false
      }
    }

    // if any of the potential object matches
    // that item should be in the final result
    return objs.some(obj => attributeMatch(attributeMatcher, obj))
  })
}
emptyPseudo () {
return this.initialItems.filter(node => node.edgesOut.size === 0)
}
extraneousPseudo () {
return this.initialItems.filter(node => node.extraneous)
}
// :has(selector) — keep an item only when running the nested selector,
// with that single item as the starting point, yields any results
async hasPseudo () {
  const found = []
  for (const item of this.initialItems) {
    // This is the one time initialItems differs from inventory
    const res = await retrieveNodesFromParsedAst({
      flatOptions: this.flatOptions,
      initialItems: [item],
      inventory: this.#inventory,
      rootAstNode: this.currentAstNode.nestedNode,
      targetNode: item,
      vulnCache: this.#vulnCache,
    })
    if (res.size > 0) {
      found.push(item)
    }
  }
  return found
}
invalidPseudo () {
const found = []
for (const node of this.initialItems) {
for (const edge of node.edgesIn) {
if (edge.invalid) {
found.push(node)
break
}
}
}
return found
}
// :is(selector list) — run the nested selector over the current items
// and return whatever it matches
async isPseudo () {
  const res = await retrieveNodesFromParsedAst({
    flatOptions: this.flatOptions,
    initialItems: this.initialItems,
    inventory: this.#inventory,
    rootAstNode: this.currentAstNode.nestedNode,
    targetNode: this.currentAstNode,
    vulnCache: this.#vulnCache,
  })
  return [...res]
}
linkPseudo () {
return this.initialItems.filter(node => node.isLink || (node.isTop && !node.isRoot))
}
// :missing — synthesize stand-in nodes for every edge in the inventory
// whose dependency could not be found (edge.missing); there is no real
// node to return, so a bare node is built carrying the requested
// name/spec and the edge that wanted it
missingPseudo () {
  return this.#inventory.reduce((res, node) => {
    for (const edge of node.edgesOut.values()) {
      if (edge.missing) {
        const pkg = { name: edge.name, version: edge.spec }
        const item = new this.#targetNode.constructor({ pkg })
        // mark the stand-in so consumers can tell it is not a real node
        item.queryContext = {
          missing: true,
        }
        item.edgesIn = new Set([edge])
        res.push(item)
      }
    }
    return res
  }, [])
}
async notPseudo () {
const res = await retrieveNodesFromParsedAst({
flatOptions: this.flatOptions,
initialItems: this.initialItems,
inventory: this.#inventory,
rootAstNode: this.currentAstNode.nestedNode,
targetNode: this.currentAstNode,
vulnCache: this.#vulnCache,
})
const internalSelector = new Set(res)
return this.initialItems.filter(node =>
!internalSelector.has(node))
}
overriddenPseudo () {
return this.initialItems.filter(node => node.overridden)
}
pathPseudo () {
return this.initialItems.filter(node => {
if (!this.currentAstNode.pathValue) {
return true
}
return minimatch(
node.realpath.replace(/\\+/g, '/'),
resolve(node.root.realpath, this.currentAstNode.pathValue).replace(/\\+/g, '/')
)
})
}
privatePseudo () {
return this.initialItems.filter(node => node.package.private)
}
rootPseudo () {
return this.initialItems.filter(node => node === this.#targetNode.root)
}
scopePseudo () {
return this.initialItems.filter(node => node === this.#targetNode)
}
semverPseudo () {
const {
attributeMatcher,
lookupProperties,
semverFunc = 'infer',
semverValue,
} = this.currentAstNode
const { qualifiedAttribute } = attributeMatcher
if (!semverValue) {
// DEPRECATED: remove this warning and throw an error as part of @npmcli/arborist@6
log.warn('query', 'usage of :semver() with no parameters is deprecated')
return this.initialItems
}
if (!semver.valid(semverValue) && !semver.validRange(semverValue)) {
throw Object.assign(
new Error(`\`${semverValue}\` is not a valid semver version or range`),
{ code: 'EQUERYINVALIDSEMVER' })
}
const valueIsVersion = !!semver.valid(semverValue)
const nodeMatches = (node, obj) => {
// if we already have an operator, the user provided some test as part of the selector
// we evaluate that first because if it fails we don't want this node anyway
if (attributeMatcher.operator) {
if (!attributeMatch(attributeMatcher, obj)) {
// if the initial operator doesn't match, we're done
return false
}
}
const attrValue = obj[qualifiedAttribute]
// both valid and validRange return null for undefined, so this will skip both nodes that
// do not have the attribute defined as well as those where the attribute value is invalid
// and those where the value from the package.json is not a string
if ((!semver.valid(attrValue) && !semver.validRange(attrValue)) ||
typeof attrValue !== 'string') {
return false
}
const attrIsVersion = !!semver.valid(attrValue)
let actualFunc = semverFunc
// if we're asked to infer, we examine outputs to make a best guess
if (actualFunc === 'infer') {
if (valueIsVersion && attrIsVersion) {
// two versions -> semver.eq
actualFunc = 'eq'
} else if (!valueIsVersion && !attrIsVersion) {
// two ranges -> semver.intersects
actualFunc = 'intersects'
} else {
// anything else -> semver.satisfies
actualFunc = 'satisfies'
}
}
if (['eq', 'neq', 'gt', 'gte', 'lt', 'lte'].includes(actualFunc)) {
// both sides must be versions, but one is not
if (!valueIsVersion || !attrIsVersion) {
return false
}
return semver[actualFunc](attrValue, semverValue)
} else if (['gtr', 'ltr', 'satisfies'].includes(actualFunc)) {
// at least one side must be a version, but neither is
if (!valueIsVersion && !attrIsVersion) {
return false
}
return valueIsVersion
? semver[actualFunc](semverValue, attrValue)
: semver[actualFunc](attrValue, semverValue)
} else if (['intersects', 'subset'].includes(actualFunc)) {
// these accept two ranges and since a version is also a range, anything goes
return semver[actualFunc](attrValue, semverValue)
} else {
// user provided a function we don't know about, throw an error
throw Object.assign(new Error(`\`semver.${actualFunc}\` is not a supported operator.`),
{ code: 'EQUERYINVALIDOPERATOR' })
}
}
return this.initialItems.filter((node) => {
// no lookupProperties just means its a top level property, see if it matches
if (!lookupProperties.length) {
return nodeMatches(node, node.package)
}
// this code is mostly duplicated from attrPseudo to traverse into the package until we get
// to our deepest requested object
let objs = [node.package]
for (const prop of lookupProperties) {
if (prop === arrayDelimiter) {
objs = objs.flat()
continue
}
objs = objs.flatMap(obj => obj[prop] || [])
const noAttr = objs.every(obj => !obj)
if (noAttr) {
return false
}
return objs.some(obj => nodeMatches(node, obj))
}
})
}
typePseudo () {
if (!this.currentAstNode.typeValue) {
return this.initialItems
}
return this.initialItems
.flatMap(node => {
const found = []
for (const edge of node.edgesIn) {
if (npa(`${edge.name}@${edge.spec}`).type === this.currentAstNode.typeValue) {
found.push(edge.to)
}
}
return found
})
}
dedupedPseudo () {
return this.initialItems.filter(node => node.target.edgesIn.size > 1)
}
async vulnPseudo () {
if (!this.initialItems.length) {
return this.initialItems
}
if (!this.#vulnCache) {
const packages = {}
// We have to map the items twice, once to get the request, and a second time to filter out the results of that request
this.initialItems.map((node) => {
if (node.isProjectRoot || node.package.private) {
return
}
if (!packages[node.name]) {
packages[node.name] = []
}
if (!packages[node.name].includes(node.version)) {
packages[node.name].push(node.version)
}
})
const res = await fetch('/-/npm/v1/security/advisories/bulk', {
...this.flatOptions,
registry: this.flatOptions.auditRegistry || this.flatOptions.registry,
method: 'POST',
gzip: true,
body: packages,
})
this.#vulnCache = await res.json()
}
const advisories = this.#vulnCache
const { vulns } = this.currentAstNode
return this.initialItems.filter(item => {
const vulnerable = advisories[item.name]?.filter(advisory => {
// This could be for another version of this package elsewhere in the tree
if (!semver.intersects(advisory.vulnerable_versions, item.version)) {
return false
}
if (!vulns) {
return true
}
// vulns are OR with each other, if any one matches we're done
for (const vuln of vulns) {
if (vuln.severity && !vuln.severity.includes('*')) {
if (!vuln.severity.includes(advisory.severity)) {
continue
}
}
if (vuln?.cwe) {
// * is special, it means "has a cwe"
if (vuln.cwe.includes('*')) {
if (!advisory.cwe.length) {
continue
}
} else if (!vuln.cwe.every(cwe => advisory.cwe.includes(`CWE-${cwe}`))) {
continue
}
}
return true
}
})
if (vulnerable?.length) {
item.queryContext = {
advisories: vulnerable,
}
return true
}
return false
})
}
// `:outdated(<kind>)` pseudo selector: keep only nodes for which the registry
// reports at least one version newer than the installed one.  `kind` refines
// what counts as a match: 'any' (default), 'major'/'minor'/'patch' (semver
// diff of the newer version), 'in-range' (a newer version satisfies some
// edgeIn), or 'out-of-range' (a newer version escapes some edgeIn).
async outdatedPseudo () {
  const { outdatedKind = 'any' } = this.currentAstNode
  // filter the initialItems
  // NOTE: this uses a Promise.all around a map without in-line concurrency handling
  // since the only async action taken is retrieving the packument, which is limited
  // based on the max-sockets config in make-fetch-happen
  const initialResults = await Promise.all(this.initialItems.map(async (node) => {
    // the root can't be outdated, skip it
    if (node.isProjectRoot) {
      return false
    }
    // private packages can't be published, skip them
    if (node.package.private) {
      return false
    }
    // we cache the promise representing the full versions list, this helps reduce the
    // number of requests we send by keeping population of the cache in a single tick
    // making it less likely that multiple requests for the same package will be inflight
    if (!this.#outdatedCache.has(node.name)) {
      this.#outdatedCache.set(node.name, getPackageVersions(node.name, this.flatOptions))
    }
    const availableVersions = await this.#outdatedCache.get(node.name)
    // we attach _all_ versions to the queryContext to allow consumers to do their own
    // filtering and comparisons
    // NOTE: this mutation happens even for nodes that end up being dropped below
    node.queryContext.versions = availableVersions
    // next we further reduce the set to versions that are greater than the current one
    const greaterVersions = availableVersions.filter((available) => {
      return semver.gt(available, node.version)
    })
    // no newer versions than the current one, drop this node from the result set
    if (!greaterVersions.length) {
      return false
    }
    // if we got here, we know that newer versions exist, if the kind is 'any' we're done
    if (outdatedKind === 'any') {
      return node
    }
    // look for newer versions that differ from current by a specific part of the semver version
    if (['major', 'minor', 'patch'].includes(outdatedKind)) {
      // filter the versions greater than our current one based on semver.diff
      const filteredVersions = greaterVersions.filter((version) => {
        return semver.diff(node.version, version) === outdatedKind
      })
      // no available versions are of the correct diff type
      if (!filteredVersions.length) {
        return false
      }
      return node
    }
    // look for newer versions that satisfy at least one edgeIn to this node
    if (outdatedKind === 'in-range') {
      const inRangeContext = []
      for (const edge of node.edgesIn) {
        const inRangeVersions = greaterVersions.filter((version) => {
          return semver.satisfies(version, edge.spec)
        })
        // this edge has no in-range candidates, just move on
        if (!inRangeVersions.length) {
          continue
        }
        inRangeContext.push({
          from: edge.from.location,
          versions: inRangeVersions,
        })
      }
      // if we didn't find at least one match, drop this node
      if (!inRangeContext.length) {
        return false
      }
      // now add to the context each version that is in-range for each edgeIn
      node.queryContext.outdated = {
        ...node.queryContext.outdated,
        inRange: inRangeContext,
      }
      return node
    }
    // look for newer versions that _do not_ satisfy at least one edgeIn
    if (outdatedKind === 'out-of-range') {
      const outOfRangeContext = []
      for (const edge of node.edgesIn) {
        const outOfRangeVersions = greaterVersions.filter((version) => {
          return !semver.satisfies(version, edge.spec)
        })
        // this edge has no out-of-range candidates, skip it
        if (!outOfRangeVersions.length) {
          continue
        }
        outOfRangeContext.push({
          from: edge.from.location,
          versions: outOfRangeVersions,
        })
      }
      // if we didn't add at least one thing to the context, this node is not a match
      if (!outOfRangeContext.length) {
        return false
      }
      // attach the out-of-range context to the node
      node.queryContext.outdated = {
        ...node.queryContext.outdated,
        outOfRange: outOfRangeContext,
      }
      return node
    }
    // any other outdatedKind is unknown and will never match
    return false
  }))
  // return an array with the holes for non-matching nodes removed
  return initialResults.filter(Boolean)
}
}
// operators for attribute selectors
// Each operator receives { attr, value } — both already lower-cased by
// attributeOperator when the selector is case-insensitive — and returns
// whether the attribute value matches.
const attributeOperators = {
  // [attr=value] attribute value is equivalent
  '=' ({ attr, value }) {
    return attr === value
  },
  // [attr~=value] attribute value contains word
  '~=' ({ attr, value }) {
    return (attr.match(/\w+/g) || []).includes(value)
  },
  // [attr*=value] attribute value contains string
  '*=' ({ attr, value }) {
    return attr.includes(value)
  },
  // [attr|=value] attribute value is equal, or starts with `value-`
  // (fix: equality was previously not checked, contradicting both the
  // comment and standard CSS `|=` semantics)
  '|=' ({ attr, value }) {
    return attr === value || attr.startsWith(`${value}-`)
  },
  // [attr^=value] attribute value starts with
  '^=' ({ attr, value }) {
    return attr.startsWith(value)
  },
  // [attr$=value] attribute value ends with
  '$=' ({ attr, value }) {
    return attr.endsWith(value)
  },
}
// Normalize a raw attribute value and delegate to the requested operator.
// Numbers are compared as strings; objects and arrays never match.
const attributeOperator = ({ attr, value, insensitive, operator }) => {
  let normalized = attr
  if (typeof normalized === 'number') {
    normalized = String(normalized)
  }
  if (typeof normalized !== 'string') {
    // object or array: nothing sensible to compare against, bail
    return false
  }
  if (insensitive) {
    normalized = normalized.toLowerCase()
  }
  return attributeOperators[operator]({
    attr: normalized,
    insensitive,
    value,
  })
}
// Evaluate a parsed attribute matcher against a plain object (usually a
// node's package.json).  A bare [attr] selector checks only existence;
// array-valued attributes match if any single element matches.
const attributeMatch = (matcher, obj) => {
  const insensitive = Boolean(matcher.insensitive)
  const operator = matcher.operator || ''
  const attribute = matcher.qualifiedAttribute
  // return early if checking existence
  if (operator === '') {
    return Boolean(obj[attribute])
  }
  let value = matcher.value || ''
  if (insensitive) {
    value = value.toLowerCase()
  }
  const current = obj[attribute]
  if (Array.isArray(current)) {
    // try to match every item in the array; the first hit wins
    return current.find(item =>
      attributeOperator({ attr: item || '', value, insensitive, operator }))
  }
  return attributeOperator({ attr: current || '', value, insensitive, operator })
}
// True when some inbound edge chain marks `node` as the given dep type:
// an edge of that type, a source node flagged with the type, or a source
// node that is itself (recursively) reached through such a chain.
// `seen` guards against dependency cycles.
const edgeIsType = (node, type, seen = new Set()) => {
  for (const inbound of node.edgesIn) {
    if (seen.has(inbound)) {
      continue
    }
    seen.add(inbound)
    const source = inbound.from
    if (inbound.type === type || source[type] || edgeIsType(source, type, seen)) {
      return true
    }
  }
  return false
}
// Keep the nodes that carry the given type flag themselves, or that are
// reachable through an inbound edge chain of that type.
const filterByType = (nodes, type) =>
  nodes.filter(node => node[type] || edgeIsType(node, type))
// pseudo selectors that narrow a result set by dependency type
const depTypes = {
  // dependency: anything not reachable exclusively through dev edges
  '.prod' (prevResults) {
    return prevResults.filter(node => !node.dev)
  },
  // devDependency
  '.dev' (prevResults) {
    return filterByType(prevResults, 'dev')
  },
  // optionalDependency
  '.optional' (prevResults) {
    return filterByType(prevResults, 'optional')
  },
  // peerDependency
  '.peer' (prevResults) {
    return filterByType(prevResults, 'peer')
  },
  // workspace
  '.workspace' (prevResults) {
    return prevResults.filter(node => node.isWorkspace)
  },
  // bundledDependency
  '.bundled' (prevResults) {
    return prevResults.filter(node => node.inBundle)
  },
}
// checks if a given node has a direct parent in any of the nodes provided in
// the compare nodes array
const hasParent = (node, compareNodes) => {
// All it takes is one so we loop and return on the first hit
for (let compareNode of compareNodes) {
if (compareNode.isLink) {
compareNode = compareNode.target
}
// follows logical parent for link anscestors
if (node.isTop && (node.resolveParent === compareNode)) {
return true
}
// follows edges-in to check if they match a possible parent
for (const edge of node.edgesIn) {
if (edge && edge.from === compareNode) {
return true
}
}
}
return false
}
// checks if a given node is a descendant of any of the nodes provided in the
// compareNodes array, walking edgesIn / linksIn transitively.  `seen` tracks
// visited edges to survive dependency cycles.
const hasAscendant = (node, compareNodes, seen = new Set()) => {
  // TODO (future) loop over ancestry property
  if (hasParent(node, compareNodes)) {
    return true
  }
  if (node.isTop && node.resolveParent) {
    /* istanbul ignore if - investigate if linksIn check obviates need for this */
    if (hasAscendant(node.resolveParent, compareNodes)) {
      return true
    }
  }
  for (const edge of node.edgesIn) {
    if (seen.has(edge)) {
      continue
    }
    seen.add(edge)
    if (edge?.from && hasAscendant(edge.from, compareNodes, seen)) {
      return true
    }
  }
  for (const linkNode of node.linksIn) {
    if (hasAscendant(linkNode, compareNodes, seen)) {
      return true
    }
  }
  return false
}
// combinators relate the previous result set to the next one
const combinators = {
  // direct descendant
  '>' (prevResults, nextResults) {
    return nextResults.filter(node => hasParent(node, prevResults))
  },
  // any descendant
  ' ' (prevResults, nextResults) {
    return nextResults.filter(node => hasAscendant(node, prevResults))
  },
  // sibling: shares a parent with a node in prevResults, without being
  // in prevResults itself
  '~' (prevResults, nextResults) {
    // collect the parents of everything in prevResults
    const parents = new Set()
    for (const prev of prevResults) {
      for (const edge of prev.edgesIn) {
        // edge.from always exists cause it's from another node's edgesIn
        parents.add(edge.from)
      }
    }
    const parentList = [...parents]
    return nextResults.filter(node =>
      !prevResults.includes(node) && hasParent(node, parentList))
  },
}
// get a list of available versions of a package filtered to respect --before
// NOTE: this runs over each node and should not throw
const getPackageVersions = async (name, opts) => {
  let packument
  try {
    packument = await pacote.packument(name, {
      ...opts,
      fullMetadata: false, // we only need the corgi
    })
  } catch (err) {
    // if the fetch fails, log a warning and pretend there are no versions
    log.warn('query', `could not retrieve packument for ${name}: ${err.message}`)
    return []
  }
  // start with a sorted list of all versions (lowest first).  A packument
  // with no versions field (e.g. a fully unpublished package) yields an
  // empty list instead of throwing, per the no-throw contract above.
  let candidates = Object.keys(packument.versions ?? {}).sort(semver.compare)
  // if the packument has a time property, and the user passed a before flag, then
  // we filter this list down to only those versions that existed before the specified date
  if (packument.time && opts.before) {
    candidates = candidates.filter((version) => {
      // this version isn't found in the times at all, drop it
      if (!packument.time[version]) {
        return false
      }
      return Date.parse(packument.time[version]) <= opts.before
    })
  }
  return candidates
}
// Run one parsed css ast against the inventory and collect matching nodes.
// The first call receives the parsed query; nested calls receive
// results.currentNode.nestedNode.
const retrieveNodesFromParsedAst = async (opts) => {
  const { rootAstNode } = opts
  if (!rootAstNode.nodes) {
    return new Set()
  }
  const results = new Results(opts)
  // walk is sync, so queue the ast nodes first and process them with
  // await afterwards
  const astQueue = new Set()
  rootAstNode.walk((astNode) => {
    astQueue.add(astNode)
  })
  for (const astNode of astQueue) {
    // this is the only place currentAstNode is reset
    results.currentAstNode = astNode
    const handlerName = `${astNode.type}Type`
    if (typeof results[handlerName] !== 'function') {
      throw Object.assign(
        new Error(`\`${astNode.type}\` is not a supported selector.`),
        { code: 'EQUERYNOSELECTOR' }
      )
    }
    await results[handlerName]()
  }
  return results.collect(rootAstNode)
}
// Entry point: run a css-style query against a tree and return the matching
// nodes ordered by location.
const querySelectorAll = async (targetNode, query, flatOptions) => {
  // The inventory never changes during a query; we snapshot it per call
  // (rather than at module scope) so concurrent queries stay independent.
  const inventory = [...targetNode.root.inventory.values()]
  // a Set of items returned for each parsed css ast selector
  const matched = await retrieveNodesFromParsedAst({
    initialItems: inventory,
    inventory,
    flatOptions,
    rootAstNode: parser(query),
    targetNode,
  })
  // returns nodes ordered by realpath
  return [...matched].sort((a, b) => localeCompare(a.location, b.location))
}
module.exports = querySelectorAll

95
package/node_modules/@npmcli/arborist/lib/realpath.js generated vendored Normal file
View File

@@ -0,0 +1,95 @@
// look up the realpath, but cache stats to minimize overhead
// If the parent folder is in the realpath cache, then we just
// lstat the child, since there's no need to do a full realpath
// This is not a separate module, and is much simpler than Node's
// built-in fs.realpath, because we only care about symbolic links,
// so we can handle many fewer edge cases.
const { lstat, readlink } = require('node:fs/promises')
const { resolve, basename, dirname } = require('node:path')
// Resolve `path` to its real path, memoizing realpaths in `rpcache` and
// lstat results in `stcache`.  `depth` counts recursive steps as a guard
// against pathological symlink loops.
const realpathCached = (path, rpcache, stcache, depth) => {
  // just a safety against extremely deep eloops
  /* istanbul ignore next */
  if (depth > 2000) {
    throw eloop(path)
  }
  path = resolve(path)
  // already resolved this exact path
  if (rpcache.has(path)) {
    return Promise.resolve(rpcache.get(path))
  }
  const dir = dirname(path)
  const base = basename(path)
  // fast path: parent is already resolved, only the child needs an lstat
  if (base && rpcache.has(dir)) {
    return realpathChild(dir, base, rpcache, stcache, depth)
  }
  // if it's the root, then we know it's real
  if (!base) {
    rpcache.set(dir, dir)
    return Promise.resolve(dir)
  }
  // the parent, what is that?
  // find out, and then come back.
  return realpathCached(dir, rpcache, stcache, depth + 1).then(() =>
    realpathCached(path, rpcache, stcache, depth + 1))
}
// lstat with memoization: while the stat is in flight the pending promise
// itself is cached, so concurrent callers share one syscall; once it
// settles the cache holds the resolved Stats object instead.
const lstatCached = (path, stcache) => {
  if (stcache.has(path)) {
    return Promise.resolve(stcache.get(path))
  }
  const pending = lstat(path).then(st => {
    stcache.set(path, st)
    return st
  })
  stcache.set(path, pending)
  return pending
}
// Build an ELOOP error shaped like the one a real stat syscall would
// produce.  A slight fib — no syscall happened — but file systems are
// giant piles of lies, so whatever.
const eloop = path => {
  const err = new Error(
    `ELOOP: too many symbolic links encountered, stat '${path}'`)
  return Object.assign(err, {
    errno: -62,
    syscall: 'stat',
    code: 'ELOOP',
    path,
  })
}
// Resolve a single child entry against a parent directory whose realpath
// is already cached.  Follows one symlink hop then recurses through
// realpathCached for the link target.
const realpathChild = (dir, base, rpcache, stcache, depth) => {
  const realdir = rpcache.get(dir)
  // that unpossible
  /* istanbul ignore next */
  if (typeof realdir === 'undefined') {
    throw new Error('in realpathChild without parent being in realpath cache')
  }
  const realish = resolve(realdir, base)
  return lstatCached(realish, stcache).then(st => {
    // not a symlink: the candidate path is already real
    if (!st.isSymbolicLink()) {
      rpcache.set(resolve(dir, base), realish)
      return realish
    }
    return readlink(realish).then(target => {
      const resolved = resolve(realdir, target)
      // a link that resolves to itself can never terminate
      if (realish === resolved) {
        throw eloop(realish)
      }
      return realpathCached(resolved, rpcache, stcache, depth + 1)
    }).then(real => {
      rpcache.set(resolve(dir, base), real)
      return real
    })
  })
}
module.exports = realpathCached

3
package/node_modules/@npmcli/arborist/lib/relpath.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
const { relative } = require('node:path')
// relative path between two locations, normalized to forward slashes so
// results are stable across platforms
const relpath = (from, to) => relative(from, to).split('\\').join('/')
module.exports = relpath

View File

@@ -0,0 +1,15 @@
// Sometimes we need to actually do a walk from the root, because you can
// have a cycle of deps that all depend on each other, but no path from root.
// Also, since the ideal tree is loaded from the shrinkwrap, it had extraneous
// flags set false that might now be actually extraneous, and dev/optional
// flags that are also now incorrect. This method sets all flags to true, so
// we can find the set that is actually extraneous.
module.exports = tree => {
for (const node of tree.inventory.values()) {
node.extraneous = true
node.dev = true
node.devOptional = true
node.peer = true
node.optional = true
}
}

View File

@@ -0,0 +1,19 @@
const crypto = require('node:crypto')
const { dirname, basename, resolve } = require('node:path')
// use sha1 because it's faster, and collisions extremely unlikely anyway;
// the digest is base64 with non-alphanumerics stripped, trimmed to 8 chars
// so it stays filename-safe
const pathSafeHash = (s) => {
  const digest = crypto.createHash('sha1').update(s).digest('base64')
  return digest.replace(/[^a-zA-Z0-9]+/g, '').slice(0, 8)
}
// Compute the "retired" location for a path being moved out of the way:
// a dot-prefixed sibling suffixed with a short hash of the original path,
// so the retired name is both hidden and collision-resistant.
const retirePath = from => {
  const parent = dirname(from)
  const name = basename(from)
  return resolve(parent, `.${name}-${pathSafeHash(from)}`)
}
module.exports = retirePath

1181
package/node_modules/@npmcli/arborist/lib/shrinkwrap.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,74 @@
const signals = require('./signals.js')
// for testing, expose the process being used
module.exports = Object.assign(fn => setup(fn), { process })
// do all of this in a setup function so that we can call it
// multiple times for multiple reifies that might be going on.
// Otherwise, Arborist.reify() is a global action, which is a
// new constraint we'd be adding with this behavior.
const setup = fn => {
  const { process } = module.exports
  // per-signal handlers, plus a `loaded` flag so unload is idempotent
  const sigListeners = { loaded: false }
  // remove every handler this setup installed (and the beforeExit hook)
  const unload = () => {
    if (!sigListeners.loaded) {
      return
    }
    for (const sig of signals) {
      try {
        process.removeListener(sig, sigListeners[sig])
      } catch {
        // ignore errors
      }
    }
    process.removeListener('beforeExit', onBeforeExit)
    sigListeners.loaded = false
  }
  const onBeforeExit = () => {
    // this trick ensures that we exit with the same signal we caught
    // Ie, if you press ^C and npm gets a SIGINT, we'll do the rollback
    // and then exit with a SIGINT signal once we've removed the handler.
    // The timeout is there because signals are asynchronous, so we need
    // the process to NOT exit on its own, which means we have to have
    // something keeping the event loop looping. Hence this hack.
    unload()
    process.kill(process.pid, signalReceived)
    setTimeout(() => {}, 500)
  }
  // the last fatal signal we intercepted, re-raised in onBeforeExit
  let signalReceived = null
  // build the handler for one signal: record it, tear down our hooks,
  // arrange the re-raise, then invoke the user callback
  const listener = (sig, fn) => () => {
    signalReceived = sig
    // if we exit normally, but caught a signal which would have been fatal,
    // then re-send it once we're done with whatever cleanup we have to do.
    unload()
    if (process.listeners(sig).length < 1) {
      process.once('beforeExit', onBeforeExit)
    }
    fn({ signal: sig })
  }
  // do the actual loading here
  for (const sig of signals) {
    sigListeners[sig] = listener(sig, fn)
    const max = process.getMaxListeners()
    try {
      // if we call this a bunch of times, avoid triggering the warning
      const { length } = process.listeners(sig)
      if (length >= max) {
        process.setMaxListeners(length + 1)
      }
      process.on(sig, sigListeners[sig])
    } catch {
      // ignore errors
    }
  }
  sigListeners.loaded = true
  // caller uses the returned function to tear everything down again
  return unload
}

58
package/node_modules/@npmcli/arborist/lib/signals.js generated vendored Normal file
View File

@@ -0,0 +1,58 @@
// copied from signal-exit
// This is not the set of all possible signals.
//
// It IS, however, the set of all signals that trigger
// an exit on either Linux or BSD systems. Linux is a
// superset of the signal names supported on BSD, and
// the unknown signals just fail to register, so we can
// catch that easily enough.
//
// Don't bother with SIGKILL. It's uncatchable, which
// means that we can't fire any callbacks anyway.
//
// If a user does happen to register a handler on a non-
// fatal signal like SIGWINCH or something, and then
// exit, it'll end up firing `process.emit('exit')`, so
// the handler will be fired anyway.
//
// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised
// artificially, inherently leave the process in a
// state from which it is not safe to try and enter JS
// listeners.
// allow tests to fake the platform so the full list is exercised anywhere
const platform = global.__ARBORIST_FAKE_PLATFORM__ || process.platform
// fatal signals common to every supported platform
const fatalSignals = [
  'SIGABRT',
  'SIGALRM',
  'SIGHUP',
  'SIGINT',
  'SIGTERM',
]
if (platform !== 'win32') {
  fatalSignals.push(
    'SIGVTALRM',
    'SIGXCPU',
    'SIGXFSZ',
    'SIGUSR2',
    'SIGTRAP',
    'SIGSYS',
    'SIGQUIT',
    'SIGIOT'
    // should detect profiler and enable/disable accordingly.
    // see #21
    // 'SIGPROF'
  )
}
if (platform === 'linux') {
  fatalSignals.push(
    'SIGIO',
    'SIGPOLL',
    'SIGPWR',
    'SIGSTKFLT',
    'SIGUNUSED'
  )
}
module.exports = fatalSignals

View File

@@ -0,0 +1,34 @@
const npa = require('npm-package-arg')
// extracted from npm v6 lib/install/realize-shrinkwrap-specifier.js
// Reconstruct a dependency spec from a lockfile entry, trying the most
// trustworthy fields first (version+integrity, legacy `from`, `resolved`)
// and falling back to a last-ditch version resolve.  Returns {} when
// nothing can be resolved.
const specFromLock = (name, lock, where) => {
  try {
    const { version, from, resolved, integrity } = lock
    if (version) {
      const versionSpec = npa.resolve(name, version, where)
      if (integrity || versionSpec.type === 'git') {
        return versionSpec
      }
    }
    if (from) {
      // legacy metadata includes "from", but not integrity
      const fromSpec = npa.resolve(name, from, where)
      if (fromSpec.registry && version) {
        return npa.resolve(name, version, where)
      } else if (!resolved) {
        return fromSpec
      }
    }
    if (resolved) {
      return npa.resolve(name, resolved, where)
    }
  } catch {
    // fall through to the last-ditch attempt below
  }
  try {
    return npa.resolve(name, lock.version, where)
  } catch {
    return {}
  }
}
module.exports = specFromLock

86
package/node_modules/@npmcli/arborist/lib/tracker.js generated vendored Normal file
View File

@@ -0,0 +1,86 @@
const proggy = require('proggy')
// Mixin adding named progress trackers (backed by proggy) to an Arborist
// class.  Trackers are keyed "section" or "section:key" for subtrackers.
module.exports = cls => class Tracker extends cls {
  // key -> live proggy.Tracker; entries remove themselves when done
  #progress = new Map()
  #createTracker (key, name) {
    const tracker = new proggy.Tracker(name ?? key)
    tracker.on('done', () => this.#progress.delete(key))
    this.#progress.set(key, tracker)
  }
  // Register a tracker (or a subtracker when `subsection` is given).
  // `key` defaults to `subsection` and is only used for the map key.
  addTracker (section, subsection = null, key = null) {
    if (section === null || section === undefined) {
      this.#onError(`Tracker can't be null or undefined`)
    }
    if (key === null) {
      key = subsection
    }
    const hasTracker = this.#progress.has(section)
    const hasSubtracker = this.#progress.has(`${section}:${key}`)
    if (hasTracker && subsection === null) {
      // 0. existing tracker, no subsection
      this.#onError(`Tracker "${section}" already exists`)
    } else if (!hasTracker && subsection === null) {
      // 1. no existing tracker, no subsection
      // Create a new progress tracker
      this.#createTracker(section)
    } else if (!hasTracker && subsection !== null) {
      // 2. no parent tracker and subsection
      this.#onError(`Parent tracker "${section}" does not exist`)
    } else if (!hasTracker || !hasSubtracker) {
      // 3. existing parent tracker, no subsection tracker
      // Create a new subtracker and update parents
      const parentTracker = this.#progress.get(section)
      parentTracker.update(parentTracker.value, parentTracker.total + 1)
      this.#createTracker(`${section}:${key}`, `${section}:${subsection}`)
    }
    // 4. existing parent tracker, existing subsection tracker
    // skip it
  }
  // Finish a tracker; finishing a parent first finishes all of its
  // remaining subtrackers.
  finishTracker (section, subsection = null, key = null) {
    if (section === null || section === undefined) {
      this.#onError(`Tracker can't be null or undefined`)
    }
    if (key === null) {
      key = subsection
    }
    const hasTracker = this.#progress.has(section)
    const hasSubtracker = this.#progress.has(`${section}:${key}`)
    // 0. parent tracker exists, no subsection
    // Finish parent tracker and remove from this.#progress
    if (hasTracker && subsection === null) {
      // check if parent tracker does
      // not have any remaining children
      // NOTE(review): `section` is used unescaped in a RegExp; a section
      // containing regex metacharacters could mis-match — confirm sections
      // are always plain words
      const keys = this.#progress.keys()
      for (const key of keys) {
        if (key.match(new RegExp(section + ':'))) {
          this.finishTracker(section, key)
        }
      }
      // remove parent tracker
      this.#progress.get(section).finish()
    } else if (!hasTracker && subsection === null) {
      // 1. no existing parent tracker, no subsection
      this.#onError(`Tracker "${section}" does not exist`)
    } else if (!hasTracker || hasSubtracker) {
      // 2. subtracker exists
      // Finish subtracker and remove from this.#progress
      // NOTE(review): if a subsection is given but the parent tracker is
      // missing, parentTracker is undefined here and .update would throw —
      // confirm callers never finish a subsection of an unknown parent
      const parentTracker = this.#progress.get(section)
      parentTracker.update(parentTracker.value + 1)
      this.#progress.get(`${section}:${key}`).finish()
    }
    // 3. existing parent tracker, no subsection
  }
  // centralized error reporting so subclasses could override it
  #onError (msg) {
    throw new Error(msg)
  }
}

155
package/node_modules/@npmcli/arborist/lib/tree-check.js generated vendored Normal file
View File

@@ -0,0 +1,155 @@
const debug = require('./debug.js')
// Walk an entire tree and assert its structural invariants: single root,
// all nodes share that root, only the root has an inventory, every node is
// in the inventory, dev edges only on top nodes, no path collisions with
// the root, and path === realpath for non-links.  `log` accumulates a
// trace of the walk and is attached to any thrown error for debugging.
const checkTree = (tree, checkUnreachable = true) => {
  const log = [['START TREE CHECK', tree.path]]
  // this can only happen in tests where we have a "tree" object
  // that isn't actually a tree.
  if (!tree.root || !tree.root.inventory) {
    return tree
  }
  const { inventory } = tree.root
  const seen = new Set()
  const check = (node, via = tree, viaType = 'self') => {
    log.push([
      'CHECK',
      node && node.location,
      via && via.location,
      viaType,
      'seen=' + seen.has(node),
      'promise=' + !!(node && node.then),
      'root=' + !!(node && node.isRoot),
    ])
    // skip nulls, already-visited nodes, and thenables (unresolved nodes)
    if (!node || seen.has(node) || node.then) {
      return
    }
    seen.add(node)
    // invariant: only one node may claim to be the root
    if (node.isRoot && node !== tree.root) {
      throw Object.assign(new Error('double root'), {
        node: node.path,
        realpath: node.realpath,
        tree: tree.path,
        root: tree.root.path,
        via: via.path,
        viaType,
        log,
      })
    }
    // invariant: every reachable node belongs to this tree's root
    if (node.root !== tree.root) {
      throw Object.assign(new Error('node from other root in tree'), {
        node: node.path,
        realpath: node.realpath,
        tree: tree.path,
        root: tree.root.path,
        via: via.path,
        viaType,
        otherRoot: node.root && node.root.path,
        log,
      })
    }
    // invariant: only the root keeps an inventory
    if (!node.isRoot && node.inventory.size !== 0) {
      throw Object.assign(new Error('non-root has non-zero inventory'), {
        node: node.path,
        tree: tree.path,
        root: tree.root.path,
        via: via.path,
        viaType,
        inventory: [...node.inventory.values()].map(node =>
          [node.path, node.location]),
        log,
      })
    }
    // invariant: every non-root, non-dummy node is tracked in the inventory
    if (!node.isRoot && !inventory.has(node) && !node.dummy) {
      throw Object.assign(new Error('not in inventory'), {
        node: node.path,
        tree: tree.path,
        root: tree.root.path,
        via: via.path,
        viaType,
        log,
      })
    }
    // invariant: dev deps only hang off top-of-tree nodes
    const devEdges = [...node.edgesOut.values()].filter(e => e.dev)
    if (!node.isTop && devEdges.length) {
      throw Object.assign(new Error('dev edges on non-top node'), {
        node: node.path,
        tree: tree.path,
        root: tree.root.path,
        via: via.path,
        viaType,
        devEdges: devEdges.map(e => [e.type, e.name, e.spec, e.error]),
        log,
      })
    }
    // invariant: no second node may occupy the root's path (links excepted)
    if (node.path === tree.root.path && node !== tree.root && !tree.root.isLink) {
      throw Object.assign(new Error('node with same path as root'), {
        node: node.path,
        tree: tree.path,
        root: tree.root.path,
        via: via.path,
        viaType,
        log,
      })
    }
    // invariant: only links may have path !== realpath
    if (!node.isLink && node.path !== node.realpath) {
      throw Object.assign(new Error('non-link with mismatched path/realpath'), {
        node: node.path,
        tree: tree.path,
        realpath: node.realpath,
        root: tree.root.path,
        via: via.path,
        viaType,
        log,
      })
    }
    // recurse through every relationship the node participates in
    const { parent, fsParent, target } = node
    check(parent, node, 'parent')
    check(fsParent, node, 'fsParent')
    check(target, node, 'target')
    log.push(['CHILDREN', node.location, ...node.children.keys()])
    for (const kid of node.children.values()) {
      check(kid, node, 'children')
    }
    for (const kid of node.fsChildren) {
      check(kid, node, 'fsChildren')
    }
    for (const link of node.linksIn) {
      check(link, node, 'linksIn')
    }
    for (const top of node.tops) {
      check(top, node, 'tops')
    }
    log.push(['DONE', node.location])
  }
  check(tree)
  // anything in the inventory that the walk never reached is orphaned
  if (checkUnreachable) {
    for (const node of inventory.values()) {
      if (!seen.has(node) && node !== tree.root) {
        throw Object.assign(new Error('unreachable in inventory'), {
          node: node.path,
          realpath: node.realpath,
          location: node.location,
          root: tree.root.path,
          tree: tree.path,
          log,
        })
      }
    }
  }
  return tree
}
// should only ever run this check in debug mode
// default export is a pass-through no-op...
module.exports = tree => tree
// ...which debug() swaps for the real checker in debug builds only
debug(() => module.exports = checkTree)

View File

@@ -0,0 +1,47 @@
const semver = require('semver')
const { basename } = require('node:path')
const { URL } = require('node:url')
module.exports = (name, tgz) => {
const base = basename(tgz)
if (!base.endsWith('.tgz')) {
return null
}
if (tgz.startsWith('http:/') || tgz.startsWith('https:/')) {
const u = new URL(tgz)
// registry url? check for most likely pattern.
// either /@foo/bar/-/bar-1.2.3.tgz or
// /foo/-/foo-1.2.3.tgz, and fall through to
// basename checking. Note that registries can
// be mounted below the root url, so /a/b/-/x/y/foo/-/foo-1.2.3.tgz
// is a potential option.
const tfsplit = u.pathname.slice(1).split('/-/')
if (tfsplit.length > 1) {
const afterTF = tfsplit.pop()
if (afterTF === base) {
const pre = tfsplit.pop()
const preSplit = pre.split(/\/|%2f/i)
const project = preSplit.pop()
const scope = preSplit.pop()
return versionFromBaseScopeName(base, scope, project)
}
}
}
const split = name.split(/\/|%2f/i)
const project = split.pop()
const scope = split.pop()
return versionFromBaseScopeName(base, scope, project)
}
// Extract { name, version } from a tarball basename of the form
// <name>-<semver>.tgz, re-attaching an @scope when one was provided.
// Returns null when the basename doesn't match or the version isn't
// valid semver.
const versionFromBaseScopeName = (base, scope, name) => {
  if (!base.startsWith(name + '-')) {
    return null
  }
  // strip "<name>-" prefix and ".tgz" suffix, then parse what's left
  const parsed = semver.parse(base.substring(name.length + 1, base.length - 4))
  if (!parsed) {
    return null
  }
  const scoped = scope && scope.charAt(0) === '@'
  return {
    name: scoped ? `${scope}/${name}` : name,
    version: parsed.version,
  }
}

214
package/node_modules/@npmcli/arborist/lib/vuln.js generated vendored Normal file
View File

@@ -0,0 +1,214 @@
// An object representing a vulnerability either as the result of an
// advisory or due to the package in question depending exclusively on
// vulnerable versions of a dep.
//
// - name: package name
// - range: Set of vulnerable versions
// - nodes: Set of nodes affected
// - effects: Set of vulns triggered by this one
// - advisories: Set of advisories (including metavulns) causing this vuln.
// All of the entries in via are vulnerability objects returned by
// @npmcli/metavuln-calculator
// - via: dependency vulns which cause this one
const { satisfies, simplifyRange } = require('semver')
const semverOpt = { loose: true, includePrerelease: true }
const localeCompare = require('@isaacs/string-locale-compare')('en')
const npa = require('npm-package-arg')
// bidirectional severity map: name -> rank and rank -> name, so lookups
// work in either direction; null/-1 represents "no severity yet"
const severities = new Map()
for (const [label, rank] of [
  ['info', 0],
  ['low', 1],
  ['moderate', 2],
  ['high', 3],
  ['critical', 4],
  [null, -1],
]) {
  severities.set(label, rank)
  severities.set(rank, label)
}
class Vuln {
  // cached vulnerable-range string, rebuilt lazily from advisories
  #range = null
  // cached simplified form of #range
  #simpleRange = null
  // assume a fix is available unless it hits a top node
  // that locks it in place, setting this false or {isSemVerMajor, version}.
  #fixAvailable = true
  constructor ({ name, advisory }) {
    this.name = name
    this.via = new Set()
    this.advisories = new Set()
    this.severity = null
    this.effects = new Set()
    this.topNodes = new Set()
    this.nodes = new Set()
    this.addAdvisory(advisory)
    this.packument = advisory.packument
    this.versions = advisory.versions
  }
  get fixAvailable () {
    return this.#fixAvailable
  }
  set fixAvailable (f) {
    this.#fixAvailable = f
    // if there's a fix available for this at the top level, it means that
    // it will also fix the vulns that led to it being there. to get there,
    // we set the vias to the most "strict" of fix availables.
    // - false: no fix is available
    // - {name, version, isSemVerMajor} fix requires -f, is semver major
    // - {name, version} fix requires -f, not semver major
    // - true: fix does not require -f
    // TODO: duped entries may require different fixes but the current
    // structure does not support this, so the case were a top level fix
    // corrects a duped entry may mean you have to run fix more than once
    for (const v of this.via) {
      // don't blow up on loops
      if (v.fixAvailable === f) {
        continue
      }
      if (f === false) {
        v.fixAvailable = f
      } else if (v.fixAvailable === true) {
        v.fixAvailable = f
      } else if (typeof f === 'object' && (
        typeof v.fixAvailable !== 'object' || !v.fixAvailable.isSemVerMajor)) {
        v.fixAvailable = f
      }
    }
  }
  // true when some affected node is depended on directly by the project
  // root or a workspace
  get isDirect () {
    for (const node of this.nodes.values()) {
      for (const edge of node.edgesIn) {
        if (edge.from.isProjectRoot || edge.from.isWorkspace) {
          return true
        }
      }
    }
    return false
  }
  // would installing `spec` avoid the vulnerable range?  non-registry
  // specs are assumed safe to try (we can't enumerate their versions)
  testSpec (spec) {
    const specObj = npa(spec)
    if (!specObj.registry) {
      return true
    }
    if (specObj.subSpec) {
      spec = specObj.subSpec.rawSpec
    }
    for (const v of this.versions) {
      if (satisfies(v, spec) && !satisfies(v, this.range, semverOpt)) {
        return false
      }
    }
    return true
  }
  toJSON () {
    return {
      name: this.name,
      severity: this.severity,
      isDirect: this.isDirect,
      // just loop over the advisories, since via is only Vuln objects,
      // and calculated advisories have all the info we need
      via: [...this.advisories].map(v => v.type === 'metavuln' ? v.dependency : {
        ...v,
        versions: undefined,
        vulnerableVersions: undefined,
        id: undefined,
      }).sort((a, b) =>
        localeCompare(String(a.source || a), String(b.source || b))),
      effects: [...this.effects].map(v => v.name).sort(localeCompare),
      range: this.simpleRange,
      nodes: [...this.nodes].map(n => n.location).sort(localeCompare),
      fixAvailable: this.#fixAvailable,
    }
  }
  // link a causing vuln `v` to this one (bidirectional)
  addVia (v) {
    this.via.add(v)
    v.effects.add(this)
    // call the setter since we might add vias _after_ setting fixAvailable
    this.fixAvailable = this.fixAvailable
  }
  deleteVia (v) {
    this.via.delete(v)
    v.effects.delete(this)
  }
  // drop one advisory, then rebuild severity/range/vias from the rest
  deleteAdvisory (advisory) {
    this.advisories.delete(advisory)
    // make sure we have the max severity of all the vulns causing this one
    this.severity = null
    this.#range = null
    this.#simpleRange = null
    // refresh severity
    for (const advisory of this.advisories) {
      this.addAdvisory(advisory)
    }
    // remove any effects that are no longer relevant
    const vias = new Set([...this.advisories].map(a => a.dependency))
    for (const via of this.via) {
      if (!vias.has(via.name)) {
        this.deleteVia(via)
      }
    }
  }
  // record an advisory and raise severity to the max seen so far
  addAdvisory (advisory) {
    this.advisories.add(advisory)
    const sev = severities.get(advisory.severity)
    this.#range = null
    this.#simpleRange = null
    if (sev > severities.get(this.severity)) {
      this.severity = advisory.severity
    }
  }
  // union of all advisories' vulnerable ranges
  get range () {
    if (!this.#range) {
      this.#range = [...this.advisories].map(v => v.range).join(' || ')
    }
    return this.#range
  }
  // simplified (human-friendly) form of `range`, cached until advisories change
  get simpleRange () {
    if (this.#simpleRange && this.#simpleRange === this.#range) {
      return this.#simpleRange
    }
    const versions = [...this.advisories][0].versions
    const range = this.range
    this.#simpleRange = simplifyRange(versions, range, semverOpt)
    this.#range = this.#simpleRange
    return this.#simpleRange
  }
  // does this node's installed version fall in any advisory's range?
  // matching nodes are memoized into this.nodes
  isVulnerable (node) {
    if (this.nodes.has(node)) {
      return true
    }
    const { version } = node.package
    if (!version) {
      return false
    }
    for (const v of this.advisories) {
      if (v.testVersion(version)) {
        this.nodes.add(node)
        return true
      }
    }
    return false
  }
}
module.exports = Vuln

377
package/node_modules/@npmcli/arborist/lib/yarn-lock.js generated vendored Normal file
View File

@@ -0,0 +1,377 @@
// parse a yarn lock file
// basic format
//
// <request spec>[, <request spec> ...]:
// <key> <value>
// <subkey>:
// <key> <value>
//
// Assume that any key or value might be quoted, though that's only done
// in practice if certain chars are in the string. When writing back, we follow
// Yarn's rules for quoting, to cause minimal friction.
//
// The data format would support nested objects, but at this time, it
// appears that yarn does not use that for anything, so in the interest
// of a simpler parser algorithm, this implementation only supports a
// single layer of sub objects.
//
// This doesn't deterministically define the shape of the tree, and so
// cannot be used (on its own) for Arborist.loadVirtual.
// But it can give us resolved, integrity, and version, which is useful
// for Arborist.loadActual and for building the ideal tree.
//
// At the very least, when a yarn.lock file is present, we update it
// along the way, and save it back in Shrinkwrap.save()
//
// NIHing this rather than using @yarnpkg/lockfile because that module
// is an impenetrable 10kloc of webpack flow output, which is overkill
// for something relatively simple and tailored to Arborist's use case.
const localeCompare = require('@isaacs/string-locale-compare')('en')
const consistentResolve = require('./consistent-resolve.js')
const { dirname } = require('node:path')
const { breadth } = require('treeverse')
// Sort Yarn entries respecting the yarn.lock sort order: fields listed
// here sort by their number (lowest first); any other field is treated
// as priority 100 by priorityThenLocaleCompare below.
const yarnEntryPriorities = {
  name: 1,
  version: 2,
  uid: 3,
  resolved: 4,
  integrity: 5,
  registry: 6,
  dependencies: 7,
}
// Comparator for the fields of a yarn.lock entry: fields named in
// yarnEntryPriorities sort by their priority number, everything else
// falls back to locale order (or priority 100 when mixed).
const priorityThenLocaleCompare = (a, b) => {
  const pa = yarnEntryPriorities[a]
  const pb = yarnEntryPriorities[b]
  if (!pa && !pb) {
    return localeCompare(a, b)
  }
  /* istanbul ignore next */
  return (pa || 100) > (pb || 100) ? 1 : -1
}
// Render a scalar for yarn.lock output, JSON-quoting it whenever yarn
// would: non-strings, strings that could be read as booleans, strings
// containing structural characters, or strings not starting with an
// ASCII letter.  (The regexes are fresh literals per call, so the /g
// flag on the originals was inert and is dropped here.)
const quoteIfNeeded = val => {
  const type = typeof val
  if (type === 'boolean' || type === 'number') {
    return JSON.stringify(val)
  }
  const looksBoolean = val.startsWith('true') || val.startsWith('false')
  const hasSpecialChars = /[:\s\n\\",[\]]/.test(val)
  const startsWithLetter = /^[a-zA-Z]/.test(val)
  if (looksBoolean || hasSpecialChars || !startsWithLetter) {
    return JSON.stringify(val)
  }
  return val
}
// Serialize a flat key/value object as indented yarn.lock lines, keys
// sorted with the locale comparator, both keys and values quoted only
// when quoteIfNeeded says so.
const sortKV = obj => {
  const keys = Object.keys(obj).sort(localeCompare)
  const lines = keys.map(k => ` ${quoteIfNeeded(k)} ${quoteIfNeeded(obj[k])}`)
  return lines.join('\n')
}
// Decide whether a previously recorded entry and a freshly computed one
// describe the same artifact.  The strongest field both sides share
// wins: integrity, then resolved, then version.  With no comparable
// field at all, assume they match.
const match = (p, n) => {
  if (p.integrity && n.integrity) {
    return p.integrity === n.integrity
  }
  if (p.resolved && n.resolved) {
    return p.resolved === n.resolved
  }
  if (p.version && n.version) {
    return p.version === n.version
  }
  return true
}
// Banner written at the top of every serialized yarn.lock file.
const prefix =
`# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
`
// Sentinel meaning "no current subkey" during parsing; a Symbol can
// never collide with a real key parsed from the file.
const nullSymbol = Symbol('null')
// Parsed representation of a yarn.lock (v1) file.  `entries` maps each
// request spec ("name@range") to a YarnLockEntry; several specs may
// share a single entry object.
class YarnLock {
  // Parse lockfile text into a fresh YarnLock.
  static parse (data) {
    return new YarnLock().parse(data)
  }

  // Build a YarnLock describing an Arborist tree.
  static fromTree (tree) {
    return new YarnLock().fromTree(tree)
  }

  constructor () {
    // Map of spec -> YarnLockEntry, populated by parse()/fromTree()
    this.entries = null
    this.endCurrent()
  }

  // Reset per-stanza parse state between blank-line-separated stanzas.
  endCurrent () {
    this.current = null
    this.subkey = nullSymbol
  }

  // Parse yarn.lock text into this.entries.  On malformed input, throws
  // an Error decorated with { position, content, line }.
  parse (data) {
    // Line classifiers:
    //   ENTRY_START - unindented "spec[, spec...]:" stanza header
    //   SUBKEY      - 2-space-indented "key:" opening a nested object
    //   SUBVAL      - 4-space-indented "key value" inside that object
    //   METADATA    - 2-space-indented "key value" on the entry itself
    const ENTRY_START = /^[^\s].*:$/
    const SUBKEY = /^ {2}[^\s]+:$/
    const SUBVAL = /^ {4}[^\s]+ .+$/
    const METADATA = /^ {2}[^\s]+ .+$/
    this.entries = new Map()
    this.current = null
    const linere = /([^\r\n]*)\r?\n/gm
    // NOTE: this local `match` shadows the module-level match() helper
    let match
    let lineNum = 0
    // make sure the last line is newline-terminated so linere sees it
    if (!/\n$/.test(data)) {
      data += '\n'
    }
    while (match = linere.exec(data)) {
      const line = match[1]
      lineNum++
      // full-line comments are ignored
      if (line.charAt(0) === '#') {
        continue
      }
      // a blank line ends the current stanza
      if (line === '') {
        this.endCurrent()
        continue
      }
      if (ENTRY_START.test(line)) {
        this.endCurrent()
        // "a@1.x, a@^1.2.3:" -> one shared entry keyed by every spec
        const specs = this.splitQuoted(line.slice(0, -1), /, */)
        this.current = new YarnLockEntry(specs)
        specs.forEach(spec => this.entries.set(spec, this.current))
        continue
      }
      if (SUBKEY.test(line)) {
        // open a nested object (e.g. "dependencies:") on the entry
        this.subkey = line.slice(2, -1)
        this.current[this.subkey] = {}
        continue
      }
      if (SUBVAL.test(line) && this.current && this.current[this.subkey]) {
        // "    name value" inside the currently open nested object
        const subval = this.splitQuoted(line.trimLeft(), ' ')
        if (subval.length === 2) {
          this.current[this.subkey][subval[0]] = subval[1]
          continue
        }
      }
      // any other metadata
      if (METADATA.test(line) && this.current) {
        const metadata = this.splitQuoted(line.trimLeft(), ' ')
        if (metadata.length === 2) {
          // strip off the legacy shasum hashes
          if (metadata[0] === 'resolved') {
            metadata[1] = metadata[1].replace(/#.*/, '')
          }
          this.current[metadata[0]] = metadata[1]
          continue
        }
      }
      // no classifier matched: not a valid yarn.lock v1 line
      throw Object.assign(new Error('invalid or corrupted yarn.lock file'), {
        position: match.index,
        content: match[0],
        line: lineNum,
      })
    }
    this.endCurrent()
    return this
  }

  // Split str on delim while honoring double-quoted segments; quotes
  // are stripped from the returned chunks.
  // NOTE(review): when a quoted segment itself contains the delimiter,
  // the delimiter is NOT restored on re-join (split() consumed it), so
  // the inline example below would actually yield 'bc', not 'b,c' —
  // apparently harmless for real yarn.lock data, but confirm upstream.
  splitQuoted (str, delim) {
    // a,"b,c",d"e,f => ['a','"b','c"','d"e','f'] => ['a','b,c','d"e','f']
    const split = str.split(delim)
    const out = []
    let o = 0
    for (let i = 0; i < split.length; i++) {
      const chunk = split[i]
      if (/^".*"$/.test(chunk)) {
        // chunk is fully quoted: just strip the quotes
        out[o++] = chunk.trim().slice(1, -1)
      } else if (/^"/.test(chunk)) {
        // opening quote only: absorb chunks until the closing quote
        let collect = chunk.trimLeft().slice(1)
        while (++i < split.length) {
          const n = split[i]
          // something that is not a slash, followed by an even number
          // of slashes then a " then end => ending on an unescaped "
          if (/[^\\](\\\\)*"$/.test(n)) {
            collect += n.trimRight().slice(0, -1)
            break
          } else {
            collect += n
          }
        }
        out[o++] = collect
      } else {
        out[o++] = chunk.trim()
      }
    }
    return out
  }

  // Serialize back to yarn.lock text: the banner, then the de-duplicated
  // entries sorted by their rendered form (quotes ignored for sorting),
  // separated by blank lines.
  toString () {
    return prefix + [...new Set([...this.entries.values()])]
      .map(e => e.toString())
      .sort((a, b) => localeCompare(a.replace(/"/g, ''), b.replace(/"/g, ''))).join('\n\n') + '\n'
  }

  // Populate this.entries from an Arborist tree.
  fromTree (tree) {
    this.entries = new Map()
    // walk the tree in a deterministic order, breadth-first, alphabetical
    breadth({
      tree,
      visit: node => this.addEntryFromNode(node),
      getChildren: node => [...node.children.values(), ...node.fsChildren]
        .sort((a, b) => a.depth - b.depth || localeCompare(a.name, b.name)),
    })
    return this
  }

  // Record one tree node, merging its specs into a compatible prior
  // entry when one exists (see the long rationale below).
  addEntryFromNode (node) {
    const specs = [...node.edgesIn]
      .map(e => `${node.name}@${e.spec}`)
      .sort(localeCompare)

    // Note:
    // yarn will do excessive duplication in a case like this:
    // root -> (x@1.x, y@1.x, z@1.x)
    // y@1.x -> (x@1.1, z@2.x)
    // z@1.x -> ()
    // z@2.x -> (x@1.x)
    //
    // where x@1.2 exists, because the "x@1.x" spec will *always* resolve
    // to x@1.2, which doesn't work for y's dep on x@1.1, so you'll get this:
    //
    // root
    // +-- x@1.2.0
    // +-- y
    // |   +-- x@1.1.0
    // |   +-- z@2
    // |       +-- x@1.2.0
    // +-- z@1
    //
    // instead of this more deduped tree that arborist builds by default:
    //
    // root
    // +-- x@1.2.0 (dep is x@1.x, from root)
    // +-- y
    // |   +-- x@1.1.0
    // |   +-- z@2 (dep on x@1.x deduped to x@1.1.0 under y)
    // +-- z@1
    //
    // In order to not create an invalid yarn.lock file with conflicting
    // entries, AND not tell yarn to create an invalid tree, we need to
    // ignore the x@1.x spec coming from z, since it's already in the entries.
    //
    // So, if the integrity and resolved don't match a previous entry, skip it.
    // We call this method on shallower nodes first, so this is fine.
    const n = this.entryDataFromNode(node)
    let priorEntry = null
    const newSpecs = []
    for (const s of specs) {
      const prev = this.entries.get(s)
      // no previous entry for this spec at all, so it's new
      if (!prev) {
        // if we saw a match already, then assign this spec to it as well
        if (priorEntry) {
          priorEntry.addSpec(s)
        } else {
          newSpecs.push(s)
        }
        continue
      }

      const m = match(prev, n)
      // there was a prior entry, but a different thing.  skip this one
      if (!m) {
        continue
      }

      // previous matches, but first time seeing it, so already has this spec.
      // go ahead and add all the previously unseen specs, though
      if (!priorEntry) {
        priorEntry = prev
        for (const s of newSpecs) {
          priorEntry.addSpec(s)
          this.entries.set(s, priorEntry)
        }
        newSpecs.length = 0
        continue
      }

      // have a prior entry matching n, and matching the prev we just saw
      // add the spec to it
      priorEntry.addSpec(s)
      this.entries.set(s, priorEntry)
    }

    // if we never found a matching prior, then this is a whole new thing
    if (!priorEntry) {
      const entry = Object.assign(new YarnLockEntry(newSpecs), n)
      for (const s of newSpecs) {
        this.entries.set(s, entry)
      }
    } else {
      // pick up any new info that we got for this node, so that we can
      // decorate with integrity/resolved/etc.
      Object.assign(priorEntry, n)
    }
  }

  // Extract the serializable fields yarn.lock records for a node:
  // dependencies, optionalDependencies, version, resolved, integrity.
  entryDataFromNode (node) {
    const n = {}
    if (node.package.dependencies) {
      n.dependencies = node.package.dependencies
    }
    if (node.package.optionalDependencies) {
      n.optionalDependencies = node.package.optionalDependencies
    }
    if (node.version) {
      n.version = node.version
    }
    if (node.resolved) {
      // normalize the resolved URL/path relative to the tree root; links
      // resolve relative to their parent directory
      n.resolved = consistentResolve(
        node.resolved,
        node.isLink ? dirname(node.path) : node.path,
        node.root.path,
        true
      )
    }
    if (node.integrity) {
      n.integrity = node.integrity
    }
    return n
  }

  // Expose the entry class for consumers of this module.
  static get Entry () {
    return YarnLockEntry
  }
}
// One stanza of a yarn.lock file: the set of request specs that resolve
// to it, plus the resolution metadata yarn records.  Null-valued fields
// are treated as "not set" and omitted from serialization.
class YarnLockEntry {
  #specs

  constructor (specs) {
    this.#specs = new Set(specs)
    this.resolved = null
    this.version = null
    this.integrity = null
    this.dependencies = null
    this.optionalDependencies = null
  }

  // Render this entry as a yarn.lock stanza: the sorted, quoted spec
  // list, then each non-null field ordered by priorityThenLocaleCompare.
  // Scalar fields print as "key value" (integrity unquoted); object
  // fields print as an indented sub-block, empty objects are skipped.
  toString () {
    const specLine = [...this.#specs]
      .sort(localeCompare)
      .map(quoteIfNeeded)
      .join(', ')
    const fields = Object.getOwnPropertyNames(this)
      .filter(prop => this[prop] !== null)
      .sort(priorityThenLocaleCompare)
    let body = ''
    for (const prop of fields) {
      const value = this[prop]
      if (typeof value !== 'object') {
        body += ` ${prop} ${prop === 'integrity' ? value : JSON.stringify(value)}\n`
      } else if (Object.keys(value).length !== 0) {
        body += ` ${prop}:\n` + sortKV(value) + '\n'
      }
    }
    return (specLine + ':\n' + body).trim()
  }

  // Register one more request spec that resolves to this entry.
  addSpec (spec) {
    this.#specs.add(spec)
  }
}
module.exports = YarnLock