2025-08-18 23:06:34 +08:00
parent 0bc04fb659
commit ed18af0cad
1926 changed files with 275098 additions and 0 deletions

package/node_modules/@npmcli/package-json/LICENSE generated vendored Normal file

@@ -0,0 +1,18 @@
ISC License
Copyright GitHub Inc.
Permission to use, copy, modify, and/or distribute this
software for any purpose with or without fee is hereby
granted, provided that the above copyright notice and this
permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
USE OR PERFORMANCE OF THIS SOFTWARE.

package/node_modules/@npmcli/package-json/lib/index.js generated vendored Normal file

@@ -0,0 +1,278 @@
const { readFile, writeFile } = require('node:fs/promises')
const { resolve } = require('node:path')
const parseJSON = require('json-parse-even-better-errors')
const updateDeps = require('./update-dependencies.js')
const updateScripts = require('./update-scripts.js')
const updateWorkspaces = require('./update-workspaces.js')
const normalize = require('./normalize.js')
const { read, parse } = require('./read-package.js')
// a list of handy specialized helper functions that take
// care of special cases that are handled by the npm cli
const knownSteps = new Set([
updateDeps,
updateScripts,
updateWorkspaces,
])
// list of all keys that are handled by "knownSteps" helpers
const knownKeys = new Set([
...updateDeps.knownKeys,
'scripts',
'workspaces',
])
class PackageJson {
static normalizeSteps = Object.freeze([
'_id',
'_attributes',
'bundledDependencies',
'bundleDependencies',
'optionalDedupe',
'scripts',
'funding',
'bin',
])
// npm pkg fix
static fixSteps = Object.freeze([
'binRefs',
'bundleDependencies',
'bundleDependenciesFalse',
'fixNameField',
'fixVersionField',
'fixRepositoryField',
'fixDependencies',
'devDependencies',
'scriptpath',
])
static prepareSteps = Object.freeze([
'_id',
'_attributes',
'bundledDependencies',
'bundleDependencies',
'bundleDependenciesDeleteFalse',
'gypfile',
'serverjs',
'scriptpath',
'authors',
'readme',
'mans',
'binDir',
'gitHead',
'fillTypes',
'normalizeData',
'binRefs',
])
// create a new empty package.json, so we can save at the given path even
// though we didn't start from a parsed file
static async create (path, opts = {}) {
const p = new PackageJson()
await p.create(path)
if (opts.data) {
return p.update(opts.data)
}
return p
}
// Loads a package.json at the given path and JSON-parses it
static async load (path, opts = {}) {
const p = new PackageJson()
// Avoid try/catch if we aren't going to create
if (!opts.create) {
return p.load(path)
}
try {
return await p.load(path)
} catch (err) {
if (!err.message.startsWith('Could not read package.json')) {
throw err
}
return await p.create(path)
}
}
// npm pkg fix
static async fix (path, opts) {
const p = new PackageJson()
await p.load(path, true)
return p.fix(opts)
}
// read-package-json compatible behavior
static async prepare (path, opts) {
const p = new PackageJson()
await p.load(path, true)
return p.prepare(opts)
}
// read-package-json-fast compatible behavior
static async normalize (path, opts) {
const p = new PackageJson()
await p.load(path)
return p.normalize(opts)
}
#path
#manifest
#readFileContent = ''
#canSave = true
// Load content from given path
async load (path, parseIndex) {
this.#path = path
let parseErr
try {
this.#readFileContent = await read(this.filename)
} catch (err) {
if (!parseIndex) {
throw err
}
parseErr = err
}
if (parseErr) {
const indexFile = resolve(this.path, 'index.js')
let indexFileContent
try {
indexFileContent = await readFile(indexFile, 'utf8')
} catch (err) {
throw parseErr
}
try {
this.fromComment(indexFileContent)
} catch (err) {
throw parseErr
}
// This wasn't a package.json so prevent saving
this.#canSave = false
return this
}
return this.fromJSON(this.#readFileContent)
}
// Load data from a JSON string/buffer
fromJSON (data) {
this.#manifest = parse(data)
return this
}
fromContent (data) {
this.#manifest = data
this.#canSave = false
return this
}
// Load data from a comment
// /**package { "name": "foo", "version": "1.2.3", ... } **/
fromComment (data) {
data = data.split(/^\/\*\*package(?:\s|$)/m)
if (data.length < 2) {
throw new Error('File has no package in comments')
}
data = data[1]
data = data.split(/\*\*\/$/m)
if (data.length < 2) {
throw new Error('File has no package in comments')
}
data = data[0]
data = data.replace(/^\s*\*/mg, '')
this.#manifest = parseJSON(data)
return this
}
get content () {
return this.#manifest
}
get path () {
return this.#path
}
get filename () {
if (this.path) {
return resolve(this.path, 'package.json')
}
return undefined
}
create (path) {
this.#path = path
this.#manifest = {}
return this
}
// This should be the ONLY way to set content in the manifest
update (content) {
if (!this.content) {
throw new Error('Can not update without content. Please `load` or `create`')
}
for (const step of knownSteps) {
this.#manifest = step({ content, originalContent: this.content })
}
// unknown properties will just be overwritten
for (const [key, value] of Object.entries(content)) {
if (!knownKeys.has(key)) {
this.content[key] = value
}
}
return this
}
async save () {
if (!this.#canSave) {
throw new Error('No package.json to save to')
}
const {
[Symbol.for('indent')]: indent,
[Symbol.for('newline')]: newline,
} = this.content
const format = indent === undefined ? '  ' : indent
const eol = newline === undefined ? '\n' : newline
const fileContent = `${
JSON.stringify(this.content, null, format)
}\n`
.replace(/\n/g, eol)
if (fileContent.trim() !== this.#readFileContent.trim()) {
return await writeFile(this.filename, fileContent)
}
}
async normalize (opts = {}) {
if (!opts.steps) {
opts.steps = this.constructor.normalizeSteps
}
await normalize(this, opts)
return this
}
async prepare (opts = {}) {
if (!opts.steps) {
opts.steps = this.constructor.prepareSteps
}
await normalize(this, opts)
return this
}
async fix (opts = {}) {
// This one is not overridable
opts.steps = this.constructor.fixSteps
await normalize(this, opts)
return this
}
}
module.exports = PackageJson
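
Taken together, the class above gives a load/update/save round trip. A minimal usage sketch, assuming the package is installed as @npmcli/package-json (the directory and script values here are hypothetical):

const PackageJson = require('@npmcli/package-json')

async function addTestScript (dir) {
  // load() reads and parses `${dir}/package.json`, remembering its raw text
  const pkg = await PackageJson.load(dir)
  // update() routes "scripts" through the updateScripts helper; spread the
  // existing scripts because the helper replaces the whole object
  pkg.update({ scripts: { ...pkg.content.scripts, test: 'tap' } })
  // save() keeps the original indent and newline style, and skips the
  // write entirely when the content is unchanged
  await pkg.save()
}

addTestScript('.').catch(console.error)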

package/node_modules/@npmcli/package-json/lib/normalize.js generated vendored Normal file

@@ -0,0 +1,619 @@
const valid = require('semver/functions/valid')
const clean = require('semver/functions/clean')
const fs = require('node:fs/promises')
const path = require('node:path')
const { log } = require('proc-log')
/**
* @type {import('hosted-git-info')}
*/
let _hostedGitInfo
function lazyHostedGitInfo () {
if (!_hostedGitInfo) {
_hostedGitInfo = require('hosted-git-info')
}
return _hostedGitInfo
}
/**
* @type {import('glob').glob}
*/
let _glob
function lazyLoadGlob () {
if (!_glob) {
_glob = require('glob').glob
}
return _glob
}
// used to be npm-normalize-package-bin
function normalizePackageBin (pkg, changes) {
if (pkg.bin) {
if (typeof pkg.bin === 'string' && pkg.name) {
changes?.push('"bin" was converted to an object')
pkg.bin = { [pkg.name]: pkg.bin }
} else if (Array.isArray(pkg.bin)) {
changes?.push('"bin" was converted to an object')
pkg.bin = pkg.bin.reduce((acc, k) => {
acc[path.basename(k)] = k
return acc
}, {})
}
if (typeof pkg.bin === 'object') {
for (const binKey in pkg.bin) {
if (typeof pkg.bin[binKey] !== 'string') {
delete pkg.bin[binKey]
changes?.push(`removed invalid "bin[${binKey}]"`)
continue
}
const base = path.basename(secureAndUnixifyPath(binKey))
if (!base) {
delete pkg.bin[binKey]
changes?.push(`removed invalid "bin[${binKey}]"`)
continue
}
const binTarget = secureAndUnixifyPath(pkg.bin[binKey])
if (!binTarget) {
delete pkg.bin[binKey]
changes?.push(`removed invalid "bin[${binKey}]"`)
continue
}
if (base !== binKey) {
delete pkg.bin[binKey]
changes?.push(`"bin[${binKey}]" was renamed to "bin[${base}]"`)
}
if (binTarget !== pkg.bin[binKey]) {
changes?.push(`"bin[${base}]" script name was cleaned`)
}
pkg.bin[base] = binTarget
}
if (Object.keys(pkg.bin).length === 0) {
changes?.push('empty "bin" was removed')
delete pkg.bin
}
return pkg
}
}
delete pkg.bin
}
function normalizePackageMan (pkg, changes) {
if (pkg.man) {
const mans = []
for (const man of (Array.isArray(pkg.man) ? pkg.man : [pkg.man])) {
if (typeof man !== 'string') {
changes?.push(`removed invalid "man [${man}]"`)
} else {
mans.push(secureAndUnixifyPath(man))
}
}
if (!mans.length) {
changes?.push('empty "man" was removed')
} else {
pkg.man = mans
return pkg
}
}
delete pkg.man
}
function isCorrectlyEncodedName (spec) {
return !spec.match(/[/@\s+%:]/) &&
spec === encodeURIComponent(spec)
}
function isValidScopedPackageName (spec) {
if (spec.charAt(0) !== '@') {
return false
}
const rest = spec.slice(1).split('/')
if (rest.length !== 2) {
return false
}
return rest[0] && rest[1] &&
rest[0] === encodeURIComponent(rest[0]) &&
rest[1] === encodeURIComponent(rest[1])
}
function unixifyPath (ref) {
return ref.replace(/\\|:/g, '/')
}
function securePath (ref) {
const secured = path.join('.', path.join('/', unixifyPath(ref)))
return secured.startsWith('.') ? '' : secured
}
function secureAndUnixifyPath (ref) {
return unixifyPath(securePath(ref))
}
// We don't want the `changes` array in here by default because this is a hot
// path for parsing packuments during install. So the calling method passes it
// in if it wants to track changes.
const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) => {
if (!pkg.content) {
throw new Error('Can not normalize without content')
}
const data = pkg.content
const scripts = data.scripts || {}
const pkgId = `${data.name ?? ''}@${data.version ?? ''}`
// name and version are load bearing so we have to clean them up first
if (steps.includes('fixNameField') || steps.includes('normalizeData')) {
if (!data.name && !strict) {
changes?.push('Missing "name" field was set to an empty string')
data.name = ''
} else {
if (typeof data.name !== 'string') {
throw new Error('name field must be a string.')
}
if (!strict) {
const name = data.name.trim()
if (data.name !== name) {
changes?.push(`Whitespace was trimmed from "name"`)
data.name = name
}
}
if (data.name.startsWith('.') ||
!(isValidScopedPackageName(data.name) || isCorrectlyEncodedName(data.name)) ||
(strict && (!allowLegacyCase) && data.name !== data.name.toLowerCase()) ||
data.name.toLowerCase() === 'node_modules' ||
data.name.toLowerCase() === 'favicon.ico') {
throw new Error('Invalid name: ' + JSON.stringify(data.name))
}
}
}
if (steps.includes('fixVersionField') || steps.includes('normalizeData')) {
// allow "loose" semver 1.0 versions in non-strict mode
// enforce strict semver 2.0 compliance in strict mode
const loose = !strict
if (!data.version) {
data.version = ''
} else {
if (!valid(data.version, loose)) {
throw new Error(`Invalid version: "${data.version}"`)
}
const version = clean(data.version, loose)
if (version !== data.version) {
changes?.push(`"version" was cleaned and set to "${version}"`)
data.version = version
}
}
}
// remove attributes that start with "_"
if (steps.includes('_attributes')) {
for (const key in data) {
if (key.startsWith('_')) {
changes?.push(`"${key}" was removed`)
delete pkg.content[key]
}
}
}
// build the "_id" attribute
if (steps.includes('_id')) {
if (data.name && data.version) {
changes?.push(`"_id" was set to ${pkgId}`)
data._id = pkgId
}
}
// fix bundledDependencies typo
// normalize bundleDependencies
if (steps.includes('bundledDependencies')) {
if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) {
data.bundleDependencies = data.bundledDependencies
}
changes?.push(`Deleted incorrect "bundledDependencies"`)
delete data.bundledDependencies
}
// expand "bundleDependencies: true or translate from object"
if (steps.includes('bundleDependencies')) {
const bd = data.bundleDependencies
if (bd === false && !steps.includes('bundleDependenciesDeleteFalse')) {
changes?.push(`"bundleDependencies" was changed from "false" to "[]"`)
data.bundleDependencies = []
} else if (bd === true) {
changes?.push(`"bundleDependencies" was auto-populated from "dependencies"`)
data.bundleDependencies = Object.keys(data.dependencies || {})
} else if (bd && typeof bd === 'object') {
if (!Array.isArray(bd)) {
changes?.push(`"bundleDependencies" was changed from an object to an array`)
data.bundleDependencies = Object.keys(bd)
}
} else if ('bundleDependencies' in data) {
changes?.push(`"bundleDependencies" was removed`)
delete data.bundleDependencies
}
}
// it was once common practice to list deps both in optionalDependencies and
// in dependencies, to support npm versions that did not know about
// optionalDependencies. This is no longer a relevant need, so duplicating
// the deps in two places is unnecessary and excessive.
if (steps.includes('optionalDedupe')) {
if (data.dependencies &&
data.optionalDependencies && typeof data.optionalDependencies === 'object') {
for (const name in data.optionalDependencies) {
changes?.push(`optionalDependencies."${name}" was removed`)
delete data.dependencies[name]
}
if (!Object.keys(data.dependencies).length) {
changes?.push(`Empty "optionalDependencies" was removed`)
delete data.dependencies
}
}
}
// add "install" attribute if any "*.gyp" files exist
if (steps.includes('gypfile')) {
if (!scripts.install && !scripts.preinstall && data.gypfile !== false) {
const files = await lazyLoadGlob()('*.gyp', { cwd: pkg.path })
if (files.length) {
scripts.install = 'node-gyp rebuild'
data.scripts = scripts
data.gypfile = true
changes?.push(`"scripts.install" was set to "node-gyp rebuild"`)
changes?.push(`"gypfile" was set to "true"`)
}
}
}
// add "start" attribute if "server.js" exists
if (steps.includes('serverjs') && !scripts.start) {
try {
await fs.access(path.join(pkg.path, 'server.js'))
scripts.start = 'node server.js'
data.scripts = scripts
changes?.push('"scripts.start" was set to "node server.js"')
} catch {
// do nothing
}
}
// strip "node_modules/.bin" from scripts entries
// remove invalid scripts entries (non-strings)
if ((steps.includes('scripts') || steps.includes('scriptpath')) && data.scripts !== undefined) {
const spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
if (typeof data.scripts === 'object') {
for (const name in data.scripts) {
if (typeof data.scripts[name] !== 'string') {
delete data.scripts[name]
changes?.push(`Invalid scripts."${name}" was removed`)
} else if (steps.includes('scriptpath') && spre.test(data.scripts[name])) {
data.scripts[name] = data.scripts[name].replace(spre, '')
changes?.push(`scripts entry "${name}" was fixed to remove node_modules/.bin reference`)
}
}
} else {
changes?.push(`Removed invalid "scripts"`)
delete data.scripts
}
}
if (steps.includes('funding')) {
if (data.funding && typeof data.funding === 'string') {
data.funding = { url: data.funding }
changes?.push(`"funding" was changed to an object with a url attribute`)
}
}
// populate "authors" attribute
if (steps.includes('authors') && !data.contributors) {
try {
const authorData = await fs.readFile(path.join(pkg.path, 'AUTHORS'), 'utf8')
const authors = authorData.split(/\r?\n/g)
.map(line => line.replace(/^\s*#.*$/, '').trim())
.filter(line => line)
data.contributors = authors
changes?.push('"contributors" was auto-populated with the contents of the "AUTHORS" file')
} catch {
// do nothing
}
}
// populate "readme" attribute
if (steps.includes('readme') && !data.readme) {
const mdre = /\.m?a?r?k?d?o?w?n?$/i
const files = await lazyLoadGlob()('{README,README.*}', {
cwd: pkg.path,
nocase: true,
mark: true,
})
let readmeFile
for (const file of files) {
// don't accept directories.
if (!file.endsWith(path.sep)) {
if (file.match(mdre)) {
readmeFile = file
break
}
if (file.endsWith('README')) {
readmeFile = file
}
}
}
if (readmeFile) {
const readmeData = await fs.readFile(path.join(pkg.path, readmeFile), 'utf8')
data.readme = readmeData
data.readmeFilename = readmeFile
changes?.push(`"readme" was set to the contents of ${readmeFile}`)
changes?.push(`"readmeFilename" was set to ${readmeFile}`)
}
if (!data.readme) {
// this.warn('missingReadme')
data.readme = 'ERROR: No README data found!'
}
}
// expand directories.man
if (steps.includes('mans')) {
if (data.directories?.man && !data.man) {
const manDir = secureAndUnixifyPath(data.directories.man)
const cwd = path.resolve(pkg.path, manDir)
const files = await lazyLoadGlob()('**/*.[0-9]', { cwd })
data.man = files.map(man =>
path.relative(pkg.path, path.join(cwd, man)).split(path.sep).join('/')
)
}
normalizePackageMan(data, changes)
}
if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) {
normalizePackageBin(data, changes)
}
// expand "directories.bin"
if (steps.includes('binDir') && data.directories?.bin && !data.bin) {
const binsDir = path.resolve(pkg.path, securePath(data.directories.bin))
const bins = await lazyLoadGlob()('**', { cwd: binsDir })
data.bin = bins.reduce((acc, binFile) => {
if (binFile && !binFile.startsWith('.')) {
const binName = path.basename(binFile)
acc[binName] = path.join(data.directories.bin, binFile)
}
return acc
}, {})
// *sigh*
normalizePackageBin(data, changes)
}
// populate "gitHead" attribute
if (steps.includes('gitHead') && !data.gitHead) {
const git = require('@npmcli/git')
const gitRoot = await git.find({ cwd: pkg.path, root })
let head
if (gitRoot) {
try {
head = await fs.readFile(path.resolve(gitRoot, '.git/HEAD'), 'utf8')
} catch (err) {
// do nothing
}
}
let headData
if (head) {
if (head.startsWith('ref: ')) {
const headRef = head.replace(/^ref: /, '').trim()
const headFile = path.resolve(gitRoot, '.git', headRef)
try {
headData = await fs.readFile(headFile, 'utf8')
headData = headData.replace(/^ref: /, '').trim()
} catch (err) {
// do nothing
}
if (!headData) {
const packFile = path.resolve(gitRoot, '.git/packed-refs')
try {
let refs = await fs.readFile(packFile, 'utf8')
if (refs) {
refs = refs.split('\n')
for (let i = 0; i < refs.length; i++) {
const match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
if (match && match[2].trim() === headRef) {
headData = match[1]
break
}
}
}
} catch {
// do nothing
}
}
} else {
headData = head.trim()
}
}
if (headData) {
data.gitHead = headData
}
}
// populate "types" attribute
if (steps.includes('fillTypes')) {
const index = data.main || 'index.js'
if (typeof index !== 'string') {
throw new TypeError('The "main" attribute must be of type string.')
}
// TODO exports is much more complicated than this in verbose format
// We need to support for instance
// "exports": {
// ".": [
// {
// "default": "./lib/npm.js"
// },
// "./lib/npm.js"
// ],
// "./package.json": "./package.json"
// },
// as well as conditional exports
// if (data.exports && typeof data.exports === 'string') {
// index = data.exports
// }
// if (data.exports && data.exports['.']) {
// index = data.exports['.']
// if (typeof index !== 'string') {
// }
// }
const extless = path.join(path.dirname(index), path.basename(index, path.extname(index)))
const dts = `./${extless}.d.ts`
const hasDTSFields = 'types' in data || 'typings' in data
if (!hasDTSFields) {
try {
await fs.access(path.join(pkg.path, dts))
data.types = dts.split(path.sep).join('/')
} catch {
// do nothing
}
}
}
// "normalizeData" from "read-package-json", which was just a call through to
// "normalize-package-data". We only call the "fixer" functions because
// outside of that it was also clobbering _id (which we already conditionally
// do) and also adding the gypfile script (which we also already
// conditionally do)
// Some steps are isolated so we can do a limited subset of these in `fix`
if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) {
if (data.repositories) {
/* eslint-disable-next-line max-len */
changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`)
data.repository = data.repositories[0]
}
if (data.repository) {
if (typeof data.repository === 'string') {
changes?.push('"repository" was changed from a string to an object')
data.repository = {
type: 'git',
url: data.repository,
}
}
if (data.repository.url) {
const hosted = lazyHostedGitInfo().fromUrl(data.repository.url)
let r
if (hosted) {
if (hosted.getDefaultRepresentation() === 'shortcut') {
r = hosted.https()
} else {
r = hosted.toString()
}
if (r !== data.repository.url) {
changes?.push(`"repository.url" was normalized to "${r}"`)
data.repository.url = r
}
}
}
}
}
if (steps.includes('fixDependencies') || steps.includes('normalizeData')) {
// peerDependencies?
// devDependencies is meaningless here, it's ignored on an installed package
for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) {
if (data[type]) {
let secondWarning = true
if (typeof data[type] === 'string') {
changes?.push(`"${type}" was converted from a string into an object`)
data[type] = data[type].trim().split(/[\n\r\s\t ,]+/)
secondWarning = false
}
if (Array.isArray(data[type])) {
if (secondWarning) {
changes?.push(`"${type}" was converted from an array into an object`)
}
const o = {}
for (const d of data[type]) {
if (typeof d === 'string') {
const dep = d.trim().split(/(:?[@\s><=])/)
const dn = dep.shift()
const dv = dep.join('').replace(/^@/, '').trim()
o[dn] = dv
}
}
data[type] = o
}
}
}
// normalize-package-data used to put optional dependencies BACK into
// dependencies here, we no longer do this
for (const deps of ['dependencies', 'devDependencies']) {
if (deps in data) {
if (!data[deps] || typeof data[deps] !== 'object') {
changes?.push(`Removed invalid "${deps}"`)
delete data[deps]
} else {
for (const d in data[deps]) {
const r = data[deps][d]
if (typeof r !== 'string') {
changes?.push(`Removed invalid "${deps}.${d}"`)
delete data[deps][d]
}
const hosted = lazyHostedGitInfo().fromUrl(data[deps][d])?.toString()
if (hosted && hosted !== data[deps][d]) {
changes?.push(`Normalized git reference to "${deps}.${d}"`)
data[deps][d] = hosted.toString()
}
}
}
}
}
}
if (steps.includes('normalizeData')) {
const legacyFixer = require('normalize-package-data/lib/fixer.js')
const legacyMakeWarning = require('normalize-package-data/lib/make_warning.js')
legacyFixer.warn = function () {
changes?.push(legacyMakeWarning.apply(null, arguments))
}
const legacySteps = [
'fixDescriptionField',
'fixModulesField',
'fixFilesField',
'fixManField',
'fixBugsField',
'fixKeywordsField',
'fixBundleDependenciesField',
'fixHomepageField',
'fixReadmeField',
'fixLicenseField',
'fixPeople',
'fixTypos',
]
for (const legacyStep of legacySteps) {
legacyFixer[legacyStep](data)
}
}
// Warn if the bin references don't point to anything. This might be better
// in normalize-package-data if it had access to the file path.
if (steps.includes('binRefs') && data.bin instanceof Object) {
for (const key in data.bin) {
try {
await fs.access(path.resolve(pkg.path, data.bin[key]))
} catch {
log.warn('package-json', pkgId, `No bin file found at ${data.bin[key]}`)
// XXX: should a future breaking change delete bin entries that cannot be accessed?
}
}
}
}
module.exports = normalize
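
Because the `changes` array is opt-in (see the hot-path note above), callers that want the human-readable change log pass their own array. A small sketch using the static wrapper from lib/index.js; the directory is hypothetical:

const PackageJson = require('@npmcli/package-json')

async function reportNormalization (dir) {
  const changes = []
  // normalize() runs the normalizeSteps listed in lib/index.js and pushes
  // a message into `changes` for every field it touches
  await PackageJson.normalize(dir, { changes })
  for (const change of changes) {
    console.log(change) // e.g. '"_id" was set to foo@1.2.3'
  }
}

reportNormalization('.').catch(console.error)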

package/node_modules/@npmcli/package-json/lib/read-package.js generated vendored Normal file

@@ -0,0 +1,39 @@
// This is JUST the code needed to open a package.json file and parse it.
// It's isolated out so that code needing to parse a package.json file can
// do so in the same way as this module does, without needing to require
// the whole module, or needing to require the underlying parsing library.
const { readFile } = require('fs/promises')
const parseJSON = require('json-parse-even-better-errors')
async function read (filename) {
try {
const data = await readFile(filename, 'utf8')
return data
} catch (err) {
err.message = `Could not read package.json: ${err}`
throw err
}
}
function parse (data) {
try {
const content = parseJSON(data)
return content
} catch (err) {
err.message = `Invalid package.json: ${err}`
throw err
}
}
// This is what most external libs will use.
// PackageJson will call read and parse separately
async function readPackage (filename) {
const data = await read(filename)
const content = parse(data)
return content
}
module.exports = {
read,
parse,
readPackage,
}
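
A sketch of this standalone entry point. The package.json below declares no "exports" map, so the deep require path should resolve, though it is best treated as internal:

const { readPackage } = require('@npmcli/package-json/lib/read-package.js')

async function main () {
  try {
    const content = await readPackage('./package.json')
    console.log(content.name, content.version)
  } catch (err) {
    // read() failures are prefixed 'Could not read package.json:',
    // parse() failures 'Invalid package.json:'
    console.error(err.message)
  }
}

main()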

package/node_modules/@npmcli/package-json/lib/update-dependencies.js generated vendored Normal file

@@ -0,0 +1,75 @@
const depTypes = new Set([
'dependencies',
'optionalDependencies',
'devDependencies',
'peerDependencies',
])
// sort alphabetically all types of deps for a given package
const orderDeps = (content) => {
for (const type of depTypes) {
if (content && content[type]) {
content[type] = Object.keys(content[type])
.sort((a, b) => a.localeCompare(b, 'en'))
.reduce((res, key) => {
res[key] = content[type][key]
return res
}, {})
}
}
return content
}
const updateDependencies = ({ content, originalContent }) => {
const pkg = orderDeps({
...content,
})
// optionalDependencies don't need to be repeated in two places
if (pkg.dependencies) {
if (pkg.optionalDependencies) {
for (const name of Object.keys(pkg.optionalDependencies)) {
delete pkg.dependencies[name]
}
}
}
const result = { ...originalContent }
// loop through all types of dependencies and update package json pkg
for (const type of depTypes) {
if (pkg[type]) {
result[type] = pkg[type]
}
// prune empty type props from resulting object
const emptyDepType =
pkg[type]
&& typeof pkg === 'object'
&& Object.keys(pkg[type]).length === 0
if (emptyDepType) {
delete result[type]
}
}
// if original package.json had dep in peerDeps AND deps, preserve that.
const { dependencies: origProd, peerDependencies: origPeer } =
originalContent || {}
const { peerDependencies: newPeer } = result
if (origProd && origPeer && newPeer) {
// we have original prod/peer deps, and new peer deps
// copy over any that were in both in the original
for (const name of Object.keys(origPeer)) {
if (origProd[name] !== undefined && newPeer[name] !== undefined) {
result.dependencies = result.dependencies || {}
result.dependencies[name] = newPeer[name]
}
}
}
return result
}
updateDependencies.knownKeys = depTypes
module.exports = updateDependencies
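
Fed hypothetical data, the helper sorts each dependency map alphabetically and drops optional deps that were duplicated under "dependencies":

const updateDependencies = require('@npmcli/package-json/lib/update-dependencies.js')

const result = updateDependencies({
  content: {
    dependencies: { zlib: '^1.0.0', abbrev: '^2.0.0', once: '^1.4.0' },
    optionalDependencies: { once: '^1.4.0' },
  },
  originalContent: {},
})

console.log(result.dependencies)         // { abbrev: '^2.0.0', zlib: '^1.0.0' }
console.log(result.optionalDependencies) // { once: '^1.4.0' }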

package/node_modules/@npmcli/package-json/lib/update-scripts.js generated vendored Normal file

@@ -0,0 +1,29 @@
const updateScripts = ({ content, originalContent = {} }) => {
const newScripts = content.scripts
if (!newScripts) {
return originalContent
}
// validate scripts content being appended
const hasInvalidScripts = () =>
Object.entries(newScripts)
.some(([key, value]) =>
typeof key !== 'string' || typeof value !== 'string')
if (hasInvalidScripts()) {
throw Object.assign(
new TypeError(
'package.json scripts should be a key-value pair of strings.'),
{ code: 'ESCRIPTSINVALID' }
)
}
return {
...originalContent,
scripts: {
...newScripts,
},
}
}
module.exports = updateScripts
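
The validation is all-or-nothing: a single non-string value rejects the whole update, and a valid update replaces the scripts object rather than merging it. A sketch with hypothetical values:

const updateScripts = require('@npmcli/package-json/lib/update-scripts.js')

const next = updateScripts({
  content: { scripts: { test: 'tap' } },
  originalContent: { name: 'demo' },
})
console.log(next) // { name: 'demo', scripts: { test: 'tap' } }

try {
  updateScripts({ content: { scripts: { test: 123 } } })
} catch (err) {
  console.log(err.code) // 'ESCRIPTSINVALID'
}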

package/node_modules/@npmcli/package-json/lib/update-workspaces.js generated vendored Normal file

@@ -0,0 +1,26 @@
const updateWorkspaces = ({ content, originalContent = {} }) => {
const newWorkspaces = content.workspaces
if (!newWorkspaces) {
return originalContent
}
// validate workspaces content being appended
const hasInvalidWorkspaces = () =>
newWorkspaces.some(w => !(typeof w === 'string'))
if (!newWorkspaces.length || hasInvalidWorkspaces()) {
throw Object.assign(
new TypeError('workspaces should be an array of strings.'),
{ code: 'EWORKSPACESINVALID' }
)
}
return {
...originalContent,
workspaces: [
...newWorkspaces,
],
}
}
module.exports = updateWorkspaces
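
Workspaces follow the same pattern: the new array replaces the old wholesale, and an empty array or non-string entries are rejected. A sketch with hypothetical values:

const updateWorkspaces = require('@npmcli/package-json/lib/update-workspaces.js')

const next = updateWorkspaces({
  content: { workspaces: ['packages/*'] },
  originalContent: { name: 'root', private: true, workspaces: ['old/*'] },
})
console.log(next.workspaces) // ['packages/*'] (replaced, not merged)

try {
  updateWorkspaces({ content: { workspaces: [] } })
} catch (err) {
  console.log(err.code) // 'EWORKSPACESINVALID'
}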

package/node_modules/@npmcli/package-json/package.json generated vendored Normal file

@@ -0,0 +1,60 @@
{
"name": "@npmcli/package-json",
"version": "5.2.0",
"description": "Programmatic API to update package.json",
"main": "lib/index.js",
"files": [
"bin/",
"lib/"
],
"scripts": {
"snap": "tap",
"test": "tap",
"lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
"lintfix": "npm run lint -- --fix",
"posttest": "npm run lint",
"postsnap": "npm run lintfix --",
"postlint": "template-oss-check",
"template-oss-apply": "template-oss-apply --force"
},
"keywords": [
"npm",
"oss"
],
"author": "GitHub Inc.",
"license": "ISC",
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.22.0",
"read-package-json": "^7.0.0",
"read-package-json-fast": "^3.0.2",
"tap": "^16.0.1"
},
"dependencies": {
"@npmcli/git": "^5.0.0",
"glob": "^10.2.2",
"hosted-git-info": "^7.0.0",
"json-parse-even-better-errors": "^3.0.0",
"normalize-package-data": "^6.0.0",
"proc-log": "^4.0.0",
"semver": "^7.5.3"
},
"repository": {
"type": "git",
"url": "git+https://github.com/npm/package-json.git"
},
"engines": {
"node": "^16.14.0 || >=18.0.0"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
"version": "4.22.0",
"publish": "true"
},
"tap": {
"nyc-arg": [
"--exclude",
"tap-snapshots/**"
]
}
}