fix: Remove table output from search and tar summary
This removes table output from `npm search` and from all commands that
log a summary of tarball content (`npm publish` and `npm pack`).

Table output is discouraged in a CLI for accessibility reasons.
wraithgar committed Apr 23, 2024
1 parent dfa4cab commit d531f8b
Showing 9 changed files with 1,283 additions and 432 deletions.
48 changes: 3 additions & 45 deletions lib/commands/search.js
@@ -1,37 +1,9 @@
const { Minipass } = require('minipass')
const Pipeline = require('minipass-pipeline')
const libSearch = require('libnpmsearch')
const { log, output } = require('proc-log')

const formatSearchStream = require('../utils/format-search-stream.js')

function filter (data, include, exclude) {
const words = [data.name]
.concat(data.maintainers.map(m => `=${m.username}`))
.concat(data.keywords || [])
.map(f => f && f.trim && f.trim())
.filter(f => f)
.join(' ')
.toLowerCase()

if (exclude.find(e => match(words, e))) {
return false
}

return true
}

function match (words, pattern) {
if (pattern.startsWith('/')) {
if (pattern.endsWith('/')) {
pattern = pattern.slice(0, -1)
}
pattern = new RegExp(pattern.slice(1))
return words.match(pattern)
}
return words.indexOf(pattern) !== -1
}

const BaseCommand = require('../base-command.js')
class Search extends BaseCommand {
static description = 'Search for packages'
@@ -57,7 +29,7 @@ class Search extends BaseCommand {
const opts = {
...this.npm.flatOptions,
...this.npm.flatOptions.search,
include: args.map(s => s.toLowerCase()).filter(s => s),
include: args.map(s => s.toLowerCase()).filter(Boolean),
exclude: this.npm.flatOptions.search.exclude.split(/\s+/),
}

@@ -68,30 +40,16 @@ class Search extends BaseCommand {
// Used later to figure out whether we had any packages go out
let anyOutput = false

class FilterStream extends Minipass {
constructor () {
super({ objectMode: true })
}

write (pkg) {
if (filter(pkg, opts.include, opts.exclude)) {
super.write(pkg)
}
}
}

const filterStream = new FilterStream()

// Grab a configured output stream that will spit out packages in the desired format.
const outputStream = await formatSearchStream({
const outputStream = formatSearchStream({
args, // --searchinclude options are not highlighted
...opts,
npm: this.npm,
})

log.silly('search', 'searching packages')
const p = new Pipeline(
libSearch.stream(opts.include, opts),
filterStream,
outputStream
)

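With filtering folded into the formatter, the search command no longer needs its own FilterStream. A minimal sketch of the simplified flow, assuming args, opts, and npm are prepared as in the class above (runSearch is a hypothetical stand-in for the command's exec method):

const Pipeline = require('minipass-pipeline')
const libSearch = require('libnpmsearch')
const formatSearchStream = require('../utils/format-search-stream.js')

async function runSearch (args, opts, npm) {
  // formatSearchStream is now synchronous to construct and does its own
  // include/exclude filtering, so the registry stream pipes straight into it
  const outputStream = formatSearchStream({ args, ...opts, npm })
  return new Pipeline(libSearch.stream(opts.include, opts), outputStream)
}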
182 changes: 109 additions & 73 deletions lib/utils/format-search-stream.js
@@ -1,6 +1,6 @@
const { stripVTControlCharacters } = require('node:util')
/* eslint-disable max-len */
const { stripVTControlCharacters: strip } = require('node:util')
const { Minipass } = require('minipass')
const columnify = require('columnify')

// This module consumes package data in the following format:
//
@@ -16,14 +16,48 @@ const columnify = require('columnify')
// The returned stream will format this package data
// into a byte stream of formatted, displayable output.

module.exports = async (opts) => {
return opts.json ? new JSONOutputStream() : new TextOutputStream(opts)
function filter (data, exclude) {
const words = [data.name]
.concat(data.maintainers.map(m => m.username))
.concat(data.keywords || [])
.map(f => f?.trim?.())
.filter(Boolean)
.join(' ')
.toLowerCase()

if (exclude.find(pattern => {
// Treats both /foo and /foo/ as regex searches
if (pattern.startsWith('/')) {
if (pattern.endsWith('/')) {
pattern = pattern.slice(0, -1)
}
return words.match(new RegExp(pattern.slice(1)))
}
return words.includes(pattern)
})) {
return false
}

return true
}
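// Illustrative sketch, not part of this module: an exclude term wrapped in
// slashes (e.g. '/^lib/' or '/^lib') is applied as a regular expression,
// while any other term is a plain substring match against the lowercased,
// space-joined name, maintainer usernames, and keywords. With hypothetical
// package data:
//
//   filter({ name: 'libexample', maintainers: [{ username: 'somebody' }], keywords: [] }, ['/^lib/'])
//   // => false, so the package is dropped
//   filter({ name: 'example-pkg', maintainers: [], keywords: [] }, ['table'])
//   // => true, so the package is kept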

module.exports = (opts) => {
return opts.json ? new JSONOutputStream(opts) : new TextOutputStream(opts)
}

class JSONOutputStream extends Minipass {
#didFirst = false
#exclude

constructor (opts) {
super()
this.#exclude = opts.exclude
}

write (obj) {
if (!filter(obj, this.#exclude)) {
return
}
if (!this.#didFirst) {
super.write('[\n')
this.#didFirst = true
@@ -41,94 +75,96 @@ class JSONOutputStream extends Minipass {
}

class TextOutputStream extends Minipass {
#opts
#line = 0
#args
#chalk
#exclude
#parseable

constructor (opts) {
super()
this.#opts = opts
this.#args = opts.args.map(s => s.toLowerCase()).filter(Boolean)
this.#chalk = opts.npm.chalk
this.#exclude = opts.exclude
this.#parseable = opts.parseable
}

write (pkg) {
return super.write(this.#prettify(pkg))
}

#prettify (data) {
write (data) {
if (!filter(data, this.#exclude)) {
return
}
// Normalize
const pkg = {
author: data.maintainers.map((m) => `=${stripVTControlCharacters(m.username)}`).join(' '),
date: 'prehistoric',
description: stripVTControlCharacters(data.description ?? ''),
keywords: '',
name: stripVTControlCharacters(data.name),
authors: data.maintainers.map((m) => `${strip(m.username)}`).join(' '),
publisher: strip(data.publisher.username),
date: data.date ? data.date.toISOString().slice(0, 10) : 'prehistoric',
description: strip(data.description ?? ''),
keywords: [],
name: strip(data.name),
version: data.version,
}
if (Array.isArray(data.keywords)) {
pkg.keywords = data.keywords.map((k) => stripVTControlCharacters(k)).join(' ')
pkg.keywords = data.keywords.map(strip)
} else if (typeof data.keywords === 'string') {
pkg.keywords = stripVTControlCharacters(data.keywords.replace(/[,\s]+/, ' '))
}
if (data.date) {
pkg.date = data.date.toISOString().split('T')[0] // remove time
pkg.keywords = strip(data.keywords.replace(/[,\s]+/, ' ')).split(' ')
}

const columns = ['name', 'description', 'author', 'date', 'version', 'keywords']
if (this.#opts.parseable) {
return columns.map((col) => pkg[col] && ('' + pkg[col]).replace(/\t/g, ' ')).join('\t')
let output
if (this.#parseable) {
output = [pkg.name, pkg.description, pkg.author, pkg.date, pkg.version, pkg.keywords]
.filter(Boolean)
.map(col => ('' + col).replace(/\t/g, ' ')).join('\t')
return super.write(output)
}

// stdout in tap is never a tty
/* istanbul ignore next */
const maxWidth = process.stdout.isTTY ? process.stdout.getWindowSize()[0] : Infinity
let output = columnify(
[pkg],
{
include: columns,
showHeaders: ++this.#line <= 1,
columnSplitter: ' | ',
truncate: !this.#opts.long,
config: {
name: { minWidth: 25, maxWidth: 25, truncate: false, truncateMarker: '' },
description: { minWidth: 20, maxWidth: 20 },
author: { minWidth: 15, maxWidth: 15 },
date: { maxWidth: 11 },
version: { minWidth: 8, maxWidth: 8 },
keywords: { maxWidth: Infinity },
},
const keywords = pkg.keywords.map(k => {
if (this.#args.includes(k)) {
return this.#chalk.cyan(k)
} else {
return k
}
}).join(' ')

let description = []
for (const arg of this.#args) {
const finder = pkg.description.toLowerCase().split(arg.toLowerCase())
let p = 0
for (const f of finder) {
description.push(pkg.description.slice(p, p + f.length))
const word = pkg.description.slice(p + f.length, p + f.length + arg.length)
description.push(this.#chalk.cyan(word))
p += f.length + arg.length
}
).split('\n').map(line => line.slice(0, maxWidth)).join('\n')

if (!this.#opts.color) {
return output
}

const colors = ['31m', '33m', '32m', '36m', '34m', '35m']

this.#opts.args.forEach((arg, i) => {
const markStart = String.fromCharCode(i % colors.length + 1)
const markEnd = String.fromCharCode(0)

if (arg.charAt(0) === '/') {
output = output.replace(
new RegExp(arg.slice(1, -1), 'gi'),
bit => `${markStart}${bit}${markEnd}`
)
} else {
// just a normal string, do the split/map thing
description = description.filter(Boolean)
let name = pkg.name
if (this.#args.includes(pkg.name)) {
name = this.#chalk.cyan(pkg.name)
} else {
name = []
for (const arg of this.#args) {
const finder = pkg.name.toLowerCase().split(arg.toLowerCase())
let p = 0

output = output.toLowerCase().split(arg.toLowerCase()).map(piece => {
piece = output.slice(p, p + piece.length)
p += piece.length
const mark = `${markStart}${output.slice(p, p + arg.length)}${markEnd}`
p += arg.length
return `${piece}${mark}`
}).join('')
for (const f of finder) {
name.push(pkg.name.slice(p, p + f.length))
const word = pkg.name.slice(p + f.length, p + f.length + arg.length)
name.push(this.#chalk.cyan(word))
p += f.length + arg.length
}
}
})
name = this.#chalk.blue(name.join(''))
}

for (let i = 1; i <= colors.length; i++) {
output = output.split(String.fromCharCode(i)).join(`\u001B[${colors[i - 1]}`)
if (description.length) {
output = `${name}\n${description.join('')}\n`
} else {
output = `${name}\n`
}
output += `Version ${this.#chalk.blue(pkg.version)} published ${this.#chalk.blue(pkg.date)} by ${this.#chalk.blue(pkg.publisher)}\n`
output += `Maintainers: ${pkg.authors}\n`
if (keywords) {
output += `Keywords: ${keywords}\n`
}
return output.split('\u0000').join('\u001B[0m').trim()
output += `${this.#chalk.blue(`https://npm.im/${pkg.name}`)}\n`
return super.write(output)
}
}
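For reference, the TextOutputStream above now writes one short plain-text block per package instead of a table row; search terms matched in the name and description are highlighted, and the final line links to the package. A rough sketch of that layout with made-up package data:

example-pkg
A made-up package used only to illustrate the new layout
Version 1.2.3 published 2024-04-23 by somebody
Maintainers: somebody someone-else
Keywords: example demo
https://npm.im/example-pkg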
69 changes: 22 additions & 47 deletions lib/utils/tar.js
@@ -2,7 +2,6 @@ const tar = require('tar')
const ssri = require('ssri')
const { log } = require('proc-log')
const formatBytes = require('./format-bytes.js')
const columnify = require('columnify')
const localeCompare = require('@isaacs/string-locale-compare')('en', {
sensitivity: 'case',
numeric: true,
@@ -12,60 +11,36 @@ const logTar = (tarball, opts = {}) => {
const { unicode = false } = opts
log.notice('')
log.notice('', `${unicode ? '📦 ' : 'package:'} ${tarball.name}@${tarball.version}`)
log.notice('=== Tarball Contents ===')
log.notice('Tarball Contents')
if (tarball.files.length) {
log.notice(
'',
columnify(
tarball.files
.map(f => {
const bytes = formatBytes(f.size, false)
return /^node_modules\//.test(f.path) ? null : { path: f.path, size: `${bytes}` }
})
.filter(f => f),
{
include: ['size', 'path'],
showHeaders: false,
}
)
tarball.files.map(f =>
/^node_modules\//.test(f.path) ? null : `${formatBytes(f.size, false)} ${f.path}`
).filter(f => f).join('\n')
)
}
if (tarball.bundled.length) {
log.notice('=== Bundled Dependencies ===')
log.notice('Bundled Dependencies')
tarball.bundled.forEach(name => log.notice('', name))
}
log.notice('=== Tarball Details ===')
log.notice(
'',
columnify(
[
{ name: 'name:', value: tarball.name },
{ name: 'version:', value: tarball.version },
tarball.filename && { name: 'filename:', value: tarball.filename },
{ name: 'package size:', value: formatBytes(tarball.size) },
{ name: 'unpacked size:', value: formatBytes(tarball.unpackedSize) },
{ name: 'shasum:', value: tarball.shasum },
{
name: 'integrity:',
value:
tarball.integrity.toString().slice(0, 20) +
'[...]' +
tarball.integrity.toString().slice(80),
},
tarball.bundled.length && { name: 'bundled deps:', value: tarball.bundled.length },
tarball.bundled.length && {
name: 'bundled files:',
value: tarball.entryCount - tarball.files.length,
},
tarball.bundled.length && { name: 'own files:', value: tarball.files.length },
{ name: 'total files:', value: tarball.entryCount },
].filter(x => x),
{
include: ['name', 'value'],
showHeaders: false,
}
)
)
log.notice('Tarball Details')
log.notice('', `name: ${tarball.name}`)
log.notice('', `version: ${tarball.version}`)
if (tarball.filename) {
log.notice('', `filename: ${tarball.filename}`)
}
log.notice('', `package size: ${formatBytes(tarball.size)}`)
log.notice('', `unpacked size: ${formatBytes(tarball.unpackedSize)}`)
log.notice('', `shasum: ${tarball.shasum}`)
/* eslint-disable-next-line max-len */
log.notice('', `integrity: ${tarball.integrity.toString().slice(0, 20)}[...]${tarball.integrity.toString().slice(80)}`)
if (tarball.bundled.length) {
log.notice('', `bundled deps: ${tarball.bundled.length}`)
log.notice('', `bundled files: ${tarball.entryCount - tarball.files.length}`)
log.notice('', `own files: ${tarball.files.length}`)
}
log.notice('', `total files: ${tarball.entryCount}`)
log.notice('', '')
}
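With columnify gone, the publish/pack summary is emitted as one notice line per field. Roughly, for a made-up package with no bundled dependencies (all values below are placeholders), the log now reads:

npm notice package: example-pkg@1.2.3
npm notice Tarball Contents
npm notice 1.1kB index.js
npm notice 310B package.json
npm notice Tarball Details
npm notice name: example-pkg
npm notice version: 1.2.3
npm notice filename: example-pkg-1.2.3.tgz
npm notice package size: 1.4 kB
npm notice unpacked size: 1.5 kB
npm notice shasum: <40-character sha1>
npm notice integrity: sha512-<first 13 chars>[...]<last chars>
npm notice total files: 2
npm notice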

