extension-zip.ts (forked from vuejs/devtools)

import path from 'node:path'
import fs from 'node:fs'
import { fileURLToPath } from 'node:url'
import archiver from 'archiver'
import readdirGlob from 'readdir-glob'
import ProgressBar from 'progress'

const __dirname = path.dirname(fileURLToPath(import.meta.url))
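
// Everything matching these globs is included in the extension zip.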
const INCLUDE_FILES = [
  'client/**',
  'dist/**',
  'icons/**',
  'overlay/**',
  'pages/**',
  'popups/**',
  'devtools-background.html',
  'devtools-panel.html',
  'manifest.json',
  'package.json',
]

const EXCLUDE_DIRS = [
  'node_modules',
  'src',
]
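
// Format a raw byte count as a human-readable size, e.g. 1536 -> '1.5 KB'.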
function bytesToSize(bytes: number) {
  const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB']
  if (bytes === 0)
    return '0 Bytes'
  const i = Math.floor(Math.log(bytes) / Math.log(1024))
  const size = Number.parseFloat((bytes / 1024 ** i).toFixed(2))
  return `${size} ${sizes[i]}`
}
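
// Archive the packages/<target>-extension folder into dist/<filename>,
// showing per-file progress while the zip is written.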
async function zip(filename: string, target: string) {
  const targetPkgDir = path.join(__dirname, `../packages/${target}-extension`)
  const archive = archiver('zip', { zlib: { level: 9 } })
  const output = fs.createWriteStream(path.join(__dirname, '../dist', filename))

  // Live state rendered into the progress bar while files are appended.
  const status = {
    total: 0,
    cFile: '...',
    cSize: '0 Bytes',
    tBytes: 0,
    tSize: '0 Bytes',
  }
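
  // Walk the target directory once up front so the progress bar knows its total file count.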
  async function parseFileStats() {
    return new Promise<void>((resolve, reject) => {
      // @ts-expect-error skip
      const globber = readdirGlob.readdirGlob(targetPkgDir, { pattern: INCLUDE_FILES, skip: EXCLUDE_DIRS, mark: true, stat: true })
      globber.on('match', (match) => {
        if (!match.stat.isDirectory())
          status.total++
      })
      globber.on('error', (err) => {
        reject(err)
      })
      globber.on('end', () => {
        resolve()
      })
    })
  }

  await parseFileStats().catch((err) => {
    console.error(err)
    process.exit(1)
  })

  // Custom tokens in the format string (:tSize, :cFile, :cSize) are filled
  // from the status object passed to tick().
  const bar = new ProgressBar(`${filename} @ :tSize [:bar] :current/:total :percent +:cFile@:cSize`, {
    width: 18,
    incomplete: ' ',
    total: status.total,
  })
  bar.tick(0, status)
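
  // Update the bar as archiver appends each file to the zip.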
  archive.on('entry', (entry) => {
    if (!entry.stats.isDirectory()) {
      const n = entry.name
      status.cFile = n.length > 14
        ? `...${n.slice(n.length - 11)}`
        : n
      status.cSize = bytesToSize(entry.stats.size)
      status.tBytes += entry.stats.size
      status.tSize = bytesToSize(status.tBytes)
      bar.tick(1, status)
    }
  })

  const end = new Promise<void>((resolve) => {
    // Listen for all archive data to be written;
    // the 'close' event is fired only when a file descriptor is involved.
    output.on('close', () => {
      if (archive.pointer() < 1000)
        console.warn(`Zip file (${filename}) is only ${archive.pointer()} bytes`)
      resolve()
    })
  })

  // This event fires when the data source is drained, no matter what the source was.
  // It is not part of archiver itself but comes from the Node.js Stream API.
  // @see https://nodejs.org/api/stream.html#stream_event_end
  output.on('end', () => {
    // nothing to do
  })

  archive.on('warning', (err) => {
    // Treat any warning other than a missing file (ENOENT) as fatal.
    if (err.code !== 'ENOENT') {
      console.error(err)
      process.exit(1)
    }
  })
  archive.on('error', (err) => {
    console.error(err)
    process.exit(1)
  })
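
  // Pipe archive output to the zip file and queue every include glob for compression.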
  archive.pipe(output)
  INCLUDE_FILES.forEach((file) => {
    archive.glob(file, {
      cwd: targetPkgDir,
      skip: EXCLUDE_DIRS,
    })
  })
  archive.finalize()
  await end
}
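
// Start from a clean dist/ directory, then build one zip per browser target.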
fs.rmSync(path.join(__dirname, '../dist'), { recursive: true, force: true })
fs.mkdirSync(path.join(__dirname, '../dist'))

await zip('devtools-chrome.zip', 'chrome')
await zip('devtools-firefox.zip', 'firefox')