Commit

Change direction into a solution which reads package-lock.json and resolves packages to be downloaded from there
heikkipora committed Feb 3, 2018
1 parent 8184b91 commit bafec70
Showing 14 changed files with 1,414 additions and 905 deletions.
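
Taken together, the new modules split the work into three steps: src/resolve.js flattens a package-lock.json into a unique list of name/version pairs, src/download.js fetches each tarball and rewrites its registry metadata for a local mirror, and src/client.js performs the HTTP requests with an in-memory metadata cache. Below is a minimal sketch of how the exported functions could be wired together; the sync() wrapper, file paths, and cache-file location are placeholders and not part of this commit.

import {dependenciesFromPackageLock, dependenciesNotInCache, updateDependenciesCache} from './src/resolve'
import {downloadAll} from './src/download'

const options = {
  registryUrl: 'https://registry.npmjs.org', // upstream registry to download from
  localUrl: 'https://localhost:8443',        // base URL written into the rewritten dist.tarball links
  rootFolder: '/tmp/registry'                // where index.json files and tarballs are stored
}
const cacheFilePath = '/tmp/registry/.cache.json' // hypothetical location for the dependency cache

async function sync() {
  // Flatten package-lock.json into a sorted, de-duplicated list of {id, name, version}
  const packages = await dependenciesFromPackageLock('./package-lock.json')
  // Only download packages that were not synced on a previous run
  const changed = await dependenciesNotInCache(packages, cacheFilePath)
  await downloadAll(changed, options)
  await updateDependenciesCache(packages, cacheFilePath)
}
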
2 changes: 1 addition & 1 deletion .eslintrc.js
@@ -246,7 +246,7 @@ module.exports = {
     "semi": "off",
     "semi-spacing": "error",
     "sort-imports": ["error", {
-      "ignoreCase": false,
+      "ignoreCase": true,
       "ignoreMemberSort": false,
       "memberSyntaxSortOrder": ["none", "all", "single", "multiple"]
     }],
2 changes: 1 addition & 1 deletion .nvmrc
@@ -1 +1 @@
-9.4.0
+8.9.4
2 changes: 1 addition & 1 deletion package.json
@@ -9,7 +9,7 @@
   "scripts": {
     "build": "./build-npm",
     "eslint": "eslint --fix src/* bin/*",
-    "test": "mocha --require babel-register --require babel-polyfill --timeout 60000 test/*.js"
+    "test": "mocha --require babel-register --require babel-polyfill --timeout 10000 test/*.js"
   },
   "author": "Heikki Pora",
   "license": "MIT",
23 changes: 11 additions & 12 deletions src/client.js
@@ -4,22 +4,21 @@ const metadataCache = {}
 const timeout = 60 * 1000
 
 export async function fetchUrl(url, isBinary = false) {
-  if (metadataCache[url]) {
-    return metadataCache[url]
+  if (isBinary) {
+    return fetchBinary(url)
   }
 
-  const encoding = isBinary ? null : 'utf8'
-  const body = await request({encoding, url, timeout})
-  return cacheMetadata(url, parseJson(body, isBinary))
+  if (!metadataCache[url]) {
+    metadataCache[url] = await fetchJson(url)
+  }
+  return metadataCache[url]
 }
 
-function cacheMetadata(url, body) {
-  if (!url.endsWith('.tgz')) {
-    metadataCache[url] = body
-  }
-  return body
+function fetchBinary(url) {
+  return request({encoding: null, url, timeout})
 }
 
-function parseJson(body, isBinary) {
-  return isBinary ? body : JSON.parse(body)
+async function fetchJson(url) {
+  const body = await request({gzip: true, url, timeout})
+  return JSON.parse(body)
 }
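
In the rewritten client, only JSON metadata is cached in memory (keyed by URL and fetched with gzip enabled), while tarballs bypass the cache and resolve to raw buffers. A small usage sketch follows; the registry URL and the abbrev package are borrowed from the test file further down, and the wrapping example() function is illustrative only.

import {fetchUrl} from './src/client'

async function example() {
  // Metadata: fetched once per URL, served from the in-memory cache afterwards
  const metadata = await fetchUrl('https://registry.npmjs.org/abbrev')
  // Tarball: isBinary = true skips the cache and resolves to a Buffer
  return fetchUrl(metadata.versions['1.1.1'].dist.tarball, true)
}
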
103 changes: 103 additions & 0 deletions src/download.js
@@ -0,0 +1,103 @@
import {fetchUrl} from './client'
import mkdirp from 'mkdirp'
import path from 'path'
import Promise from 'bluebird'
import semver from 'semver'
import url from 'url'
import {sha1, sha512, verifyIntegrity} from './integrity'

const fs = Promise.promisifyAll(require('fs'))

const concurrency = 5

export function downloadAll(packages, {registryUrl, localUrl, rootFolder}) {
  const downloadFromRegistry = download.bind(null, registryUrl, localUrl, rootFolder)
  return Promise.map(packages, downloadFromRegistry, {concurrency})
}

async function download(registryUrl, localUrl, rootFolder, {name, version}) {
  const registryMetadata = await fetchMetadata(name, registryUrl)
  const versionMetadata = registryMetadata.versions[version]
  if (!versionMetadata) {
    throw new Error(`Unknown package version ${name}@${version}`)
  }

  const localFolder = await ensureLocalFolderExists(name, rootFolder)
  const data = await downloadTarball(versionMetadata, localFolder)

  const localVersionMetadata = rewriteVersionMetadata(versionMetadata, data, localUrl)
  await updateMetadata(localVersionMetadata, registryMetadata, registryUrl, localFolder)
}

function rewriteVersionMetadata(versionMetadata, data, localUrl) {
  const dist = {
    integrity: sha512(data),
    shasum: sha1(data),
    tarball: tarballUrl(versionMetadata.name, versionMetadata.version, localUrl)
  }
  return {...versionMetadata, dist}
}

async function downloadTarball({_id: id, name, version, dist}, localFolder) {
  const data = await fetchTarball(dist.tarball)
  verifyIntegrity(data, id, dist)
  await fs.writeFileAsync(tarballPath(name, version, localFolder), data)
  return data
}

async function updateMetadata(versionMetadata, defaultMetadata, registryUrl, localFolder) {
  const {name, version} = versionMetadata
  const localMetadataPath = metadataPath(name, localFolder)
  const localMetadata = await loadMetadata(localMetadataPath, defaultMetadata)
  localMetadata.versions[version] = versionMetadata
  localMetadata.time[version] = defaultMetadata.time[version]
  localMetadata['dist-tags'].latest = Object.keys(localMetadata.versions).sort(semver.compare).pop()
  await saveMetadata(localMetadataPath, localMetadata)
}

async function loadMetadata(path, defaultMetadata) {
  try {
    const json = await fs.readFileAsync(path, 'utf8')
    return JSON.parse(json)
  } catch (fileNotFound) {
    return {...defaultMetadata, 'dist-tags': {}, time: {}, versions: {}}
  }
}

async function saveMetadata(path, metadata) {
  const json = JSON.stringify(metadata, null, 2)
  await fs.writeFileAsync(path, json, 'utf8')
}

function metadataPath(name, localFolder) {
  return path.join(localFolder, 'index.json')
}

function tarballPath(name, version, localFolder) {
  return path.join(localFolder, tarballFilename(name, version))
}

function tarballUrl(name, version, localUrl) {
  return url.resolve(localUrl, `${name}/${tarballFilename(name, version)}`)
}

function tarballFilename(name, version) {
  const normalized = name.replace(/\//g, '-')
  return `${normalized}-${version}.tgz`
}

async function ensureLocalFolderExists(name, rootFolder) {
  const localFolder = path.resolve(rootFolder, name)
  await mkdirp(localFolder)
  return localFolder
}

function fetchTarball(tarballUrl) {
  return fetchUrl(tarballUrl, true)
}

function fetchMetadata(name, registryUrl) {
  const urlSafeName = name.replace(/\//g, '%2f')
  return fetchUrl(url.resolve(registryUrl, urlSafeName))
}
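
For each {name, version} pair, download.js fetches the registry metadata, verifies and stores the tarball, and maintains a per-package index.json whose dist URLs point at the local mirror. A hedged sketch of a single-package call and the files it should produce; the syncOne() wrapper and the /tmp/registry folder are placeholders, while the URLs and the abbrev package come from the test file below.

import {downloadAll} from './src/download'

async function syncOne() {
  // Expected result under /tmp/registry/abbrev/ :
  //   abbrev-1.1.1.tgz - the tarball, verified against the registry's sha512/sha1 values
  //   index.json       - registry metadata whose versions/time entries cover only the synced versions,
  //                      with dist.tarball rewritten to https://localhost:8443/abbrev/abbrev-1.1.1.tgz
  //                      and dist-tags.latest set to the highest synced semver version
  await downloadAll(
    [{id: 'abbrev@1.1.1', name: 'abbrev', version: '1.1.1'}],
    {registryUrl: 'https://registry.npmjs.org', localUrl: 'https://localhost:8443', rootFolder: '/tmp/registry'}
  )
}

Scoped package names are fetched from the registry with the '/' encoded as %2f, and the '/' is flattened to '-' in the tarball filename.
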
25 changes: 25 additions & 0 deletions src/integrity.js
@@ -0,0 +1,25 @@
import ssri from 'ssri'

export function verifyIntegrity(data, id, {integrity, shasum}) {
  if (!integrity && !shasum) {
    throw new Error(`Integrity values not present in metadata for ${id}`)
  }

  if (integrity) {
    if (sha512(data) != integrity) {
      throw new Error(`Integrity check with SHA512 failed for ${id}`)
    }
  } else if (sha1(data) != shasum) {
    throw new Error(`Integrity check with SHA1 failed for ${id}`)
  }
}

export function sha1(data) {
  const [integrity] = ssri.fromData(data, {algorithms: ['sha1']}).sha1
  return integrity.hexDigest()
}

export function sha512(data) {
  const [integrity] = ssri.fromData(data, {algorithms: ['sha512']}).sha512
  return integrity.toString()
}
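
verifyIntegrity prefers the SHA-512 subresource-integrity string and only falls back to the legacy SHA-1 shasum when no integrity value is present. A small round-trip sketch using the module's own helpers; the package id and the data buffer are made up for illustration.

import {sha1, sha512, verifyIntegrity} from './src/integrity'

const data = Buffer.from('tarball bytes')                     // stand-in for downloaded tarball data
const dist = {integrity: sha512(data), shasum: sha1(data)}

verifyIntegrity(data, 'example@1.0.0', dist)                  // ok: sha512(data) matches dist.integrity
verifyIntegrity(data, 'example@1.0.0', {shasum: sha1(data)})  // ok: falls back to the sha1 shasum
verifyIntegrity(Buffer.from('other'), 'example@1.0.0', dist)  // throws: SHA512 mismatch
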
37 changes: 37 additions & 0 deletions src/resolve.js
@@ -0,0 +1,37 @@
import _ from 'lodash'
import Promise from 'bluebird'

const fs = Promise.promisifyAll(require('fs'))

export function updateDependenciesCache(dependencies, cacheFilePath) {
  return fs.writeFileAsync(cacheFilePath, JSON.stringify(dependencies), 'utf8')
}

export async function dependenciesNotInCache(dependencies, cacheFilePath) {
  try {
    const json = await fs.readFileAsync(cacheFilePath, 'utf8')
    const cachedDependencies = JSON.parse(json)
    return _.differenceBy(dependencies, cachedDependencies, 'id')
  } catch (fileNotFound) {
    return dependencies
  }
}

export async function dependenciesFromPackageLock(path) {
  const json = await fs.readFileAsync(path, 'utf8')
  const dependencyTree = dependenciesRecursive(JSON.parse(json))
  return _(dependencyTree)
    .flattenDeep()
    .map(({name, version}) => ({id: `${name}@${version}`, name, version}))
    .sortBy('id')
    .sortedUniqBy('id')
    .value()
}

function dependenciesRecursive({dependencies}) {
  return _(dependencies)
    .mapValues((props, name) => [{name, version: props.version}].concat(dependenciesRecursive(props)))
    .values()
    .value()
}
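
dependenciesFromPackageLock walks the nested dependencies tree of a package-lock.json and returns a flat, sorted list with duplicates removed. A sketch with a hypothetical lock file; the gauge/aproba entries and the example() wrapper are illustrative only.

import {dependenciesFromPackageLock} from './src/resolve'

// Given a package-lock.json containing:
// {
//   "dependencies": {
//     "gauge": {
//       "version": "2.7.4",
//       "dependencies": {"aproba": {"version": "1.2.0"}}
//     },
//     "aproba": {"version": "1.2.0"}
//   }
// }
async function example() {
  const packages = await dependenciesFromPackageLock('./package-lock.json')
  // -> [
  //      {id: 'aproba@1.2.0', name: 'aproba', version: '1.2.0'},  // nested duplicate collapsed by sortedUniqBy
  //      {id: 'gauge@2.7.4', name: 'gauge', version: '2.7.4'}
  //    ]
  return packages
}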

2 changes: 0 additions & 2 deletions test/.npmrc

This file was deleted.

23 changes: 23 additions & 0 deletions test/download-test.js
@@ -0,0 +1,23 @@
import {expect} from 'chai'
import fs from 'fs'
import {downloadAll} from '../src/download'
import rimraf from 'rimraf'

const options = {
  registryUrl: 'https://registry.npmjs.org',
  localUrl: 'https://localhost:8443',
  rootFolder: `${__dirname}/.download`
}

describe.only('download', () => {
  before(done => rimraf(options.rootFolder, done))

  it('Should download all packages and create metadata files', async () => {
    const packages = [
      {id: "abbrev@1.1.0", name: "abbrev", version: "1.1.0"},
      {id: "abbrev@1.1.1", name: "abbrev", version: "1.1.1"},
      {id: "aproba@1.2.0", name: "aproba", version: "1.2.0"}
    ]
    await downloadAll(packages, options)
  })
})
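
The test only exercises the happy path so far. A hedged sketch of assertions that could follow the downloadAll call inside the it() callback, based on the layout download.js produces; these lines are not part of this commit and reuse the fs and expect imports already present in the test file.

const metadata = JSON.parse(fs.readFileSync(`${options.rootFolder}/abbrev/index.json`, 'utf8'))
expect(Object.keys(metadata.versions)).to.have.members(['1.1.0', '1.1.1'])
expect(metadata['dist-tags'].latest).to.equal('1.1.1')
expect(fs.existsSync(`${options.rootFolder}/abbrev/abbrev-1.1.1.tgz`)).to.equal(true)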