Commit e374a90: Merge e93810a into 506cbf1
nginnever committed May 5, 2016
2 parents: 506cbf1 + e93810a

Showing 10 changed files with 350 additions and 37 deletions.
23 changes: 12 additions & 11 deletions package.json
@@ -37,46 +37,47 @@
  },
  "homepage": "https://github.com/ipfs/js-ipfs#readme",
  "devDependencies": {
-    "aegir": "^3.0.0",
+    "aegir": "^3.0.1",
    "async": "^2.0.0-rc.3",
    "buffer-loader": "0.0.1",
    "chai": "^3.5.0",
    "expose-loader": "^0.7.1",
    "form-data": "^1.0.0-rc3",
    "idb-plus-blob-store": "^1.1.2",
-    "lodash": "^4.11.1",
-    "mocha": "^2.3.4",
+    "lodash": "^4.11.2",
+    "mocha": "^2.4.5",
    "ncp": "^2.0.0",
    "nexpect": "^0.5.0",
    "pre-commit": "^1.1.2",
-    "rimraf": "^2.4.4",
+    "rimraf": "^2.5.2",
    "stream-to-promise": "^1.1.0",
    "transform-loader": "^0.2.3"
  },
  "dependencies": {
    "babel-runtime": "^6.6.1",
    "bl": "^1.1.2",
-    "boom": "^3.1.1",
+    "boom": "^3.1.2",
    "bs58": "^3.0.0",
    "debug": "^2.2.0",
    "fs-blob-store": "^5.2.1",
    "glob": "^7.0.3",
    "hapi": "^13.3.0",
-    "ipfs-api": "^3.0.1",
+    "ipfs-api": "^3.0.2",
    "ipfs-block": "^0.3.0",
    "ipfs-block-service": "^0.3.0",
-    "ipfs-data-importing": "^0.3.3",
    "ipfs-merkle-dag": "^0.5.0",
    "ipfs-multipart": "^0.1.0",
    "ipfs-repo": "^0.8.0",
-    "joi": "^8.0.2",
-    "libp2p-ipfs": "^0.3.3",
+    "ipfs-unixfs-engine": "^0.6.1",
+    "joi": "^8.0.5",
+    "libp2p-ipfs": "^0.3.5",
    "lodash.get": "^4.2.1",
-    "lodash.set": "^4.0.0",
-    "multiaddr": "^1.3.0",
+    "lodash.set": "^4.1.0",
+    "multiaddr": "^1.4.1",
    "peer-book": "0.1.0",
    "peer-id": "^0.6.6",
    "peer-info": "^0.6.2",
    "readable-stream": "^1.1.13",
    "ronin": "^0.3.11",
    "temp": "^0.8.3"
  },
77 changes: 69 additions & 8 deletions src/cli/commands/files/add.js
@@ -1,11 +1,16 @@
'use strict'

const Command = require('ronin').Command
-const IPFS = require('../../../core')
+const utils = require('../../utils')
const debug = require('debug')
const log = debug('cli:version')
log.error = debug('cli:version:error')
const bs58 = require('bs58')
+const Readable = require('stream').Readable
+const fs = require('fs')
+const async = require('async')
+const pathj = require('path')
+const glob = require('glob')

module.exports = Command.extend({
  desc: 'Add a file to IPFS using the UnixFS data format',

@@ -19,15 +24,71 @@ module.exports = Command.extend({
  },

  run: (recursive, path) => {
-    var node = new IPFS()
-    path = process.cwd() + '/' + path
-    node.files.add(path, {
-      recursive: recursive
-    }, (err, stats) => {
+    let rs
+
+    if (!path) {
+      throw new Error('Error: Argument \'path\' is required')
+    }
+
+    var s = fs.statSync(path)
+
+    if (s.isDirectory() && recursive === false) {
+      throw new Error('Error: ' + path + ' is a directory, use the \'-r\' flag to specify directories')
+    }
+    if (path === '.' && recursive === true) {
+      path = process.cwd()
+      s = fs.statSync(process.cwd())
+    } else if (path === '.' && recursive === false) {
+      s = fs.statSync(process.cwd())
+      if (s.isDirectory()) {
+        throw new Error('Error: ' + process.cwd() + ' is a directory, use the \'-r\' flag to specify directories')
+      }
+    }
+
+    glob(pathj.join(path, '/**/*'), (err, res) => {
      if (err) {
-        return console.log(err)
+        throw err
      }
-      console.log('added', bs58.encode(stats.Hash).toString(), stats.Name)
+      utils.getIPFS((err, ipfs) => {
+        if (err) {
+          throw err
+        }
+        const i = ipfs.files.add()
+        i.on('data', (file) => {
+          console.log('added', bs58.encode(file.multihash).toString(), file.path)
+        })
+        if (res.length !== 0) {
+          const index = path.lastIndexOf('/')
+          async.eachLimit(res, 10, (element, callback) => {
+            rs = new Readable()
+            const addPath = element.substring(index + 1, element.length)
+            if (fs.statSync(element).isDirectory()) {
+              callback()
+            } else {
+              const buffered = fs.readFileSync(element)
+              rs.push(buffered)
+              rs.push(null)
+              const filePair = {path: addPath, stream: rs}
+              i.write(filePair)
+              callback()
+            }
+          }, (err) => {
+            if (err) {
+              throw err
+            }
+            i.end()
+          })
+        } else {
+          rs = new Readable()
+          const buffered = fs.readFileSync(path)
+          path = path.substring(path.lastIndexOf('/') + 1, path.length)
+          rs.push(buffered)
+          rs.push(null)
+          const filePair = {path: path, stream: rs}
+          i.write(filePair)
+          i.end()
+        }
+      })
    })
  }
})
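
The command above drives the importer through {path, stream} tuples. As a rough illustration, a minimal sketch of that flow for a single file, assuming an initialized core ipfs instance; the addSingleFile helper is hypothetical, not part of this commit:

const fs = require('fs')
const Readable = require('stream').Readable
const bs58 = require('bs58')

// Hypothetical helper mirroring the command's single-file branch.
function addSingleFile (ipfs, filePath) {
  const importer = ipfs.files.add() // no arguments: returns the raw importer stream
  importer.on('data', (file) => {
    // each imported file reports its multihash and its path inside the DAG
    console.log('added', bs58.encode(file.multihash).toString(), file.path)
  })
  const rs = new Readable()
  rs.push(fs.readFileSync(filePath)) // buffer the file bytes into the stream
  rs.push(null)                      // signal end-of-stream
  importer.write({path: filePath, stream: rs})
  importer.end()
}

Buffering with readFileSync mirrors the command's own approach; a streaming read would avoid holding the whole file in memory.
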
37 changes: 37 additions & 0 deletions src/cli/commands/files/cat.js
@@ -0,0 +1,37 @@
'use strict'

const Command = require('ronin').Command
const debug = require('debug')
const utils = require('../../utils')
const log = debug('cli:files')
log.error = debug('cli:files:error')

module.exports = Command.extend({
  desc: 'Download IPFS objects',

  options: {},

  run: (path, options) => {
    if (!path) {
      throw new Error("Argument 'path' is required")
    }
    if (!options) {
      options = {}
    }
    utils.getIPFS((err, ipfs) => {
      if (err) {
        throw err
      }
      ipfs.files.cat(path, (err, res) => {
        if (err) {
          throw err
        }
        if (res) {
          res.on('file', (data) => {
            data.stream.pipe(process.stdout)
          })
        }
      })
    })
  }
})
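
For reference, a sketch of the core call this command wraps, assuming an initialized ipfs instance; the 'QmHash...' value is a placeholder. Core cat unmarshals the node first, so a directory hash comes back as an error through the callback rather than as an exporter:

ipfs.files.cat('QmHash...', (err, res) => { // placeholder multihash
  if (err) {
    throw err // e.g. 'This dag node is a directory'
  }
  res.on('file', (data) => {
    data.stream.pipe(process.stdout) // stream the file bytes to stdout
  })
})
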
69 changes: 69 additions & 0 deletions src/cli/commands/files/get.js
@@ -0,0 +1,69 @@
'use strict'

const Command = require('ronin').Command
const debug = require('debug')
const utils = require('../../utils')
const log = debug('cli:files')
log.error = debug('cli:files:error')
var fs = require('fs')
const pathj = require('path')

module.exports = Command.extend({
  desc: 'Download IPFS objects',

  options: {},

  run: (path, options) => {
    let dir
    let filepath
    let ws

    if (!path) {
      throw new Error("Argument 'path' is required")
    }
    if (!options) {
      options = {}
      dir = process.cwd()
    } else {
      if (options.slice(-1) !== '/') {
        options += '/'
      }
      dir = options
    }

    utils.getIPFS((err, ipfs) => {
      if (err) {
        throw err
      }
      ipfs.files.get(path, (err, data) => {
        if (err) {
          throw err
        }
        data.on('file', (data) => {
          if (data.path.lastIndexOf('/') === -1) {
            filepath = data.path
            if (data.dir === false) {
              ws = fs.createWriteStream(pathj.join(dir, data.path))
              data.stream.pipe(ws)
            } else {
              try {
                fs.mkdirSync(pathj.join(dir, data.path))
              } catch (err) {
                throw err
              }
            }
          } else {
            filepath = data.path.substring(0, data.path.lastIndexOf('/') + 1)
            try {
              fs.mkdirSync(pathj.join(dir, filepath))
            } catch (err) {
              throw err
            }
            ws = fs.createWriteStream(pathj.join(dir, data.path))
            data.stream.pipe(ws)
          }
        })
      })
    })
  }
})
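
The same exporter can be consumed directly. A minimal sketch that simplifies the command's path handling, assuming an initialized ipfs instance and an existing output directory outDir (both placeholders), using the data.dir flag the command relies on for top-level entries:

const fs = require('fs')
const pathj = require('path')

ipfs.files.get('QmHash...', (err, exporter) => { // placeholder multihash
  if (err) {
    throw err
  }
  exporter.on('file', (data) => {
    if (data.dir) {
      fs.mkdirSync(pathj.join(outDir, data.path)) // recreate the directory entry
    } else {
      data.stream.pipe(fs.createWriteStream(pathj.join(outDir, data.path)))
    }
  })
})
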
54 changes: 49 additions & 5 deletions src/core/index.js
@@ -8,10 +8,12 @@ const DAGService = mDAG.DAGService
const peerId = require('peer-id')
const PeerInfo = require('peer-info')
const multiaddr = require('multiaddr')
-const importer = require('ipfs-data-importing').import
+const Importer = require('ipfs-unixfs-engine').importer
+const Exporter = require('ipfs-unixfs-engine').exporter
const libp2p = require('libp2p-ipfs')
const IPFSRepo = require('ipfs-repo')
const PeerBook = require('peer-book')
+const UnixFS = require('ipfs-unixfs')

const init = require('./init')
const defaultRepo = require('./default-repo')
@@ -403,10 +405,52 @@ function IPFS (repo) {
  }

  this.files = {
-    add: (path, options, callback) => {
-      options.path = path
-      options.dagService = dagS
-      importer(options, callback)
+    add: (arr, callback) => {
+      if (typeof arr === 'function') {
+        callback = arr
+        arr = undefined
+      }
+      if (callback === undefined) {
+        callback = function noop () {}
+      }
+      if (arr === undefined) {
+        return new Importer(dagS)
+      }
+
+      const i = new Importer(dagS)
+      const res = []
+
+      i.on('data', (info) => {
+        res.push(info)
+      })
+
+      i.once('end', () => {
+        callback(null, res)
+      })
+
+      arr.forEach((tuple) => {
+        i.write(tuple)
+      })
+
+      i.end()
+    },
+    cat: (hash, callback) => {
+      dagS.get(hash, (err, fetchedNode) => {
+        if (err) {
+          return callback(err, null)
+        }
+        const data = UnixFS.unmarshal(fetchedNode.data)
+        if (data.type === 'directory') {
+          callback('This dag node is a directory', null)
+        } else {
+          const exportEvent = Exporter(hash, dagS)
+          callback(null, exportEvent)
+        }
+      })
+    },
+    get: (hash, callback) => {
+      var exportFile = Exporter(hash, dagS)
+      callback(null, exportFile)
    }
  }
}
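
Besides the no-argument form used by the CLI, the new files.add accepts an array of tuples and buffers the importer's output before calling back. A short sketch, assuming node is an initialized IPFS instance and rs is a Readable stream carrying the file bytes:

node.files.add([{path: 'hello.txt', stream: rs}], (err, res) => {
  if (err) {
    throw err
  }
  // res collects every 'data' event the importer emitted
  res.forEach((file) => console.log(file.path))
})
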
44 changes: 39 additions & 5 deletions src/core/init.js
@@ -4,6 +4,10 @@ const peerId = require('peer-id')
const BlockService = require('ipfs-block-service')
const DagService = require('ipfs-merkle-dag').DAGService
const path = require('path')
+const glob = require('glob')
+const async = require('async')
+const Readable = require('stream').Readable
+const fs = require('fs')

module.exports = (repo, opts, callback) => {
  opts = opts || {}
@@ -66,17 +70,47 @@ module.exports = (repo, opts, callback) => {
      return doneImport(null)
    }

-    const importer = require('ipfs-data-importing')
+    const Importer = require('ipfs-unixfs-engine').importer
    const blocks = new BlockService(repo)
    const dag = new DagService(blocks)

    const initDocsPath = path.join(__dirname, '../init-files/init-docs')

-    importer.import(initDocsPath, dag, {
-      recursive: true
-    }, doneImport)
+    const i = new Importer(dag)
+    i.resume()
+
+    glob(path.join(initDocsPath, '/**/*'), (err, res) => {
+      if (err) {
+        throw err
+      }
+      const index = __dirname.lastIndexOf('/')
+      async.eachLimit(res, 10, (element, callback) => {
+        const addPath = element.substring(index + 1, element.length)
+        if (fs.statSync(element).isDirectory()) {
+          callback()
+        } else {
+          const buffered = fs.readFileSync(element)
+          const rs = new Readable()
+          rs.push(buffered)
+          rs.push(null)
+          const filePair = {path: addPath, stream: rs}
+          i.write(filePair)
+          callback()
+        }
+      }, (err) => {
+        if (err) {
+          throw err
+        }
+        i.end()
+        return
+      })
+    })
+
+    i.once('end', () => {
+      doneImport(null)
+    })

-    function doneImport (err, stat) {
+    function doneImport (err) {
      if (err) { return callback(err) }

      // All finished!
