Merge dbd8e61 into 5c26fae
nginnever committed May 3, 2016
2 parents 5c26fae + dbd8e61 commit 849f96d
Showing 17 changed files with 907 additions and 27 deletions.
4 changes: 3 additions & 1 deletion package.json
@@ -68,14 +68,16 @@
"ipfs-data-importing": "^0.3.3",
"ipfs-merkle-dag": "^0.5.0",
"ipfs-multipart": "^0.1.0",
"ipfs-repo": "^0.7.1",
"ipfs-repo": "^0.7.4",
"ipfs-unixfs-engine": "^0.5.0",
"joi": "^8.0.2",
"libp2p-ipfs": "^0.3.3",
"lodash.get": "^4.2.1",
"lodash.set": "^4.0.0",
"multiaddr": "^1.3.0",
"peer-id": "^0.6.6",
"peer-info": "^0.6.2",
"readable-stream": "^1.1.13",
"ronin": "^0.3.11",
"temp": "^0.8.3"
},
126 changes: 118 additions & 8 deletions src/cli/commands/files/add.js
@@ -1,11 +1,16 @@
'use strict'

const Command = require('ronin').Command
-const IPFS = require('../../../core')
+const utils = require('../../utils')
const debug = require('debug')
const log = debug('cli:version')
log.error = debug('cli:version:error')
const bs58 = require('bs58')
+const Readable = require('stream').Readable
+const fs = require('fs')
+const async = require('async')
+const pathj = require('path')
+const glob = require('glob')
module.exports = Command.extend({
desc: 'Add a file to IPFS using the UnixFS data format',
@@ -19,15 +24,120 @@ module.exports = Command.extend({
},

  run: (recursive, path) => {
-    var node = new IPFS()
-    path = process.cwd() + '/' + path
-    node.files.add(path, {
-      recursive: recursive
-    }, (err, stats) => {
+    let s
+    let rs
+
+    if (!path) {
+      throw new Error('Error: Argument \'path\' is required')
+    }
+
+    s = fs.statSync(path)
+
+    if (s.isDirectory() && recursive === false) {
+      throw new Error('Error: ' + process.cwd() + ' is a directory, use the \'-r\' flag to specify directories')
+    }
+    if (path === '.' && recursive === true) {
+      path = process.cwd()
+      s = fs.statSync(process.cwd())
+    } else if (path === '.' && recursive === false) {
+      s = fs.statSync(process.cwd())
+      if (s.isDirectory()) {
+        throw new Error('Error: ' + process.cwd() + ' is a directory, use the \'-r\' flag to specify directories')
+      }
+    }
+
+    glob(pathj.join(path, '/**/*'), (err, res) => {
      if (err) {
-        return console.log(err)
+        throw new Error(err)
      }
-      console.log('added', bs58.encode(stats.Hash).toString(), stats.Name)
+      utils.getIPFS((err, ipfs) => {
+        if (err) {
+          throw new Error(err)
+        }
+        var files = []
+        if (utils.isDaemonOn()) {
+          console.log('daemon on')
+          if (res.length !== 0) {
+            const index = path.lastIndexOf('/')
+            async.eachLimit(res, 10, (element, callback) => {
+              rs = new Readable()
+              const addPath = element.substring(index + 1, element.length)
+              if (fs.statSync(element).isDirectory()) {
+                callback()
+              } else {
+                const buffered = fs.readFileSync(element)
+                rs.push(buffered)
+                rs.push(null)
+                const filePair = {path: addPath, content: rs}
+                files.push(filePair)
+                callback()
+              }
+            }, (err) => {
+              if (err) {
+                throw new Error(err)
+              }
+              ipfs.add(files, (err, res) => {
+                if (err) {
+                  throw new Error(err)
+                }
+                res.forEach((goRes) => {
+                  console.log('added', goRes.Hash, goRes.Name)
+                })
+              })
+            })
+          } else {
+            rs = new Readable()
+            const buffered = fs.readFileSync(path)
+            rs.push(buffered)
+            rs.push(null)
+            const filePair = {path: path, content: rs}
+            files.push(filePair)
+            ipfs.add(files, (err, res) => {
+              if (err) {
+                throw new Error(err)
+              }
+              console.log('added', res[0].Hash, res[0].Name)
+            })
+          }
+          return
+        }
+        const i = ipfs.files.add()
+        i.on('data', (file) => {
+          console.log('added', bs58.encode(file.multihash).toString(), file.path)
+        })
+        if (res.length !== 0) {
+          const index = path.lastIndexOf('/')
+          async.eachLimit(res, 10, (element, callback) => {
+            rs = new Readable()
+            const addPath = element.substring(index + 1, element.length)
+            if (fs.statSync(element).isDirectory()) {
+              callback()
+            } else {
+              const buffered = fs.readFileSync(element)
+              rs.push(buffered)
+              rs.push(null)
+              const filePair = {path: addPath, stream: rs}
+              i.write(filePair)
+              callback()
+            }
+          }, (err) => {
+            if (err) {
+              throw new Error(err)
+            }
+            i.end()
+            return
+          })
+        } else {
+          rs = new Readable()
+          const buffered = fs.readFileSync(path)
+          path = path.substring(path.lastIndexOf('/') + 1, path.length)
+          rs.push(buffered)
+          rs.push(null)
+          const filePair = {path: path, stream: rs}
+          i.write(filePair)
+          i.end()
+        }
+      })
+    })
  }
})
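
For orientation: the non-daemon path above drives the core importer directly. Calling ipfs.files.add() with no arguments returns the importer as a stream, and the command writes {path, stream} tuples into it. A minimal sketch of that pattern, assuming an initialized local repo and a hypothetical file ./hello.txt:

const fs = require('fs')
const bs58 = require('bs58')
const Readable = require('stream').Readable
const IPFS = require('../../../core') // core constructor, required as elsewhere in this repo

const node = new IPFS()
const adder = node.files.add() // no arguments: returns the importer stream

adder.on('data', (file) => {
  // file.multihash is a raw multihash buffer, so base58-encode it for display
  console.log('added', bs58.encode(file.multihash).toString(), file.path)
})

const rs = new Readable()
rs.push(fs.readFileSync('./hello.txt')) // hypothetical example file
rs.push(null)

adder.write({path: 'hello.txt', stream: rs})
adder.end()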
47 changes: 47 additions & 0 deletions src/cli/commands/files/cat.js
@@ -0,0 +1,47 @@
'use strict'

const Command = require('ronin').Command
const debug = require('debug')
const utils = require('../../utils')
const log = debug('cli:files')
log.error = debug('cli:files:error')

module.exports = Command.extend({
  desc: 'Download IPFS objects',

  options: {},

  run: (path, options) => {
    if (!path) {
      throw new Error("Argument 'path' is required")
    }
    if (!options) {
      options = {}
    }
    utils.getIPFS((err, ipfs) => {
      if (err) {
        throw err
      }
      if (utils.isDaemonOn()) {
        ipfs.cat(path, (err, res) => {
          if (err) {
            throw new Error(err)
          }
          res.pipe(process.stdout)
        })
        return
      }

      ipfs.files.cat(path, (err, res) => {
        if (err) {
          throw new Error(err)
        }
        if (res) {
          res.on('file', (data) => {
            data.stream.pipe(process.stdout)
          })
        }
      })
    })
  }
})
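
In the non-daemon branch, the res handed to the callback is not a byte stream but the exporter event emitter returned by the core API (see src/core/index.js below); each 'file' event carries a readable stream for one object. A minimal consumption sketch, assuming ipfs is an initialized node and hash is a valid multihash for a file object:

ipfs.files.cat(hash, (err, exportEvent) => {
  if (err) {
    throw err
  }
  // one 'file' event per exported object; pipe its contents to stdout
  exportEvent.on('file', (data) => {
    data.stream.pipe(process.stdout)
  })
})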
70 changes: 70 additions & 0 deletions src/cli/commands/files/get.js
@@ -0,0 +1,70 @@
'use strict'

const Command = require('ronin').Command
const debug = require('debug')
const utils = require('../../utils')
const log = debug('cli:files')
log.error = debug('cli:files:error')
var fs = require('fs')

module.exports = Command.extend({
  desc: 'Download IPFS objects',

  options: {},

  run: (path, options) => {
    let dir
    let filepath
    let ws

    if (!path) {
      throw new Error("Argument 'path' is required")
    }
    if (!options) {
      options = {}
      dir = process.cwd()
    } else {
      if (options.slice(-1) !== '/') {
        options += '/'
      }
      dir = options
    }

    utils.getIPFS((err, ipfs) => {
      if (err) {
        throw err
      }
      ipfs.files.get(path, (err, data) => {
        if (err) {
          throw new Error(err)
        }
        data.on('file', (data) => {
          if (data.path.lastIndexOf('/') === -1) {
            filepath = data.path
            if (data.dir === false) {
              ws = fs.createWriteStream(dir + data.path)
              data.stream.pipe(ws)
            } else {
              try {
                fs.mkdirSync(dir + filepath)
              } catch (err) {
                console.log(err)
              }
            }
          } else {
            filepath = data.path.substring(0, data.path.lastIndexOf('/') + 1)
            try {
              fs.mkdirSync(dir + filepath)
            } catch (err) {
            }
            ws = fs.createWriteStream(dir + data.path)
            // data.stream.on('end', () => {
            //   console.log('finished writing file to disk')
            // })
            data.stream.pipe(ws)
          }
        })
      })
    })
  }
})
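
The branching above turns the flat paths emitted by the exporter back into a directory tree: entries without a '/' are written (or created as directories) at the output root, while nested entries first ensure their parent directory exists. A condensed sketch of the same walk, with hypothetical names; entry mirrors the {path, dir, stream} objects emitted on 'file' events:

const fs = require('fs')
const pathj = require('path')

function writeEntry (dir, entry) {
  const target = pathj.join(dir, entry.path)
  if (entry.dir) {
    // directory node: just make sure it exists on disk
    try { fs.mkdirSync(target) } catch (err) { /* likely EEXIST, ignored as above */ }
    return
  }
  // file node: create the parent directory, then stream the contents out
  try { fs.mkdirSync(pathj.dirname(target)) } catch (err) { /* likely EEXIST */ }
  entry.stream.pipe(fs.createWriteStream(target))
}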
54 changes: 49 additions & 5 deletions src/core/index.js
@@ -9,10 +9,12 @@ const DAGService = mDAG.DAGService
const peerId = require('peer-id')
const PeerInfo = require('peer-info')
const multiaddr = require('multiaddr')
-const importer = require('ipfs-data-importing').import
+const Importer = require('ipfs-unixfs-engine').importer
+const Exporter = require('ipfs-unixfs-engine').exporter
const libp2p = require('libp2p-ipfs')
const init = require('./init')
const IPFSRepo = require('ipfs-repo')
+const UnixFS = require('ipfs-unixfs')

exports = module.exports = IPFS

@@ -392,10 +394,52 @@ function IPFS (repo) {
}

  this.files = {
-    add: (path, options, callback) => {
-      options.path = path
-      options.dagService = dagS
-      importer(options, callback)
+    add: (arr, callback) => {
+      if (typeof arr === 'function') {
+        callback = arr
+        arr = undefined
+      }
+      if (callback === undefined) {
+        callback = function noop () {}
+      }
+      if (arr === undefined) {
+        return new Importer(dagS)
+      }
+
+      const i = new Importer(dagS)
+      const res = []
+
+      i.on('data', (info) => {
+        res.push(info)
+      })
+
+      i.on('end', () => {
+        callback(null, res)
+      })
+
+      arr.forEach((tuple) => {
+        i.write(tuple)
+      })
+
+      i.end()
+    },
+    cat: (hash, callback) => {
+      dagS.get(hash, (err, fetchedNode) => {
+        if (err) {
+          return callback(err, null)
+        }
+        const data = UnixFS.unmarshal(fetchedNode.data)
+        if (data.type === 'directory') {
+          callback('This dag node is a directory', null)
+        } else {
+          const exportEvent = Exporter(hash, dagS)
+          callback(null, exportEvent)
+        }
+      })
+    },
+    get: (hash, callback) => {
+      var exportFile = Exporter(hash, dagS)
+      callback(null, exportFile)
    }
  }
}
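
Taken together, the new core API is usable two ways: stream-style (call add with no arguments and write tuples, as the CLI does above) or buffered (pass an array of tuples and collect the results in the callback). A usage sketch of the buffered form, assuming node is an initialized IPFS instance; the file name and contents are made up:

const Readable = require('stream').Readable

const rs = new Readable()
rs.push('hello ipfs') // made-up contents
rs.push(null)

// each tuple is written straight into the importer; res holds the
// objects collected from its 'data' events
node.files.add([{path: 'hello.txt', stream: rs}], (err, res) => {
  if (err) {
    throw err
  }
  res.forEach((file) => {
    console.log('added', file.path)
  })
})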
