Skip to content
This repository was archived by the owner on Feb 12, 2024. It is now read-only.

Commit 001a6eb

Browse files
victorbdaviddias
authored and committed
feat(files): interface-ipfs-core tests over ipfs-api
1 parent 11cb4ca commit 001a6eb

File tree

3 files changed

+99
-84
lines changed

3 files changed

+99
-84
lines changed

package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -96,6 +96,7 @@
9696
"promisify-es6": "^1.0.1",
9797
"pull-file": "^1.0.0",
9898
"pull-paramap": "^1.1.6",
99+
"pull-pushable": "^2.0.1",
99100
"pull-sort": "^1.0.0",
100101
"pull-stream": "^3.4.5",
101102
"pull-stream-to-stream": "^1.3.3",

src/core/ipfs/files.js

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,7 @@ module.exports = function files (self) {
5353
pull(
5454
pull.values([hash]),
5555
pull.asyncMap(self._dagS.get.bind(self._dagS)),
56+
pull.take(1),
5657
pull.map((node) => {
5758
const data = UnixFS.unmarshal(node.data)
5859
if (data.type === 'directory') {
@@ -81,6 +82,10 @@ module.exports = function files (self) {
8182
return file
8283
})
8384
)))
85+
}),
86+
87+
getPull: promisify((hash, callback) => {
88+
callback(null, exporter(hash, self._dagS))
8489
})
8590
}
8691
}

src/http-api/resources/files.js

Lines changed: 93 additions & 84 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,16 @@
11
'use strict'
22

33
const bs58 = require('bs58')
4-
const ndjson = require('ndjson')
54
const multipart = require('ipfs-multipart')
65
const debug = require('debug')
76
const tar = require('tar-stream')
87
const log = debug('http-api:files')
98
log.error = debug('http-api:files:error')
10-
const async = require('async')
9+
const pull = require('pull-stream')
10+
const toStream = require('pull-stream-to-stream')
11+
const toPull = require('stream-to-pull-stream')
12+
const pushable = require('pull-pushable')
13+
const EOL = require('os').EOL
1114

1215
exports = module.exports
1316

@@ -37,15 +40,23 @@ exports.cat = {
3740
// main route handler which is called after the above `parseArgs`, but only if the args were valid
3841
handler: (request, reply) => {
3942
const key = request.pre.args.key
43+
const ipfs = request.server.app.ipfs
4044

41-
request.server.app.ipfs.files.cat(key, (err, stream) => {
45+
ipfs.files.cat(key, (err, stream) => {
4246
if (err) {
4347
log.error(err)
4448
return reply({
4549
Message: 'Failed to cat file: ' + err,
4650
Code: 0
4751
}).code(500)
4852
}
53+
54+
// hapi is not very clever and throws if no
55+
// - _read method
56+
// - _readableState object
57+
// are there :(
58+
stream._read = () => {}
59+
stream._readableState = {}
4960
return reply(stream).header('X-Stream-Output', '1')
5061
})
5162
}
@@ -58,45 +69,44 @@ exports.get = {
5869
// main route handler which is called after the above `parseArgs`, but only if the args were valid
5970
handler: (request, reply) => {
6071
const key = request.pre.args.key
61-
62-
request.server.app.ipfs.files.get(key, (err, stream) => {
63-
if (err) {
64-
log.error(err)
65-
return reply({
66-
Message: 'Failed to get file: ' + err,
67-
Code: 0
68-
}).code(500)
69-
}
70-
var pack = tar.pack()
71-
const files = []
72-
stream.on('data', (data) => {
73-
files.push(data)
74-
})
75-
const processFile = (file) => {
76-
return (callback) => {
77-
if (!file.content) { // is directory
78-
pack.entry({name: file.path, type: 'directory'})
79-
callback()
80-
} else { // is file
81-
const fileContents = []
82-
file.content.on('data', (data) => {
83-
fileContents.push(data)
84-
})
85-
file.content.on('end', () => {
86-
pack.entry({name: file.path}, Buffer.concat(fileContents))
87-
callback()
88-
})
72+
const ipfs = request.server.app.ipfs
73+
const pack = tar.pack()
74+
75+
ipfs.files.getPull(key, (err, stream) => {
76+
if (err) return handleError(err)
77+
78+
pull(
79+
stream,
80+
pull.asyncMap((file, cb) => {
81+
const header = {name: file.path}
82+
83+
if (!file.content) {
84+
header.type = 'directory'
85+
pack.entry(header)
86+
cb()
87+
} else {
88+
header.size = file.size
89+
toStream.source(file.content)
90+
.pipe(pack.entry(header, cb))
8991
}
90-
}
91-
}
92-
stream.on('end', () => {
93-
const callbacks = files.map(processFile)
94-
async.series(callbacks, () => {
92+
}),
93+
pull.onEnd((err) => {
94+
if (err) return handleError(err)
95+
9596
pack.finalize()
9697
reply(pack).header('X-Stream-Output', '1')
9798
})
98-
})
99+
)
99100
})
101+
102+
function handleError (err) {
103+
log.error(err)
104+
105+
reply({
106+
Message: 'Failed to get file: ' + err,
107+
Code: 0
108+
}).code(500)
109+
}
100110
}
101111
}
102112

@@ -106,67 +116,66 @@ exports.add = {
106116
return reply('Array, Buffer, or String is required.').code(400).takeover()
107117
}
108118

119+
const ipfs = request.server.app.ipfs
120+
// TODO: make pull-multipart
109121
const parser = multipart.reqParser(request.payload)
122+
let filesParsed = false
110123

111-
var filesParsed = false
112-
var filesAdded = 0
124+
const fileAdder = pushable()
113125

114-
var serialize = ndjson.serialize()
115-
// hapi doesn't permit object streams: http://hapijs.com/api#replyerr-result
116-
serialize._readableState.objectMode = false
117-
118-
request.server.app.ipfs.files.createAddStream((err, fileAdder) => {
119-
if (err) {
120-
return reply({
121-
Message: err,
122-
Code: 0
123-
}).code(500)
126+
parser.on('file', (fileName, fileStream) => {
127+
const filePair = {
128+
path: fileName,
129+
content: toPull(fileStream)
124130
}
131+
filesParsed = true
132+
fileAdder.push(filePair)
133+
})
125134

126-
fileAdder.on('data', (file) => {
127-
const filePath = file.path ? file.path : file.hash
128-
serialize.write({
129-
Name: filePath,
130-
Hash: file.hash
131-
})
132-
filesAdded++
135+
parser.on('directory', (directory) => {
136+
fileAdder.push({
137+
path: directory,
138+
content: ''
133139
})
140+
})
141+
142+
parser.on('end', () => {
143+
if (!filesParsed) {
144+
return reply("File argument 'data' is required.")
145+
.code(400).takeover()
146+
}
147+
fileAdder.end()
148+
})
134149

135-
fileAdder.on('end', () => {
136-
if (filesAdded === 0 && filesParsed) {
150+
pull(
151+
fileAdder,
152+
ipfs.files.createAddPullStream(),
153+
pull.map((file) => {
154+
return {
155+
Name: file.path ? file.path : file.hash,
156+
Hash: file.hash
157+
}
158+
}),
159+
pull.map((file) => JSON.stringify(file) + EOL),
160+
pull.collect((err, files) => {
161+
if (err) {
137162
return reply({
138-
Message: 'Failed to add files.',
163+
Message: err,
139164
Code: 0
140165
}).code(500)
141-
} else {
142-
serialize.end()
143-
return reply(serialize)
144-
.header('x-chunked-output', '1')
145-
.header('content-type', 'application/json')
146166
}
147-
})
148167

149-
parser.on('file', (fileName, fileStream) => {
150-
var filePair = {
151-
path: fileName,
152-
content: fileStream
168+
if (files.length === 0 && filesParsed) {
169+
return reply({
170+
Message: 'Failed to add files.',
171+
Code: 0
172+
}).code(500)
153173
}
154-
filesParsed = true
155-
fileAdder.write(filePair)
156-
})
157-
parser.on('directory', (directory) => {
158-
fileAdder.write({
159-
path: directory,
160-
content: ''
161-
})
162-
})
163174

164-
parser.on('end', () => {
165-
if (!filesParsed) {
166-
return reply("File argument 'data' is required.").code(400).takeover()
167-
}
168-
fileAdder.end()
175+
reply(files.join(''))
176+
.header('x-chunked-output', '1')
177+
.header('content-type', 'application/json')
169178
})
170-
})
179+
)
171180
}
172181
}

0 commit comments

Comments
 (0)