@@ -14,37 +14,31 @@ const {
} = require('../utils')
const values = require('pull-stream/sources/values')
const log = require('debug')('ipfs:mfs:write')
- const bs58 = require('bs58')
const importNode = require('./import-node')
const updateNode = require('./update-node')
const toPull = require('stream-to-pull-stream')
const isStream = require('is-stream')
- const isNode = require('detect-node')
const fileReaderStream = require('filereader-stream')
const isPullStream = require('is-pull-stream')
const cat = require('pull-cat')
const pull = require('pull-stream/pull')
-
- let fs
-
- if (isNode) {
-   fs = require('fs')
- }
+ const fs = require('fs')

const defaultOptions = {
  offset: 0, // the offset in the file to begin writing
  length: undefined, // how many bytes from the incoming buffer to write
  create: false, // whether to create the file if it does not exist
  truncate: false, // whether to truncate the file first
-   rawLeafNodes: true,
+   rawLeaves: false,
  reduceSingleLeafToSelf: false,
-   cidVersion: undefined,
+   cidVersion: 0,
  hashAlg: 'sha2-256',
  format: 'dag-pb',
  parents: false, // whether to create intermediate directories if they do not exist
  progress: undefined,
  strategy: 'trickle',
-   flush: true
+   flush: true,
+   leafType: 'raw'
}

const toPullSource = (content, options, callback) => {
@@ -160,10 +154,11 @@ const updateOrImport = (ipfs, options, path, source, containingFolder, callback)
    }, null)

    if (existingChild) {
-       log('Updating linked DAGNode', bs58.encode(existingChild.multihash))
+       const cid = new CID(existingChild.multihash)
+       log(`Updating linked DAGNode ${cid.toBaseEncodedString()}`)

      // overwrite the existing file or part of it, possibly truncating what's left
-       updateNode(ipfs, new CID(existingChild.multihash), source, options, next)
+       updateNode(ipfs, cid, source, options, next)
    } else {
      if (!options.create) {
        return next(new Error('file does not exist'))
@@ -185,10 +180,7 @@ const updateOrImport = (ipfs, options, path, source, containingFolder, callback)
      )

      log('Importing file', path.name)
-       importNode(ipfs, source, options, (error, result) => {
-         log(`Imported file ${path.name} ${bs58.encode(result.multihash)}`)
-         next(error, result)
-       })
+       importNode(ipfs, source, options, next)
    }
  },
@@ -205,12 +197,6 @@ const updateOrImport = (ipfs, options, path, source, containingFolder, callback)
      // Store new containing folder CID
      containingFolder.node = newContaingFolder

-       log(`New CID for the containing folder is ${bs58.encode(newContaingFolder.multihash)}`)
-
-       newContaingFolder.links.forEach(link => {
-         log(`${link.name} ${bs58.encode(link.multihash)}`)
-       })
-
      next(error)
    }),