Skip to content
This repository has been archived by the owner on Feb 12, 2024. It is now read-only.

docs(example): show folks how to use pull-streams instead #988

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
3 changes: 3 additions & 0 deletions examples/exchange-files-in-browser/package.json
Expand Up @@ -12,6 +12,9 @@
"http-server": "^0.10.0"
},
"dependencies": {
"browserify-aes": "crypto-browserify/browserify-aes#master",
"pull-filereader": "^1.0.1",
"pull-stream": "^3.6.0",
"stream-buffers": "^3.0.1"
}
}
2 changes: 1 addition & 1 deletion examples/exchange-files-in-browser/public/index.html
Expand Up @@ -62,7 +62,7 @@ <h2>Peers</h2>
</div>

<!-- The IPFS node module -->
<script src="//unpkg.com/ipfs/dist/index.min.js"></script>
<!-- <script src="//unpkg.com/ipfs/dist/index.min.js"></script> -->
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This was supposed to be the example that showed that "no extra batteries required". Kind of sad we lost that.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@diasdavid unpkg doesn't allow linking to a branch? We could also publish latest master ourselves on IPFS and link that instead

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We don't have to keep this; with the new release it would be fine to use the unpkg version again

<!-- <script src="js/app.js"></script> -->
<script src="js/bundle.js"></script>
</body>
Expand Down
120 changes: 36 additions & 84 deletions examples/exchange-files-in-browser/public/js/app.js
@@ -1,5 +1,4 @@
'use strict'
/* global self */

const $startButton = document.querySelector('#start')
const $stopButton = document.querySelector('#stop')
Expand All @@ -20,7 +19,9 @@ const $details = document.querySelector('#details')
const $allDisabledButtons = document.querySelectorAll('button:disabled')
const $allDisabledInputs = document.querySelectorAll('input:disabled')
const $filesList = document.querySelector('.file-list')
const streamBuffers = require('stream-buffers')
const Ipfs = require('../../../../src/core')
const pullFilereader = require('pull-filereader')
const pull = require('pull-stream')

let node
let peerInfo
Expand All @@ -33,7 +34,7 @@ function start () {
if (!node) {
updateView('starting', node)

node = new self.Ipfs({repo: 'ipfs-' + Math.random()})
node = new Ipfs({repo: 'ipfs-' + Math.random()})

node.on('start', () => {
node.id().then((id) => {
Expand Down Expand Up @@ -119,93 +120,44 @@ function onDrop (event) {
onError('IPFS must be started before files can be added')
return
}
const dt = event.dataTransfer
const files = dt.files

function readFileContents (file) {
return new Promise((resolve) => {
const reader = new window.FileReader()
reader.onload = (event) => resolve(event.target.result)
reader.readAsArrayBuffer(file)
})
}

let filesArray = []
for (let i = 0; i < files.length; i++) {
filesArray.push(files[i])
let files = []
for (let i = 0; i < event.dataTransfer.files.length; i++) {
files.push(event.dataTransfer.files[i])
}

filesArray.map((file) => {
readFileContents(file)
.then((buffer) => {
let fileSize = buffer.byteLength

if (fileSize < 50000000) {
return node.files.add([{
path: file.name,
content: new node.types.Buffer(buffer)
}])
} else {
// use createAddStream and chunk the file.
let progress = 0

let myReadableStreamBuffer = new streamBuffers.ReadableStreamBuffer({
// frequency: 10, // in milliseconds.
chunkSize: 32048 // in bytes.
})

node.files.createAddStream((err, stream) => {
if (err) throw err

stream.on('data', (file) => {
$multihashInput.value = file.hash
$filesStatus.innerHTML = `Added ${file.path} as ${file.hash}`

if (progressbar) {
clearInterval(progressbar)
progress = 0
}
})

myReadableStreamBuffer.on('data', (chunk) => {
progress += chunk.byteLength
})

if (!myReadableStreamBuffer.destroy) {
myReadableStreamBuffer.destroy = () => {}
}

stream.write({
path: file.name,
content: myReadableStreamBuffer
})

myReadableStreamBuffer.put(Buffer.from(buffer))
myReadableStreamBuffer.stop()

myReadableStreamBuffer.on('end', () => {
stream.end()
})

myReadableStreamBuffer.resume()

// progress.
let progressbar = setInterval(() => {
console.log('progress: ', progress, '/', fileSize, ' = ', Math.floor((progress / fileSize) * 100), '%')
}, 5000)
})
pull(
pull.values(files),
pull.through((file) => console.log('Adding %s', file)),
pull.asyncMap((file, cb) => pull(
pull.values([{
path: file.name,
content: pullFilereader(file)
}]),
node.files.createAddPullStream(),
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is awesome, but we shouldn't have examples with undocumented (and already agreed to change) API

Needs this solved first. ipfs-inactive/interface-js-ipfs-core#126

pull.collect((err, res) => {
if (err) {
return cb(err)
}
})
.then((files) => {
if (files && files.length) {
$multihashInput.value = files[0].hash
$filesStatus.innerHTML = files
const file = res[0]
console.log('Adding %s finished', file.path)

$multihashInput.value = file.hash
$filesStatus.innerHTML = `Added ${file.path} as ${file.hash}`
cb(null, file)
}))),
pull.collect((err, files) => {
if (err) {
return onError(err)
}
if (files && files.length) {
$multihashInput.value = files[0].hash
$filesStatus.innerHTML = files
.map((e) => `Added ${e.path} as ${e.hash}`)
.join('<br>')
}
})
.catch(onError)
})
}
})
)
}

/*
Expand Down