diff --git a/.aegir.js b/.aegir.js index 979ffde3df..5d87aa66d7 100644 --- a/.aegir.js +++ b/.aegir.js @@ -1,8 +1,11 @@ 'use strict' -const createServer = require('ipfsd-ctl').createServer +const IPFSFactory = require('ipfsd-ctl') +const parallel = require('async/parallel') +const MockPreloadNode = require('./test/utils/mock-preload-node') -const server = createServer() +const ipfsdServer = IPFSFactory.createServer() +const preloadNode = MockPreloadNode.createNode() module.exports = { webpack: { @@ -21,9 +24,29 @@ module.exports = { singleRun: true }, hooks: { + node: { + pre: (cb) => preloadNode.start(cb), + post: (cb) => preloadNode.stop(cb) + }, browser: { - pre: server.start.bind(server), - post: server.stop.bind(server) + pre: (cb) => { + parallel([ + (cb) => { + ipfsdServer.start() + cb() + }, + (cb) => preloadNode.start(cb) + ], cb) + }, + post: (cb) => { + parallel([ + (cb) => { + ipfsdServer.stop() + cb() + }, + (cb) => preloadNode.stop(cb) + ], cb) + } } } } diff --git a/.npmignore b/.npmignore index 59335fda64..6609102526 100644 --- a/.npmignore +++ b/.npmignore @@ -32,3 +32,4 @@ build node_modules test +examples diff --git a/CHANGELOG.md b/CHANGELOG.md index 8292a2da8d..1a516395d9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,105 @@ + +## [0.31.7](https://github.com/ipfs/js-ipfs/compare/v0.31.6...v0.31.7) (2018-08-20) + + +### Bug Fixes + +* fails to start when preload disabled ([#1516](https://github.com/ipfs/js-ipfs/issues/1516)) ([511ab47](https://github.com/ipfs/js-ipfs/commit/511ab47)), closes [#1514](https://github.com/ipfs/js-ipfs/issues/1514) +* npm publishes examples folder ([#1513](https://github.com/ipfs/js-ipfs/issues/1513)) ([4a68ac1](https://github.com/ipfs/js-ipfs/commit/4a68ac1)) + + + + +## [0.31.6](https://github.com/ipfs/js-ipfs/compare/v0.31.5...v0.31.6) (2018-08-17) + + +### Features + +* adds data-encoding argument to control data encoding ([#1420](https://github.com/ipfs/js-ipfs/issues/1420)) ([1eb8485](https://github.com/ipfs/js-ipfs/commit/1eb8485)) + + + + +## [0.31.5](https://github.com/ipfs/js-ipfs/compare/v0.31.4...v0.31.5) (2018-08-17) + + +### Bug Fixes + +* add missing space after emoji ([5cde7c1](https://github.com/ipfs/js-ipfs/commit/5cde7c1)) +* improper input validation ([#1506](https://github.com/ipfs/js-ipfs/issues/1506)) ([91a482b](https://github.com/ipfs/js-ipfs/commit/91a482b)) +* object.patch.rmLink not working ([#1508](https://github.com/ipfs/js-ipfs/issues/1508)) ([afd3255](https://github.com/ipfs/js-ipfs/commit/afd3255)) +* stub out call to fetch for ipfs.dns test in browser ([#1512](https://github.com/ipfs/js-ipfs/issues/1512)) ([86c3d81](https://github.com/ipfs/js-ipfs/commit/86c3d81)) + + + + +## [0.31.4](https://github.com/ipfs/js-ipfs/compare/v0.31.3...v0.31.4) (2018-08-09) + + +### Bug Fixes + +* consistent badge style in docs ([#1494](https://github.com/ipfs/js-ipfs/issues/1494)) ([4a72e23](https://github.com/ipfs/js-ipfs/commit/4a72e23)) +* files.ls and files.read*Stream tests ([#1493](https://github.com/ipfs/js-ipfs/issues/1493)) ([a0bc79b](https://github.com/ipfs/js-ipfs/commit/a0bc79b)) + + + + +## [0.31.3](https://github.com/ipfs/js-ipfs/compare/v0.31.2...v0.31.3) (2018-08-09) + + +### Bug Fixes + +* failing tests in master ([#1488](https://github.com/ipfs/js-ipfs/issues/1488)) ([e607560](https://github.com/ipfs/js-ipfs/commit/e607560)) +* **dag:** check dag.put options for plain object ([#1480](https://github.com/ipfs/js-ipfs/issues/1480)) ([d0b671b](https://github.com/ipfs/js-ipfs/commit/d0b671b)), closes 
[#1479](https://github.com/ipfs/js-ipfs/issues/1479) +* **dht:** allow for options object in `findProvs()` API ([#1457](https://github.com/ipfs/js-ipfs/issues/1457)) ([99911b1](https://github.com/ipfs/js-ipfs/commit/99911b1)), closes [#1322](https://github.com/ipfs/js-ipfs/issues/1322) + + + + +## [0.31.2](https://github.com/ipfs/js-ipfs/compare/v0.31.1...v0.31.2) (2018-08-02) + + +### Bug Fixes + +* fix content-type by doing a fall-back using extensions ([#1482](https://github.com/ipfs/js-ipfs/issues/1482)) ([d528b3f](https://github.com/ipfs/js-ipfs/commit/d528b3f)) + + + + +## [0.31.1](https://github.com/ipfs/js-ipfs/compare/v0.31.0...v0.31.1) (2018-07-29) + + +### Bug Fixes + +* logo link ([a9219ad](https://github.com/ipfs/js-ipfs/commit/a9219ad)) +* XMLHTTPRequest is deprecated and unavailable in service workers ([#1478](https://github.com/ipfs/js-ipfs/issues/1478)) ([7d6f0ca](https://github.com/ipfs/js-ipfs/commit/7d6f0ca)) + + + + +# [0.31.0](https://github.com/ipfs/js-ipfs/compare/v0.30.1...v0.31.0) (2018-07-29) + + +### Bug Fixes + +* emit boot error only once ([#1472](https://github.com/ipfs/js-ipfs/issues/1472)) ([45b80a0](https://github.com/ipfs/js-ipfs/commit/45b80a0)) + + +### Features + +* preload content ([#1464](https://github.com/ipfs/js-ipfs/issues/1464)) ([bffe080](https://github.com/ipfs/js-ipfs/commit/bffe080)), closes [#1459](https://github.com/ipfs/js-ipfs/issues/1459) +* preload on content fetch requests ([#1475](https://github.com/ipfs/js-ipfs/issues/1475)) ([649b755](https://github.com/ipfs/js-ipfs/commit/649b755)), closes [#1473](https://github.com/ipfs/js-ipfs/issues/1473) +* remove decomissioned bootstrappers ([e3868f4](https://github.com/ipfs/js-ipfs/commit/e3868f4)) +* rm decomissioned bootstrappers - nodejs ([90e9f68](https://github.com/ipfs/js-ipfs/commit/90e9f68)) +* support --raw-leaves ([#1454](https://github.com/ipfs/js-ipfs/issues/1454)) ([1f63e8c](https://github.com/ipfs/js-ipfs/commit/1f63e8c)) + + +### Reverts + +* docs: add migration note about upgrading from < 0.30.0 ([#1450](https://github.com/ipfs/js-ipfs/issues/1450)) ([#1456](https://github.com/ipfs/js-ipfs/issues/1456)) ([f4344b0](https://github.com/ipfs/js-ipfs/commit/f4344b0)) + + + ## [0.30.1](https://github.com/ipfs/js-ipfs/compare/v0.30.0...v0.30.1) (2018-07-17) diff --git a/MGMT.md b/MGMT.md deleted file mode 100644 index 35c508f91d..0000000000 --- a/MGMT.md +++ /dev/null @@ -1,36 +0,0 @@ -# Core Dev Team Work Tracking & Managment - -## How work gets organized (a tl;dr;) - -The js-ipfs core working group follows the OKR structure established for the IPFS project to set the quarterly targets. Within each quarter, work gets tracked using Github and Waffle. - -- Github is used for discussions and track current endeavours. -- Waffle gives us a [Kanban](https://en.wikipedia.org/wiki/Kanban) view over the work at hand. - -![](https://ipfs.io/ipfs/QmWNd86qtjyFnygSAHkZDy4fUB1WnRa4WNt8gt1rSiq7of) - -In the Waffle board, we have 4 columns: - -- **Inbox** - New issues or PRs that haven't been evaluated yet -- **Backlog** - Issues that are blocked or discussion threads that are not currently active -- **Ready** - Issues Ready to be worked on -- **In Progress** - Issues that someone is already tackling. Contributors should focus on a few things rather than many at once. -- **Done** - Issues are automatically moved here when the issue is closed or the PR merged. 
- -We track work for the JavaScript implementation of the IPFS protocol in 3 separate waffle boards: - -- [js-ipfs](http://waffle.io/ipfs/js-ipfs) -- [js-libp2p](http://waffle.io/libp2p/js-libp2p) -- [js-ipld](http://waffle.io/ipld/js-ipld) - -## Issue labels and how to use filters - -We use labels to tag urgency and the difficulty of an issue. The current label system has: - -- `difficulty:{easy, moderate, hard}` - This is an instinctive measure give by the project lead or leads. It is a subjective best guess, however the current golden rule is that an issue with difficulty:easy should not require more than a morning (3~4 hours) to do and it should not require having to mess with multiple modules to complete. Issues with difficulty moderate or hard might require some discussion around the problem or even request that another team (i.e go-ipfs) makes some changes. The length of moderate or hard issue might be a day to ad-aeternum. -- `priority (P0, P1, P2, P3, P4)` - P0 is the most important while P4 is the least. -- `help wanted` - Issues perfect for new contributors. They will have the information necessary or the pointers for a new contributor to figure out what is required. These issues are never blocked on some other issue be done first. - -## Weekly Core Dev Team Calls - -[⚡️ⒿⓈ Core Dev Team Weekly Sync 🙌🏽](https://github.com/ipfs/pm/issues/650) diff --git a/README.md b/README.md index 0c20f35cdb..8693353cdc 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@

-IPFS in JavaScript logo
+IPFS in JavaScript logo

@@ -7,10 +7,10 @@

The JavaScript implementation of the IPFS protocol.


@@ -18,10 +18,10 @@


@@ -97,15 +97,22 @@ This project is available through [npm](https://www.npmjs.com/). To install run > npm install ipfs --save ``` -Requires npm@3 and node@6 or above, tested on OSX & Linux, expected to work on Windows. +We support both the Current and Active LTS versions of Node.js. Please see [nodejs.org](https://nodejs.org/) for what these currently are. + +This project is tested on OSX & Linux, expected to work on Windows. ### Use in Node.js -To include this project programmatically: +To create an IPFS node programmatically: -```JavaScript +```js const IPFS = require('ipfs') const node = new IPFS() + +node.on('ready', () => { + // Ready to use! + // See https://github.com/ipfs/js-ipfs#core-api +}) ``` ### Through command line tool @@ -113,7 +120,7 @@ const node = new IPFS() In order to use js-ipfs as a CLI, you must install it with the `global` flag. Run the following (even if you have ipfs installed locally): ```bash -> npm install ipfs --global +npm install ipfs --global ``` The CLI is available by using the command `jsipfs` in your terminal. This is aliased, instead of using `ipfs`, to make sure it does not conflict with the [Go implementation](https://github.com/ipfs/go-ipfs). @@ -134,7 +141,19 @@ You can also load it using a ` ``` -Inserting one of the above lines will make an `Ipfs` object available in the global namespace. + +Inserting one of the above lines will make an `Ipfs` object available in the global namespace: + +```html + +``` ## Usage @@ -174,7 +193,7 @@ If you want a programmatic way to spawn a IPFS Daemon using JavaScript, check ou Use the IPFS Module as a dependency of a project to __spawn in process instances of IPFS__. Create an instance by calling `new IPFS()` and waiting for its `ready` event: -```JavaScript +```js // Create the IPFS node instance const node = new IPFS() @@ -202,53 +221,122 @@ const node = new IPFS([options]) Creates and returns an instance of an IPFS node. Use the `options` argument to specify advanced configuration. It is an object with any of these properties: -- `repo` (string or [`ipfs.Repo`](https://github.com/ipfs/js-ipfs-repo) instance): The file path at which to store the IPFS node’s data. Alternatively, you can set up a customized storage system by providing an [`ipfs.Repo`](https://github.com/ipfs/js-ipfs-repo) instance. (Default: `'~/.jsipfs'` in Node.js, `'ipfs'` in browsers.) - Example: +##### `options.repo` - ```js - // Store data outside your user directory - const node = new IPFS({ repo: '/var/ipfs/data' }) - ``` +| Type | Default | +|------|---------| +| string or [`ipfs.Repo`](https://github.com/ipfs/js-ipfs-repo) instance | `'~/.jsipfs'` in Node.js, `'ipfs'` in browsers | + +The file path at which to store the IPFS node’s data. Alternatively, you can set up a customized storage system by providing an [`ipfs.Repo`](https://github.com/ipfs/js-ipfs-repo) instance. + +Example: + +```js +// Store data outside your user directory +const node = new IPFS({ repo: '/var/ipfs/data' }) +``` + +##### `options.init` + +| Type | Default | +|------|---------| +| boolean or object | `true` | -- `init` (boolean or object): Initialize the repo when creating the IPFS node. (Default: `true`) +Initialize the repo when creating the IPFS node. - If you have already initialized a repo before creating your IPFS node (e.g. you are loading a repo that was saved to disk from a previous run of your program), you must make sure to set this to `false`. 
Note that *initializing* a repo is different from creating an instance of [`ipfs.Repo`](https://github.com/ipfs/js-ipfs-repo). The IPFS constructor sets many special properties when initializing a repo, so you should usually not try and call `repoInstance.init()` yourself. +If you have already initialized a repo before creating your IPFS node (e.g. you are loading a repo that was saved to disk from a previous run of your program), you must make sure to set this to `false`. Note that *initializing* a repo is different from creating an instance of [`ipfs.Repo`](https://github.com/ipfs/js-ipfs-repo). The IPFS constructor sets many special properties when initializing a repo, so you should usually not try and call `repoInstance.init()` yourself. - Instead of a boolean, you may provide an object with custom initialization options. All properties are optional: +Instead of a boolean, you may provide an object with custom initialization options. All properties are optional: - - `init.emptyRepo` (boolean) Whether to remove built-in assets, like the instructional tour and empty mutable file system, from the repo. (Default: `false`) - - `init.bits` (number) Number of bits to use in the generated key pair. (Default: `2048`) - - `init.pass` (string) A passphrase to encrypt keys. You should generally use the top-level `pass` option instead of the `init.pass` option (this one will take its value from the top-level option if not set). +- `emptyRepo` (boolean) Whether to remove built-in assets, like the instructional tour and empty mutable file system, from the repo. (Default: `false`) +- `bits` (number) Number of bits to use in the generated key pair. (Default: `2048`) +- `pass` (string) A passphrase to encrypt keys. You should generally use the [top-level `pass` option](#optionspass) instead of the `init.pass` option (this one will take its value from the top-level option if not set). -- `start` (boolean): If `false`, do not automatically start the IPFS node. Instead, you’ll need to manually call `node.start()` yourself. (Default: `true`) +##### `options.start` -- `pass` (string): A passphrase to encrypt/decrypt your keys. +| Type | Default | +|------|---------| +| boolean | `true` | -- `relay` (object): Configure circuit relay (see the [circuit relay tutorial](https://github.com/ipfs/js-ipfs/tree/master/examples/circuit-relaying) to learn more). - - `enabled` (boolean): Enable circuit relay dialer and listener. (Default: `false`) - - `hop` (object) - - `enabled` (boolean): Make this node a relay (other nodes can connect *through* it). (Default: `false`) - - `active` (boolean): Make this an *active* relay node. Active relay nodes will attempt to dial a destination peer even if that peer is not yet connected to the relay. (Default: `false`) + If `false`, do not automatically start the IPFS node. Instead, you’ll need to manually call [`node.start()`](#nodestartcallback) yourself. -- `EXPERIMENTAL` (object): Enable and configure experimental features. - - `pubsub` (boolean): Enable libp2p pub-sub. (Default: `false`) - - `sharding` (boolean): Enable directory sharding. Directories that have many child objects will be represented by multiple DAG nodes instead of just one. It can improve lookup performance when a directory has several thousand files or more. (Default: `false`) - - `dht` (boolean): Enable KadDHT. **This is currently not interopable with `go-ipfs`.** +##### `options.pass` -- `config` (object) Modify the default IPFS node config. 
Find the Node.js defaults at [`src/core/runtime/config-nodejs.js`](https://github.com/ipfs/js-ipfs/tree/master/src/core/runtime/config-nodejs.js) and the browser defaults at [`src/core/runtime/config-browser.js`](https://github.com/ipfs/js-ipfs/tree/master/src/core/runtime/config-browser.js). This object will be *merged* with the default config; it will not replace it. +| Type | Default | +|------|---------| +| string | `null` | -- `libp2p` (object) add custom modules to the libp2p stack of your node - - `modules` (object): - - `transport` (Array<[libp2p.Transport](https://github.com/libp2p/interface-transport)>): An array of Libp2p transport classes/instances to use _instead_ of the defaults. See [libp2p/interface-transport](https://github.com/libp2p/interface-transport) for details. - - `peerDiscovery` (Array<[libp2p.PeerDiscovery](https://github.com/libp2p/interface-peer-discovery)>): An array of Libp2p peer discovery classes/instances to use _instead_ of the defaults. See [libp2p/peer-discovery](https://github.com/libp2p/interface-peer-discovery) for details. If passing a class, configuration can be passed using the config section below under the key corresponding to you module's unique `tag` (a static property on the class) - - `config` (object): - - `peerDiscovery` (object): - - `[PeerDiscovery.tag]` (object): configuration for a peer discovery module - - `enabled` (boolean): whether this module is enabled or disabled - - `[custom config]` (any): other keys are specific to the module +A passphrase to encrypt/decrypt your keys. -- `connectionManager` (object): Configure the libp2p connection manager, see the [documentation for available options](https://github.com/libp2p/js-libp2p-connection-manager#create-a-connectionmanager). +##### `options.relay` + +| Type | Default | +|------|---------| +| object | `{ enabled: false, hop: { enabled: false, active: false } }` | + +Configure circuit relay (see the [circuit relay tutorial](https://github.com/ipfs/js-ipfs/tree/master/examples/circuit-relaying) to learn more). + +- `enabled` (boolean): Enable circuit relay dialer and listener. (Default: `false`) +- `hop` (object) + - `enabled` (boolean): Make this node a relay (other nodes can connect *through* it). (Default: `false`) + - `active` (boolean): Make this an *active* relay node. Active relay nodes will attempt to dial a destination peer even if that peer is not yet connected to the relay. (Default: `false`) + +##### `options.preload` + +| Type | Default | +|------|---------| +| object | `{ enabled: true, addresses: [...] }` | + +Configure external nodes that will preload content added to this node. + +- `enabled` (boolean): Enable content preloading (Default: `true`) +- `addresses` (array): Multiaddr API addresses of nodes that should preload content. **NOTE:** nodes specified here should also be added to your node's bootstrap address list at [`config.Boostrap`](#optionsconfig). + +##### `options.EXPERIMENTAL` + +| Type | Default | +|------|---------| +| object | `{ pubsub: false, sharding: false, dht: false }` | + +Enable and configure experimental features. + +- `pubsub` (boolean): Enable libp2p pub-sub. (Default: `false`) +- `sharding` (boolean): Enable directory sharding. Directories that have many child objects will be represented by multiple DAG nodes instead of just one. It can improve lookup performance when a directory has several thousand files or more. (Default: `false`) +- `dht` (boolean): Enable KadDHT. 
**This is currently not interopable with `go-ipfs`.** + +##### `options.config` + +| Type | Default | +|------|---------| +| object | [`config-nodejs.js`](https://github.com/ipfs/js-ipfs/tree/master/src/core/runtime/config-nodejs.js) in Node.js, [`config-browser.js`](https://github.com/ipfs/js-ipfs/tree/master/src/core/runtime/config-browser.js) in browsers | + +Modify the default IPFS node config. This object will be *merged* with the default config; it will not replace it. + +##### `options.libp2p` + +| Type | Default | +|------|---------| +| object | [`libp2p-nodejs.js`](https://github.com/ipfs/js-ipfs/blob/master/src/core/runtime/libp2p-nodejs.js) in Node.js, [`libp2p-browser.js`](https://github.com/ipfs/js-ipfs/blob/master/src/core/runtime/libp2p-browser.js) in browsers | + +Add custom modules to the libp2p stack of your node. + +- `modules` (object): + - `transport` (Array<[libp2p.Transport](https://github.com/libp2p/interface-transport)>): An array of Libp2p transport classes/instances to use _instead_ of the defaults. See [libp2p/interface-transport](https://github.com/libp2p/interface-transport) for details. + - `peerDiscovery` (Array<[libp2p.PeerDiscovery](https://github.com/libp2p/interface-peer-discovery)>): An array of Libp2p peer discovery classes/instances to use _instead_ of the defaults. See [libp2p/peer-discovery](https://github.com/libp2p/interface-peer-discovery) for details. If passing a class, configuration can be passed using the config section below under the key corresponding to you module's unique `tag` (a static property on the class) +- `config` (object): + - `peerDiscovery` (object): + - `[PeerDiscovery.tag]` (object): configuration for a peer discovery module + - `enabled` (boolean): whether this module is enabled or disabled + - `[custom config]` (any): other keys are specific to the module + +##### `options.connectionManager` + +| Type | Default | +|------|---------| +| object | [defaults](https://github.com/libp2p/js-libp2p-connection-manager#create-a-connectionmanager) | + +Configure the libp2p connection manager. #### Events @@ -284,31 +372,55 @@ Start listening for connections with other IPFS nodes on the network. In most ca This method is asynchronous. There are several ways to be notified when the node has finished starting: 1. If you call `node.start()` with no arguments, it returns a promise. -2. If you pass a function as the final argument, it will be called when the node is started. *(Note: this method will **not** return a promise if you use a callback function.)* + + ```js + const node = new IPFS({ start: false }) + + node.on('ready', async () => { + console.log('Node is ready to use!') + + try { + await node.start() + console.log('Node started!') + } catch (error) { + console.error('Node failed to start!', error) + } + }) + ``` + +2. If you pass a function as the final argument, it will be called when the node is started (Note: this method will **not** return a promise if you use a callback function). + + ```js + const node = new IPFS({ start: false }) + + node.on('ready', () => { + console.log('Node is ready to use!') + + node.start(error => { + if (error) { + return console.error('Node failed to start!', error) + } + console.log('Node started!') + }) + }) + ``` + 3. You can listen for the [`start` event](#events). 
-```js -const node = new IPFS({ start: false }) - -// Use a promise: -node.start() - .then(() => console.log('Node started!')) - .catch(error => console.error('Node failed to start!', error)) - -// OR use a callback: -node.start(error => { - if (error) { - console.error('Node failed to start!', error) - return - } - console.log('Node started!') -}) + ```js + const node = new IPFS({ start: false }) -// OR use events: -node.on('error', error => console.error('Something went terribly wrong!', error)) -node.on('start', () => console.log('Node started!')) -node.start() -``` + node.on('ready', () => { + console.log('Node is ready to use!') + node.start() + }) + + node.on('error', error => { + console.error('Something went terribly wrong!', error) + }) + + node.on('start', () => console.log('Node started!')) + ``` #### `node.stop([callback])` @@ -317,33 +429,55 @@ Close and stop listening for connections with other IPFS nodes, then release acc This method is asynchronous. There are several ways to be notified when the node has completely stopped: 1. If you call `node.stop()` with no arguments, it returns a promise. -2. If you pass a function as the final argument, it will be called when the node is stopped. *(Note: this method will **not** return a promise if you use a callback function.)* + + ```js + const node = new IPFS() + + node.on('ready', async () => { + console.log('Node is ready to use!') + + try { + await node.stop() + console.log('Node stopped!') + } catch (error) { + console.error('Node failed to stop cleanly!', error) + } + }) + ``` + +2. If you pass a function as the final argument, it will be called when the node is stopped (Note: this method will **not** return a promise if you use a callback function). + + ```js + const node = new IPFS() + + node.on('ready', () => { + console.log('Node is ready to use!') + + node.stop(error => { + if (error) { + return console.error('Node failed to stop cleanly!', error) + } + console.log('Node stopped!') + }) + }) + ``` + 3. You can listen for the [`stop` event](#events). -```js -const node = new IPFS() -node.on('ready', () => { - console.log('Node is ready to use!') - - // Stop with a promise: - node.stop() - .then(() => console.log('Node stopped!')) - .catch(error => console.error('Node failed to stop cleanly!', error)) - - // OR use a callback: - node.stop(error => { - if (error) { - console.error('Node failed to stop cleanly!', error) - return - } - console.log('Node stopped!') - }) + ```js + const node = new IPFS() - // OR use events: - node.on('error', error => console.error('Something went terribly wrong!', error)) - node.stop() -}) -``` + node.on('ready', () => { + console.log('Node is ready to use!') + node.stop() + }) + + node.on('error', error => { + console.error('Something went terribly wrong!', error) + }) + + node.on('stop', () => console.log('Node stopped!')) + ``` #### Core API @@ -353,17 +487,17 @@ The IPFS core API provides all functionality that is not specific to setting up The core API is grouped into several areas: -#### `Files` +#### Files - [files](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md) - [`ipfs.files.add(data, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesadd). Alias to `ipfs.add`. 
- [`ipfs.files.addPullStream([options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesaddpullstream) - [`ipfs.files.addReadableStream([options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesaddreadablestream) - [`ipfs.files.cat(ipfsPath, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filescat). Alias to `ipfs.cat`. - - [`ipfs.files.catPullStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filescatpullstream) + - [`ipfs.files.catPullStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filescatpullstream) - [`ipfs.files.catReadableStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filescatreadablestream) - [`ipfs.files.get(ipfsPath, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesget). Alias to `ipfs.get`. - - [`ipfs.files.getPullStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesgetpullstream) + - [`ipfs.files.getPullStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesgetpullstream) - [`ipfs.files.getReadableStream(ipfsPath, [options])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesgetreadablestream) - [`ipfs.ls(ipfsPath, [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#ls) - [`ipfs.lsPullStream(ipfsPath)`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#lspullstream) @@ -386,7 +520,7 @@ The core API is grouped into several areas: - [`ipfs.block.put(block, cid, [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/BLOCK.md#put) - [`ipfs.block.stat(cid, [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/BLOCK.md#stat) -#### `Graph` +#### Graph - [dag](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/DAG.md) - [`ipfs.dag.put(dagNode, options, callback)`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/DAG.md#dagput) @@ -404,9 +538,13 @@ The core API is grouped into several areas: - [`ipfs.object.patch.rmLink(multihash, DAGLink, [options, callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/OBJECT.md#objectpatchrmlink) - [`ipfs.object.patch.appendData(multihash, data, [options, callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/OBJECT.md#objectpatchappenddata) - [`ipfs.object.patch.setData(multihash, data, [options, callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/OBJECT.md#objectpatchsetdata) -- [pin (not implemented, yet!)](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/) -#### `Crypto and Key Management` +- [pin](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/PIN.md) + - [`ipfs.pin.add(hash, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/PIN.md#pinadd) + - [`ipfs.pin.ls([hash], [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/PIN.md#pinls) + - [`ipfs.pin.rm(hash, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/PIN.md#pinrm) + +#### Crypto and Key Management - [key](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/KEY.md) - `ipfs.key.export(name, password, [callback])` @@ -417,7 +555,7 @@ The core API is grouped 
into several areas: - `ipfs.key.rm(name, [callback])` - [crypto (not yet implemented)](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC) -#### `Network` +#### Network - [bootstrap](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/) - `ipfs.bootstrap.list` @@ -449,7 +587,7 @@ The core API is grouped into several areas: - [`ipfs.swarm.disconnect(addr, [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/SWARM.md#disconnect) - [`ipfs.swarm.peers([opts] [, callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/SWARM.md#peers) -#### `Node Management` +#### Node Management - [miscellaneous operations](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/MISCELLANEOUS.md) - [`ipfs.id([callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/MISCELLANEOUS.md#id) @@ -478,7 +616,7 @@ The core API is grouped into several areas: - [`ipfs.config.set(key, value, [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/CONFIG.md#configset) - [`ipfs.config.replace(config, [callback])`](https://github.com/ipfs/interface-ipfs-core/tree/master/SPEC/CONFIG.md#configreplace) -#### `Domain data types` +#### Domain data types A set of data types are exposed directly from the IPFS instance under `ipfs.types`. That way you're not required to import/require the following. @@ -492,7 +630,7 @@ A set of data types are exposed directly from the IPFS instance under `ipfs.type - [`ipfs.types.dagPB`](https://github.com/ipld/js-ipld-dag-pb) - [`ipfs.types.dagCBOR`](https://github.com/ipld/js-ipld-dag-cbor) -#### `Util` +#### Util A set of utils are exposed directly from the IPFS instance under `ipfs.util`. That way you're not required to import/require the following: @@ -682,59 +820,56 @@ $ curl --silent localhost:5002/api/v0/id | jq .ID ## Packages -Listing of the main packages used in the IPFS ecosystem. There are also three -specifications worth linking here: +Listing of the main packages used in the IPFS ecosystem. There are also three specifications worth linking here: - [`interface-ipfs-core`](https://github.com/ipfs/interface-ipfs-core) - [`http-api-spec`](https://github.com/ipfs/http-api-spec) - [`cli spec`](https://github.com/ipfs/specs/tree/master/public-api/cli) - +> This table is generated using the module `package-table` with `package-table --data=package-list.json`. 
+ | Package | Version | Deps | CI | Coverage | | ---------|---------|---------|---------|--------- | | **Files** | -| [`ipfs-unixfs-engine`](//github.com/ipfs/js-ipfs-unixfs-engine) | [![npm](https://img.shields.io/npm/v/ipfs-unixfs-engine.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-unixfs-engine/releases) | [![Dep](https://david-dm.org/ipfs/js-ipfs-unixfs-engine.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-unixfs-engine) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/js-ipfs-unixfs-engine/master)](https://ci.ipfs.team/job/ipfs/job/js-ipfs-unixfs-engine/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/js-ipfs-unixfs-engine/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-unixfs-engine) | +| [`ipfs-unixfs-engine`](//github.com/ipfs/js-ipfs-unixfs-engine) | [![npm](https://img.shields.io/npm/v/ipfs-unixfs-engine.svg?maxAge=86400&style=flat)](//github.com/ipfs/js-ipfs-unixfs-engine/releases) | [![Dep](https://david-dm.org/ipfs/js-ipfs-unixfs-engine.svg?style=flat)](https://david-dm.org/ipfs/js-ipfs-unixfs-engine) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/js-ipfs-unixfs-engine/master)](https://ci.ipfs.team/job/ipfs/job/js-ipfs-unixfs-engine/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/js-ipfs-unixfs-engine/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-unixfs-engine) | | **DAG** | -| [`ipld`](//github.com/ipld/js-ipld) | [![npm](https://img.shields.io/npm/v/ipld.svg?maxAge=86400&style=flat-square)](//github.com/ipld/js-ipld/releases) | [![Dep](https://david-dm.org/ipld/js-ipld.svg?style=flat-square)](https://david-dm.org/ipld/js-ipld) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipld/js-ipld/master)](https://ci.ipfs.team/job/ipld/job/js-ipld/job/master/) | [![Coverage Status](https://codecov.io/gh/ipld/js-ipld/branch/master/graph/badge.svg)](https://codecov.io/gh/ipld/js-ipld) | -| [`ipld-dag-pb`](//github.com/ipld/js-ipld-dag-pb) | [![npm](https://img.shields.io/npm/v/ipld-dag-pb.svg?maxAge=86400&style=flat-square)](//github.com/ipld/js-ipld-dag-pb/releases) | [![Dep](https://david-dm.org/ipld/js-ipld-dag-pb.svg?style=flat-square)](https://david-dm.org/ipld/js-ipld-dag-pb) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipld/js-ipld-dag-pb/master)](https://ci.ipfs.team/job/ipld/job/js-ipld-dag-pb/job/master/) | [![Coverage Status](https://codecov.io/gh/ipld/js-ipld-dag-pb/branch/master/graph/badge.svg)](https://codecov.io/gh/ipld/js-ipld-dag-pb) | -| [`ipld-dag-cbor`](//github.com/ipld/js-ipld-dag-cbor) | [![npm](https://img.shields.io/npm/v/ipld-dag-cbor.svg?maxAge=86400&style=flat-square)](//github.com/ipld/js-ipld-dag-cbor/releases) | [![Dep](https://david-dm.org/ipld/js-ipld-dag-cbor.svg?style=flat-square)](https://david-dm.org/ipld/js-ipld-dag-cbor) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipld/js-ipld-dag-cbor/master)](https://ci.ipfs.team/job/ipld/job/js-ipld-dag-cbor/job/master/) | [![Coverage Status](https://codecov.io/gh/ipld/js-ipld-dag-cbor/branch/master/graph/badge.svg)](https://codecov.io/gh/ipld/js-ipld-dag-cbor) | +| [`ipld`](//github.com/ipld/js-ipld) | [![npm](https://img.shields.io/npm/v/ipld.svg?maxAge=86400&style=flat)](//github.com/ipld/js-ipld/releases) | [![Dep](https://david-dm.org/ipld/js-ipld.svg?style=flat)](https://david-dm.org/ipld/js-ipld) | [![Build 
Status](https://ci.ipfs.team/buildStatus/icon?job=ipld/js-ipld/master)](https://ci.ipfs.team/job/ipld/job/js-ipld/job/master/) | [![Coverage Status](https://codecov.io/gh/ipld/js-ipld/branch/master/graph/badge.svg)](https://codecov.io/gh/ipld/js-ipld) | +| [`ipld-dag-pb`](//github.com/ipld/js-ipld-dag-pb) | [![npm](https://img.shields.io/npm/v/ipld-dag-pb.svg?maxAge=86400&style=flat)](//github.com/ipld/js-ipld-dag-pb/releases) | [![Dep](https://david-dm.org/ipld/js-ipld-dag-pb.svg?style=flat)](https://david-dm.org/ipld/js-ipld-dag-pb) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipld/js-ipld-dag-pb/master)](https://ci.ipfs.team/job/ipld/job/js-ipld-dag-pb/job/master/) | [![Coverage Status](https://codecov.io/gh/ipld/js-ipld-dag-pb/branch/master/graph/badge.svg)](https://codecov.io/gh/ipld/js-ipld-dag-pb) | +| [`ipld-dag-cbor`](//github.com/ipld/js-ipld-dag-cbor) | [![npm](https://img.shields.io/npm/v/ipld-dag-cbor.svg?maxAge=86400&style=flat)](//github.com/ipld/js-ipld-dag-cbor/releases) | [![Dep](https://david-dm.org/ipld/js-ipld-dag-cbor.svg?style=flat)](https://david-dm.org/ipld/js-ipld-dag-cbor) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipld/js-ipld-dag-cbor/master)](https://ci.ipfs.team/job/ipld/job/js-ipld-dag-cbor/job/master/) | [![Coverage Status](https://codecov.io/gh/ipld/js-ipld-dag-cbor/branch/master/graph/badge.svg)](https://codecov.io/gh/ipld/js-ipld-dag-cbor) | | **Repo** | -| [`ipfs-repo`](//github.com/ipfs/js-ipfs-repo) | [![npm](https://img.shields.io/npm/v/ipfs-repo.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-repo/releases) | [![Dep](https://david-dm.org/ipfs/js-ipfs-repo.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-repo) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/js-ipfs-repo/master)](https://ci.ipfs.team/job/ipfs/job/js-ipfs-repo/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/js-ipfs-repo/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-repo) | +| [`ipfs-repo`](//github.com/ipfs/js-ipfs-repo) | [![npm](https://img.shields.io/npm/v/ipfs-repo.svg?maxAge=86400&style=flat)](//github.com/ipfs/js-ipfs-repo/releases) | [![Dep](https://david-dm.org/ipfs/js-ipfs-repo.svg?style=flat)](https://david-dm.org/ipfs/js-ipfs-repo) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/js-ipfs-repo/master)](https://ci.ipfs.team/job/ipfs/job/js-ipfs-repo/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/js-ipfs-repo/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-repo) | | **Exchange** | -| [`ipfs-block-service`](//github.com/ipfs/js-ipfs-block-service) | [![npm](https://img.shields.io/npm/v/ipfs-block-service.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-block-service/releases) | [![Dep](https://david-dm.org/ipfs/js-ipfs-block-service.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-block-service) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/js-ipfs-block-service/master)](https://ci.ipfs.team/job/ipfs/job/js-ipfs-block-service/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/js-ipfs-block-service/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-block-service) | -| [`ipfs-bitswap`](//github.com/ipfs/js-ipfs-bitswap) | [![npm](https://img.shields.io/npm/v/ipfs-bitswap.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-bitswap/releases) | 
[![Dep](https://david-dm.org/ipfs/js-ipfs-bitswap.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-bitswap) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/js-ipfs-bitswap/master)](https://ci.ipfs.team/job/ipfs/job/js-ipfs-bitswap/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/js-ipfs-bitswap/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-bitswap) | +| [`ipfs-block-service`](//github.com/ipfs/js-ipfs-block-service) | [![npm](https://img.shields.io/npm/v/ipfs-block-service.svg?maxAge=86400&style=flat)](//github.com/ipfs/js-ipfs-block-service/releases) | [![Dep](https://david-dm.org/ipfs/js-ipfs-block-service.svg?style=flat)](https://david-dm.org/ipfs/js-ipfs-block-service) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/js-ipfs-block-service/master)](https://ci.ipfs.team/job/ipfs/job/js-ipfs-block-service/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/js-ipfs-block-service/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-block-service) | +| [`ipfs-bitswap`](//github.com/ipfs/js-ipfs-bitswap) | [![npm](https://img.shields.io/npm/v/ipfs-bitswap.svg?maxAge=86400&style=flat)](//github.com/ipfs/js-ipfs-bitswap/releases) | [![Dep](https://david-dm.org/ipfs/js-ipfs-bitswap.svg?style=flat)](https://david-dm.org/ipfs/js-ipfs-bitswap) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/js-ipfs-bitswap/master)](https://ci.ipfs.team/job/ipfs/job/js-ipfs-bitswap/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/js-ipfs-bitswap/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-bitswap) | | **libp2p** | -| [`libp2p`](//github.com/libp2p/js-libp2p) | [![npm](https://img.shields.io/npm/v/libp2p.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p) | -| [`libp2p-circuit`](//github.com/libp2p/js-libp2p-circuit) | [![npm](https://img.shields.io/npm/v/libp2p-circuit.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p-circuit/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-circuit.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-circuit) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-circuit/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-circuit/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-circuit/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-circuit) | -| [`libp2p-floodsub`](//github.com/libp2p/js-libp2p-floodsub) | [![npm](https://img.shields.io/npm/v/libp2p-floodsub.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p-floodsub/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-floodsub.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-floodsub) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-floodsub/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-floodsub/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-floodsub/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-floodsub) | -| 
[`libp2p-kad-dht`](//github.com/libp2p/js-libp2p-kad-dht) | [![npm](https://img.shields.io/npm/v/libp2p-kad-dht.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p-kad-dht/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-kad-dht.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-kad-dht) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-kad-dht/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-kad-dht/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-kad-dht/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-kad-dht) | -| [`libp2p-mdns`](//github.com/libp2p/js-libp2p-mdns) | [![npm](https://img.shields.io/npm/v/libp2p-mdns.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p-mdns/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-mdns.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-mdns) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-mdns/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-mdns/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-mdns/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-mdns) | -| [`libp2p-mplex`](//github.com/libp2p/js-libp2p-mplex) | [![npm](https://img.shields.io/npm/v/libp2p-mplex.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p-mplex/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-mplex.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-mplex) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-mplex/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-mplex/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-mplex/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-mplex) | -| [`libp2p-railing`](//github.com/libp2p/js-libp2p-railing) | [![npm](https://img.shields.io/npm/v/libp2p-railing.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p-railing/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-railing.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-railing) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-railing/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-railing/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-railing/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-railing) | -| [`libp2p-secio`](//github.com/libp2p/js-libp2p-secio) | [![npm](https://img.shields.io/npm/v/libp2p-secio.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p-secio/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-secio.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-secio) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-secio/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-secio/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-secio/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-secio) | -| [`libp2p-tcp`](//github.com/libp2p/js-libp2p-tcp) | [![npm](https://img.shields.io/npm/v/libp2p-tcp.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p-tcp/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-tcp.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-tcp) | [![Build 
Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-tcp/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-tcp/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-tcp/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-tcp) | -| [`libp2p-webrtc-star`](//github.com/libp2p/js-libp2p-webrtc-star) | [![npm](https://img.shields.io/npm/v/libp2p-webrtc-star.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p-webrtc-star/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-webrtc-star.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-webrtc-star) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-webrtc-star/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-webrtc-star/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-webrtc-star/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-webrtc-star) | -| [`libp2p-websocket-star`](//github.com/libp2p/js-libp2p-websocket-star) | [![npm](https://img.shields.io/npm/v/libp2p-websocket-star.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p-websocket-star/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-websocket-star.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-websocket-star) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-websocket-star/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-websocket-star/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-websocket-star/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-websocket-star) | -| [`libp2p-websockets`](//github.com/libp2p/js-libp2p-websockets) | [![npm](https://img.shields.io/npm/v/libp2p-websockets.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p-websockets/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-websockets.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-websockets) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-websockets/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-websockets/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-websockets/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-websockets) | +| [`libp2p`](//github.com/libp2p/js-libp2p) | [![npm](https://img.shields.io/npm/v/libp2p.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p) | +| [`libp2p-circuit`](//github.com/libp2p/js-libp2p-circuit) | [![npm](https://img.shields.io/npm/v/libp2p-circuit.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p-circuit/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-circuit.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p-circuit) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-circuit/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-circuit/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-circuit/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-circuit) | +| 
[`libp2p-floodsub`](//github.com/libp2p/js-libp2p-floodsub) | [![npm](https://img.shields.io/npm/v/libp2p-floodsub.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p-floodsub/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-floodsub.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p-floodsub) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-floodsub/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-floodsub/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-floodsub/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-floodsub) | +| [`libp2p-kad-dht`](//github.com/libp2p/js-libp2p-kad-dht) | [![npm](https://img.shields.io/npm/v/libp2p-kad-dht.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p-kad-dht/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-kad-dht.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p-kad-dht) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-kad-dht/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-kad-dht/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-kad-dht/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-kad-dht) | +| [`libp2p-mdns`](//github.com/libp2p/js-libp2p-mdns) | [![npm](https://img.shields.io/npm/v/libp2p-mdns.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p-mdns/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-mdns.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p-mdns) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-mdns/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-mdns/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-mdns/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-mdns) | +| [`libp2p-mplex`](//github.com/libp2p/js-libp2p-mplex) | [![npm](https://img.shields.io/npm/v/libp2p-mplex.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p-mplex/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-mplex.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p-mplex) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-mplex/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-mplex/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-mplex/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-mplex) | +| [`libp2p-railing`](//github.com/libp2p/js-libp2p-railing) | [![npm](https://img.shields.io/npm/v/libp2p-railing.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p-railing/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-railing.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p-railing) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-railing/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-railing/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-railing/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-railing) | +| [`libp2p-secio`](//github.com/libp2p/js-libp2p-secio) | [![npm](https://img.shields.io/npm/v/libp2p-secio.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p-secio/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-secio.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p-secio) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-secio/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-secio/job/master/) | 
[![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-secio/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-secio) | +| [`libp2p-tcp`](//github.com/libp2p/js-libp2p-tcp) | [![npm](https://img.shields.io/npm/v/libp2p-tcp.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p-tcp/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-tcp.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p-tcp) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-tcp/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-tcp/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-tcp/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-tcp) | +| [`libp2p-webrtc-star`](//github.com/libp2p/js-libp2p-webrtc-star) | [![npm](https://img.shields.io/npm/v/libp2p-webrtc-star.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p-webrtc-star/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-webrtc-star.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p-webrtc-star) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-webrtc-star/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-webrtc-star/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-webrtc-star/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-webrtc-star) | +| [`libp2p-websocket-star`](//github.com/libp2p/js-libp2p-websocket-star) | [![npm](https://img.shields.io/npm/v/libp2p-websocket-star.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p-websocket-star/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-websocket-star.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p-websocket-star) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-websocket-star/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-websocket-star/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-websocket-star/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-websocket-star) | +| [`libp2p-websockets`](//github.com/libp2p/js-libp2p-websockets) | [![npm](https://img.shields.io/npm/v/libp2p-websockets.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p-websockets/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-websockets.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p-websockets) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-websockets/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-websockets/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-websockets/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-websockets) | | **Data Types** | -| [`ipfs-block`](//github.com/ipfs/js-ipfs-block) | [![npm](https://img.shields.io/npm/v/ipfs-block.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-block/releases) | [![Dep](https://david-dm.org/ipfs/js-ipfs-block.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-block) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/js-ipfs-block/master)](https://ci.ipfs.team/job/ipfs/job/js-ipfs-block/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/js-ipfs-block/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-block) | -| [`ipfs-unixfs`](//github.com/ipfs/js-ipfs-unixfs) | [![npm](https://img.shields.io/npm/v/ipfs-unixfs.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-unixfs/releases) | 
[![Dep](https://david-dm.org/ipfs/js-ipfs-unixfs.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-unixfs) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/js-ipfs-unixfs/master)](https://ci.ipfs.team/job/ipfs/job/js-ipfs-unixfs/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/js-ipfs-unixfs/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-unixfs) | -| [`peer-id`](//github.com/libp2p/js-peer-id) | [![npm](https://img.shields.io/npm/v/peer-id.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-peer-id/releases) | [![Dep](https://david-dm.org/libp2p/js-peer-id.svg?style=flat-square)](https://david-dm.org/libp2p/js-peer-id) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-peer-id/master)](https://ci.ipfs.team/job/libp2p/job/js-peer-id/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-peer-id/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-peer-id) | -| [`peer-info`](//github.com/libp2p/js-peer-info) | [![npm](https://img.shields.io/npm/v/peer-info.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-peer-info/releases) | [![Dep](https://david-dm.org/libp2p/js-peer-info.svg?style=flat-square)](https://david-dm.org/libp2p/js-peer-info) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-peer-info/master)](https://ci.ipfs.team/job/libp2p/job/js-peer-info/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-peer-info/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-peer-info) | -| [`multiaddr`](//github.com/multiformats/js-multiaddr) | [![npm](https://img.shields.io/npm/v/multiaddr.svg?maxAge=86400&style=flat-square)](//github.com/multiformats/js-multiaddr/releases) | [![Dep](https://david-dm.org/multiformats/js-multiaddr.svg?style=flat-square)](https://david-dm.org/multiformats/js-multiaddr) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=multiformats/js-multiaddr/master)](https://ci.ipfs.team/job/multiformats/job/js-multiaddr/job/master/) | [![Coverage Status](https://codecov.io/gh/multiformats/js-multiaddr/branch/master/graph/badge.svg)](https://codecov.io/gh/multiformats/js-multiaddr) | -| [`multihashes`](//github.com/multiformats/js-multihash) | [![npm](https://img.shields.io/npm/v/multihashes.svg?maxAge=86400&style=flat-square)](//github.com/multiformats/js-multihash/releases) | [![Dep](https://david-dm.org/multiformats/js-multihash.svg?style=flat-square)](https://david-dm.org/multiformats/js-multihash) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=multiformats/js-multihash/master)](https://ci.ipfs.team/job/multiformats/job/js-multihash/job/master/) | [![Coverage Status](https://codecov.io/gh/multiformats/js-multihash/branch/master/graph/badge.svg)](https://codecov.io/gh/multiformats/js-multihash) | +| [`ipfs-block`](//github.com/ipfs/js-ipfs-block) | [![npm](https://img.shields.io/npm/v/ipfs-block.svg?maxAge=86400&style=flat)](//github.com/ipfs/js-ipfs-block/releases) | [![Dep](https://david-dm.org/ipfs/js-ipfs-block.svg?style=flat)](https://david-dm.org/ipfs/js-ipfs-block) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/js-ipfs-block/master)](https://ci.ipfs.team/job/ipfs/job/js-ipfs-block/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/js-ipfs-block/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-block) | +| [`ipfs-unixfs`](//github.com/ipfs/js-ipfs-unixfs) | 
[![npm](https://img.shields.io/npm/v/ipfs-unixfs.svg?maxAge=86400&style=flat)](//github.com/ipfs/js-ipfs-unixfs/releases) | [![Dep](https://david-dm.org/ipfs/js-ipfs-unixfs.svg?style=flat)](https://david-dm.org/ipfs/js-ipfs-unixfs) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/js-ipfs-unixfs/master)](https://ci.ipfs.team/job/ipfs/job/js-ipfs-unixfs/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/js-ipfs-unixfs/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-unixfs) | +| [`peer-id`](//github.com/libp2p/js-peer-id) | [![npm](https://img.shields.io/npm/v/peer-id.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-peer-id/releases) | [![Dep](https://david-dm.org/libp2p/js-peer-id.svg?style=flat)](https://david-dm.org/libp2p/js-peer-id) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-peer-id/master)](https://ci.ipfs.team/job/libp2p/job/js-peer-id/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-peer-id/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-peer-id) | +| [`peer-info`](//github.com/libp2p/js-peer-info) | [![npm](https://img.shields.io/npm/v/peer-info.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-peer-info/releases) | [![Dep](https://david-dm.org/libp2p/js-peer-info.svg?style=flat)](https://david-dm.org/libp2p/js-peer-info) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-peer-info/master)](https://ci.ipfs.team/job/libp2p/job/js-peer-info/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-peer-info/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-peer-info) | +| [`multiaddr`](//github.com/multiformats/js-multiaddr) | [![npm](https://img.shields.io/npm/v/multiaddr.svg?maxAge=86400&style=flat)](//github.com/multiformats/js-multiaddr/releases) | [![Dep](https://david-dm.org/multiformats/js-multiaddr.svg?style=flat)](https://david-dm.org/multiformats/js-multiaddr) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=multiformats/js-multiaddr/master)](https://ci.ipfs.team/job/multiformats/job/js-multiaddr/job/master/) | [![Coverage Status](https://codecov.io/gh/multiformats/js-multiaddr/branch/master/graph/badge.svg)](https://codecov.io/gh/multiformats/js-multiaddr) | +| [`multihashes`](//github.com/multiformats/js-multihash) | [![npm](https://img.shields.io/npm/v/multihashes.svg?maxAge=86400&style=flat)](//github.com/multiformats/js-multihash/releases) | [![Dep](https://david-dm.org/multiformats/js-multihash.svg?style=flat)](https://david-dm.org/multiformats/js-multihash) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=multiformats/js-multihash/master)](https://ci.ipfs.team/job/multiformats/job/js-multihash/job/master/) | [![Coverage Status](https://codecov.io/gh/multiformats/js-multihash/branch/master/graph/badge.svg)](https://codecov.io/gh/multiformats/js-multihash) | | **Crypto** | -| [`libp2p-crypto`](//github.com/libp2p/js-libp2p-crypto) | [![npm](https://img.shields.io/npm/v/libp2p-crypto.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p-crypto/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-crypto.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-crypto) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-crypto/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-crypto/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-crypto/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-crypto) | 
-| [`libp2p-keychain`](//github.com/libp2p/js-libp2p-keychain) | [![npm](https://img.shields.io/npm/v/libp2p-keychain.svg?maxAge=86400&style=flat-square)](//github.com/libp2p/js-libp2p-keychain/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-keychain.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-keychain) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-keychain/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-keychain/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-keychain/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-keychain) | +| [`libp2p-crypto`](//github.com/libp2p/js-libp2p-crypto) | [![npm](https://img.shields.io/npm/v/libp2p-crypto.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p-crypto/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-crypto.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p-crypto) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-crypto/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-crypto/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-crypto/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-crypto) | +| [`libp2p-keychain`](//github.com/libp2p/js-libp2p-keychain) | [![npm](https://img.shields.io/npm/v/libp2p-keychain.svg?maxAge=86400&style=flat)](//github.com/libp2p/js-libp2p-keychain/releases) | [![Dep](https://david-dm.org/libp2p/js-libp2p-keychain.svg?style=flat)](https://david-dm.org/libp2p/js-libp2p-keychain) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=libp2p/js-libp2p-keychain/master)](https://ci.ipfs.team/job/libp2p/job/js-libp2p-keychain/job/master/) | [![Coverage Status](https://codecov.io/gh/libp2p/js-libp2p-keychain/branch/master/graph/badge.svg)](https://codecov.io/gh/libp2p/js-libp2p-keychain) | | **Generics/Utils** | -| [`ipfs-api`](//github.com/ipfs/js-ipfs-api) | [![npm](https://img.shields.io/npm/v/ipfs-api.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/js-ipfs-api/releases) | [![Dep](https://david-dm.org/ipfs/js-ipfs-api.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-api) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/js-ipfs-api/master)](https://ci.ipfs.team/job/ipfs/job/js-ipfs-api/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/js-ipfs-api/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-api) | -| [`ipfs-multipart`](//github.com/ipfs/ipfs-multipart) | [![npm](https://img.shields.io/npm/v/ipfs-multipart.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/ipfs-multipart/releases) | [![Dep](https://david-dm.org/ipfs/ipfs-multipart.svg?style=flat-square)](https://david-dm.org/ipfs/ipfs-multipart) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/ipfs-multipart/master)](https://ci.ipfs.team/job/ipfs/job/ipfs-multipart/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/ipfs-multipart/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/ipfs-multipart) | -| [`is-ipfs`](//github.com/ipfs/is-ipfs) | [![npm](https://img.shields.io/npm/v/is-ipfs.svg?maxAge=86400&style=flat-square)](//github.com/ipfs/is-ipfs/releases) | [![Dep](https://david-dm.org/ipfs/is-ipfs.svg?style=flat-square)](https://david-dm.org/ipfs/is-ipfs) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/is-ipfs/master)](https://ci.ipfs.team/job/ipfs/job/is-ipfs/job/master/) | [![Coverage 
Status](https://codecov.io/gh/ipfs/is-ipfs/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/is-ipfs) | -| [`multihashing`](//github.com/multiformats/js-multihashing) | [![npm](https://img.shields.io/npm/v/multihashing.svg?maxAge=86400&style=flat-square)](//github.com/multiformats/js-multihashing/releases) | [![Dep](https://david-dm.org/multiformats/js-multihashing.svg?style=flat-square)](https://david-dm.org/multiformats/js-multihashing) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=multiformats/js-multihashing/master)](https://ci.ipfs.team/job/multiformats/job/js-multihashing/job/master/) | [![Coverage Status](https://codecov.io/gh/multiformats/js-multihashing/branch/master/graph/badge.svg)](https://codecov.io/gh/multiformats/js-multihashing) | -| [`mafmt`](//github.com/multiformats/js-mafmt) | [![npm](https://img.shields.io/npm/v/mafmt.svg?maxAge=86400&style=flat-square)](//github.com/multiformats/js-mafmt/releases) | [![Dep](https://david-dm.org/multiformats/js-mafmt.svg?style=flat-square)](https://david-dm.org/multiformats/js-mafmt) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=multiformats/js-mafmt/master)](https://ci.ipfs.team/job/multiformats/job/js-mafmt/job/master/) | [![Coverage Status](https://codecov.io/gh/multiformats/js-mafmt/branch/master/graph/badge.svg)](https://codecov.io/gh/multiformats/js-mafmt) | +| [`ipfs-api`](//github.com/ipfs/js-ipfs-api) | [![npm](https://img.shields.io/npm/v/ipfs-api.svg?maxAge=86400&style=flat)](//github.com/ipfs/js-ipfs-api/releases) | [![Dep](https://david-dm.org/ipfs/js-ipfs-api.svg?style=flat)](https://david-dm.org/ipfs/js-ipfs-api) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/js-ipfs-api/master)](https://ci.ipfs.team/job/ipfs/job/js-ipfs-api/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/js-ipfs-api/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-api) | +| [`ipfs-multipart`](//github.com/ipfs/ipfs-multipart) | [![npm](https://img.shields.io/npm/v/ipfs-multipart.svg?maxAge=86400&style=flat)](//github.com/ipfs/ipfs-multipart/releases) | [![Dep](https://david-dm.org/ipfs/ipfs-multipart.svg?style=flat)](https://david-dm.org/ipfs/ipfs-multipart) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/ipfs-multipart/master)](https://ci.ipfs.team/job/ipfs/job/ipfs-multipart/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/ipfs-multipart/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/ipfs-multipart) | +| [`is-ipfs`](//github.com/ipfs/is-ipfs) | [![npm](https://img.shields.io/npm/v/is-ipfs.svg?maxAge=86400&style=flat)](//github.com/ipfs/is-ipfs/releases) | [![Dep](https://david-dm.org/ipfs/is-ipfs.svg?style=flat)](https://david-dm.org/ipfs/is-ipfs) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=ipfs/is-ipfs/master)](https://ci.ipfs.team/job/ipfs/job/is-ipfs/job/master/) | [![Coverage Status](https://codecov.io/gh/ipfs/is-ipfs/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/is-ipfs) | +| [`multihashing`](//github.com/multiformats/js-multihashing) | [![npm](https://img.shields.io/npm/v/multihashing.svg?maxAge=86400&style=flat)](//github.com/multiformats/js-multihashing/releases) | [![Dep](https://david-dm.org/multiformats/js-multihashing.svg?style=flat)](https://david-dm.org/multiformats/js-multihashing) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=multiformats/js-multihashing/master)](https://ci.ipfs.team/job/multiformats/job/js-multihashing/job/master/) | [![Coverage 
Status](https://codecov.io/gh/multiformats/js-multihashing/branch/master/graph/badge.svg)](https://codecov.io/gh/multiformats/js-multihashing) | +| [`mafmt`](//github.com/multiformats/js-mafmt) | [![npm](https://img.shields.io/npm/v/mafmt.svg?maxAge=86400&style=flat)](//github.com/multiformats/js-mafmt/releases) | [![Dep](https://david-dm.org/multiformats/js-mafmt.svg?style=flat)](https://david-dm.org/multiformats/js-mafmt) | [![Build Status](https://ci.ipfs.team/buildStatus/icon?job=multiformats/js-mafmt/master)](https://ci.ipfs.team/job/multiformats/job/js-mafmt/job/master/) | [![Coverage Status](https://codecov.io/gh/multiformats/js-mafmt/branch/master/graph/badge.svg)](https://codecov.io/gh/multiformats/js-mafmt) | ## Development diff --git a/RELEASE.md b/RELEASE.md index 576ce9a6fe..bfd2ecb52e 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -31,7 +31,7 @@ - [ ] Reddit - [ ] Blog post -# 🙌🏽Want to contribute? +# 🙌🏽 Want to contribute? Would you like to contribute to the IPFS project and don't know how? Well, there are a few places you can get started: diff --git a/examples/ipfs-101/1.js b/examples/ipfs-101/1.js index 9f206d543a..664af3d90c 100644 --- a/examples/ipfs-101/1.js +++ b/examples/ipfs-101/1.js @@ -1,32 +1,22 @@ 'use strict' -const series = require('async/series') const IPFS = require('ipfs') const node = new IPFS() -let fileMultihash - -series([ - (cb) => node.on('ready', cb), - (cb) => node.version((err, version) => { - if (err) { return cb(err) } - console.log('Version:', version.version) - cb() - }), - (cb) => node.files.add({ + +node.on('ready', async () => { + const version = await node.version() + + console.log('Version:', version.version) + + const filesAdded = await node.files.add({ path: 'hello.txt', content: Buffer.from('Hello World 101') - }, (err, filesAdded) => { - if (err) { return cb(err) } - - console.log('\nAdded file:', filesAdded[0].path, filesAdded[0].hash) - fileMultihash = filesAdded[0].hash - cb() - }), - (cb) => node.files.cat(fileMultihash, (err, data) => { - if (err) { return cb(err) } - - console.log('\nFile content:') - process.stdout.write(data) }) -]) + + console.log('Added file:', filesAdded[0].path, filesAdded[0].hash) + + const fileBuffer = await node.files.cat(filesAdded[0].hash) + + console.log('Added file contents:', fileBuffer.toString()) +}) diff --git a/examples/ipfs-101/README.md b/examples/ipfs-101/README.md index df44debe7e..98e969f534 100644 --- a/examples/ipfs-101/README.md +++ b/examples/ipfs-101/README.md @@ -1,80 +1,91 @@ # IPFS 101, spawn a node and add a file to the IPFS network -In this tutorial, we go through spawning an IPFS node, adding a file and cat'ing the file multihash locally and throught the gateway. +In this tutorial, we go through spawning an IPFS node, adding a file and cat'ing the file multihash locally and through the gateway. -You can find a complete version of this tutorial in [1.js](./1.js). For this tutorial, you need to install the following dependencies: `ipfs` and `async` using `npm install ipfs async`. +You can find a complete version of this tutorial in [1.js](./1.js). For this tutorial, you need to install `ipfs` using `npm install ipfs`. Creating an IPFS instance can be done in one line, after requiring the module, you simply have to: -```JavaScript +```js const IPFS = require('ipfs') const node = new IPFS() ``` -We can listen for the `ready` event to learn when the node is ready to be used. In this part, we start using `async/series` to help us manage the async flow. 
As a test, we are going to check the version of the node. +We can listen for the `ready` event to learn when the node is ready to be used. Within the ready event, we'll use `async`/`await` to help us manage the async flow. -```JavaScript +As a test, we are going to check the version of the node. + +```js const IPFS = require('ipfs') const node = new IPFS() -series([ - (cb) => node.on('ready', cb), - (cb) => node.version((err, version) => { - if (err) { return cb(err) } - console.log('Version:', version.version) - cb() - }) -]) +node.on('ready', async () => { + const version = await node.version() + + console.log('Version:', version.version) +}) ``` +(If you prefer not to use `async`/`await`, you can instead use `.then()` as you would with any promise, +or pass an [error-first callback](https://nodejs.org/api/errors.html#errors_error_first_callbacks), e.g. `node.version((err, version) => { ... })`) + Running the code above gets you: ```bash > node 1.js -IPFS Version: 0.25.0 +Version: 0.31.2 ``` -Now lets make it more interesting and add a file to IPFS. We can do it by adding another async call to the series that uses the `node.files.add` call. You can learn about IPFS API for files at [interface-ipfs-core](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md). - -```JavaScript -// Create the File to add, a file consists of a path + content. More details on -// https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md -(cb) => node.files.add({ - path: 'hello.txt', - content: Buffer.from('Hello World') -}, (err, filesAdded) => { - if (err) { return cb(err) } - - // Once the file is added, we get back an object containing the path, the - // multihash and the sie of the file - console.log('\nAdded file:', filesAdded[0].path, filesAdded[0].hash) - fileMultihash = filesAdded[0].hash - cb() +Now let's make it more interesting and add a file to IPFS using `node.files.add`. A file consists of a path and content. + +You can learn about the IPFS File API at [interface-ipfs-core](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md). + +```js +node.on('ready', async () => { + const version = await node.version() + + console.log('Version:', version.version) + + const filesAdded = await node.files.add({ + path: 'hello.txt', + content: Buffer.from('Hello World 101') + }) + + console.log('Added file:', filesAdded[0].path, filesAdded[0].hash) }) ``` -If you avoid calling that last `cb()`, the program won't exit enabling you to go to an IPFS Gateway and load the printed hash from a gateway. Go ahead and try it! +You can now go to an IPFS Gateway and load the printed hash from a gateway. Go ahead and try it! ```bash > node 1.js -Version: 0.25.0 +Version: 0.31.2 Added file: hello.txt QmXgZAUWd8yo4tvjBETqzUy3wLx5YRzuDwUQnBwRGrAmAo # Copy that hash and load it on the gateway, here is a prefiled url: # https://ipfs.io/ipfs/QmXgZAUWd8yo4tvjBETqzUy3wLx5YRzuDwUQnBwRGrAmAo ``` -The last step of this tutorial is retrieving the file back using the `cat` 😺 call. Add another step on the series chain that does the following: +The last step of this tutorial is retrieving the file back using the `cat` 😺 call. 
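Before the final `async`/`await` version of the program (shown next), here is a minimal sketch of the same flow written with the error-first callback style mentioned earlier. It only assumes the callback signatures of `version`, `files.add` and `files.cat` already used in this tutorial:

```js
const IPFS = require('ipfs')
const node = new IPFS()

node.on('ready', () => {
  node.version((err, version) => {
    if (err) { throw err }
    console.log('Version:', version.version)

    node.files.add({
      path: 'hello.txt',
      content: Buffer.from('Hello World 101')
    }, (err, filesAdded) => {
      if (err) { throw err }
      console.log('Added file:', filesAdded[0].path, filesAdded[0].hash)

      // retrieve the file we just added and print its contents
      node.files.cat(filesAdded[0].hash, (err, fileBuffer) => {
        if (err) { throw err }
        console.log('Added file contents:', fileBuffer.toString())
      })
    })
  })
})
```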
+ +```js +node.on('ready', async () => { + const version = await node.version() + + console.log('Version:', version.version) + + const filesAdded = await node.files.add({ + path: 'hello.txt', + content: Buffer.from('Hello World 101') + }) + + console.log('Added file:', filesAdded[0].path, filesAdded[0].hash) -```JavaScript -(cb) => node.files.cat(fileMultihash, (err, data) => { - if (err) { return cb(err) } + const fileBuffer = await node.files.cat(filesAdded[0].hash) - console.log('\nFile content:') - // print the file to the terminal and then exit the program - process.stdout.write(data) + console.log('Added file contents:', fileBuffer.toString()) }) ``` diff --git a/examples/ipfs-101/package.json b/examples/ipfs-101/package.json index 4973b2c618..7b77539945 100644 --- a/examples/ipfs-101/package.json +++ b/examples/ipfs-101/package.json @@ -9,7 +9,6 @@ "author": "David Dias ", "license": "MIT", "dependencies": { - "async": "^2.6.0", "ipfs": "file:../../" } } diff --git a/package-list.json b/package-list.json new file mode 100644 index 0000000000..7259091d66 --- /dev/null +++ b/package-list.json @@ -0,0 +1,58 @@ +{ + "columns": [ + "Package", + "Version", + "Deps", + "CI", + "Coverage" + ], + "rows": [ + "Files", + ["ipfs/js-ipfs-unixfs-engine", "ipfs-unixfs-engine"], + + "DAG", + ["ipld/js-ipld", "ipld"], + ["ipld/js-ipld-dag-pb", "ipld-dag-pb"], + ["ipld/js-ipld-dag-cbor", "ipld-dag-cbor"], + + "Repo", + ["ipfs/js-ipfs-repo", "ipfs-repo"], + + "Exchange", + ["ipfs/js-ipfs-block-service", "ipfs-block-service"], + ["ipfs/js-ipfs-bitswap", "ipfs-bitswap"], + + "libp2p", + ["libp2p/js-libp2p", "libp2p"], + ["libp2p/js-libp2p-circuit", "libp2p-circuit"], + ["libp2p/js-libp2p-floodsub", "libp2p-floodsub"], + ["libp2p/js-libp2p-kad-dht", "libp2p-kad-dht"], + ["libp2p/js-libp2p-mdns", "libp2p-mdns"], + ["libp2p/js-libp2p-mplex", "libp2p-mplex"], + ["libp2p/js-libp2p-railing", "libp2p-railing"], + ["libp2p/js-libp2p-secio", "libp2p-secio"], + ["libp2p/js-libp2p-tcp", "libp2p-tcp"], + ["libp2p/js-libp2p-webrtc-star", "libp2p-webrtc-star"], + ["libp2p/js-libp2p-websocket-star", "libp2p-websocket-star"], + ["libp2p/js-libp2p-websockets", "libp2p-websockets"], + + "Data Types", + ["ipfs/js-ipfs-block", "ipfs-block"], + ["ipfs/js-ipfs-unixfs", "ipfs-unixfs"], + ["libp2p/js-peer-id", "peer-id"], + ["libp2p/js-peer-info", "peer-info"], + ["multiformats/js-multiaddr", "multiaddr"], + ["multiformats/js-multihash", "multihashes"], + + "Crypto", + ["libp2p/js-libp2p-crypto", "libp2p-crypto"], + ["libp2p/js-libp2p-keychain", "libp2p-keychain"], + + "Generics/Utils", + ["ipfs/js-ipfs-api", "ipfs-api"], + ["ipfs/ipfs-multipart", "ipfs-multipart"], + ["ipfs/is-ipfs", "is-ipfs"], + ["multiformats/js-multihashing", "multihashing"], + ["multiformats/js-mafmt", "mafmt"] + ] +} diff --git a/package.json b/package.json index 1c657514af..66d2c0dd5c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "ipfs", - "version": "0.30.1", + "version": "0.31.7", "description": "JavaScript implementation of the IPFS specification", "leadMaintainer": "Alan Shaw ", "bin": { @@ -11,6 +11,7 @@ "./src/core/components/init-assets.js": false, "./src/core/runtime/config-nodejs.js": "./src/core/runtime/config-browser.js", "./src/core/runtime/libp2p-nodejs.js": "./src/core/runtime/libp2p-browser.js", + "./src/core/runtime/preload-nodejs.js": "./src/core/runtime/preload-browser.js", "./src/core/runtime/repo-nodejs.js": "./src/core/runtime/repo-browser.js", "./src/core/runtime/dns-nodejs.js": 
"./src/core/runtime/dns-browser.js", "./test/utils/create-repo-nodejs.js": "./test/utils/create-repo-browser.js", @@ -57,7 +58,7 @@ }, "homepage": "https://github.com/ipfs/js-ipfs#readme", "devDependencies": { - "aegir": "^15.0.0", + "aegir": "^15.1.0", "buffer-loader": "~0.0.1", "chai": "^4.1.2", "delay": "^3.0.0", @@ -69,8 +70,8 @@ "expose-loader": "~0.7.5", "form-data": "^2.3.2", "hat": "0.0.3", - "interface-ipfs-core": "~0.72.0", - "ipfsd-ctl": "~0.37.5", + "interface-ipfs-core": "~0.76.1", + "ipfsd-ctl": "~0.39.1", "mocha": "^5.2.0", "ncp": "^2.0.0", "nexpect": "~0.5.0", @@ -92,7 +93,8 @@ "byteman": "^1.3.5", "cids": "~0.5.3", "debug": "^3.1.0", - "file-type": "^8.0.0", + "err-code": "^1.1.2", + "file-type": "^8.1.0", "filesize": "^3.6.1", "fnv1a": "^1.0.1", "fsm-event": "^2.1.0", @@ -103,30 +105,30 @@ "hoek": "^5.0.3", "human-to-milliseconds": "^1.0.0", "interface-datastore": "~0.4.2", - "ipfs-api": "^22.2.1", - "ipfs-bitswap": "~0.20.2", + "ipfs-api": "^24.0.0", + "ipfs-bitswap": "~0.20.3", "ipfs-block": "~0.7.1", "ipfs-block-service": "~0.14.0", "ipfs-http-response": "~0.1.2", - "ipfs-mfs": "~0.2.2", + "ipfs-mfs": "~0.3.0", "ipfs-multipart": "~0.1.0", - "ipfs-repo": "~0.22.1", + "ipfs-repo": "~0.23.1", "ipfs-unixfs": "~0.1.15", - "ipfs-unixfs-engine": "~0.31.2", + "ipfs-unixfs-engine": "~0.32.3", "ipld": "~0.17.3", "ipld-dag-cbor": "~0.12.1", - "ipld-dag-pb": "~0.14.5", - "is-ipfs": "~0.3.2", + "ipld-dag-pb": "~0.14.6", + "is-ipfs": "~0.4.2", "is-pull-stream": "~0.0.0", "is-stream": "^1.1.0", "joi": "^13.4.0", "joi-browser": "^13.4.0", "joi-multiaddr": "^2.0.0", - "libp2p": "~0.22.0", + "libp2p": "~0.23.0", "libp2p-bootstrap": "~0.9.3", "libp2p-circuit": "~0.2.0", "libp2p-floodsub": "~0.15.0", - "libp2p-kad-dht": "~0.10.0", + "libp2p-kad-dht": "~0.10.1", "libp2p-keychain": "~0.3.1", "libp2p-mdns": "~0.12.0", "libp2p-mplex": "~0.8.0", @@ -137,9 +139,10 @@ "libp2p-websockets": "~0.12.0", "lodash": "^4.17.10", "mafmt": "^6.0.0", - "mime-types": "^2.1.18", + "mime-types": "^2.1.19", "mkdirp": "~0.5.1", "multiaddr": "^5.0.0", + "multiaddr-to-uri": "^4.0.0", "multibase": "~0.4.0", "multihashes": "~0.4.13", "once": "^1.4.0", @@ -178,6 +181,7 @@ "Alan Shaw ", "Alex Potsides ", "Andrew de Andrade ", + "André Cruz ", "Arpit Agarwal <93arpit@gmail.com>", "Arpit Agarwal ", "Bernard Mordan ", @@ -190,6 +194,7 @@ "Daniel J. O'Quinn ", "Daniela Borges Matos de Carvalho ", "David Dias ", + "David Gilbertson ", "David da Silva ", "Diogo Silva ", "Dmitriy Ryajov ", @@ -258,9 +263,11 @@ "kumavis ", "nginnever ", "npmcdn-to-unpkg-bot ", + "robbsolter <35879806+robbsolter@users.noreply.github.com>", "seungwon-kang ", "tcme ", "Łukasz Magiera ", + "Максим Ильин ", "ᴠɪᴄᴛᴏʀ ʙᴊᴇʟᴋʜᴏʟᴍ " ] } diff --git a/scripts/generate-package-table-for-readme.js b/scripts/generate-package-table-for-readme.js deleted file mode 100755 index 2a9754599e..0000000000 --- a/scripts/generate-package-table-for-readme.js +++ /dev/null @@ -1,115 +0,0 @@ -#! /usr/bin/env node - -// This script generates the table of packages you can see in the readme - -// Columns to show at the header of the table -const columns = [ - 'Package', - 'Version', - 'Deps', - 'CI', - 'Coverage' -] - -// Headings are a string -// Arrays are packages. 
Index 0 is the GitHub repo and index 1 is the npm package -const rows = [ - 'Files', - ['ipfs/js-ipfs-unixfs-engine', 'ipfs-unixfs-engine'], - - 'DAG', - ['ipld/js-ipld', 'ipld'], - ['ipld/js-ipld-dag-pb', 'ipld-dag-pb'], - ['ipld/js-ipld-dag-cbor', 'ipld-dag-cbor'], - - 'Repo', - ['ipfs/js-ipfs-repo', 'ipfs-repo'], - - 'Exchange', - ['ipfs/js-ipfs-block-service', 'ipfs-block-service'], - ['ipfs/js-ipfs-bitswap', 'ipfs-bitswap'], - - 'libp2p', - ['libp2p/js-libp2p', 'libp2p'], - ['libp2p/js-libp2p-circuit', 'libp2p-circuit'], - ['libp2p/js-libp2p-floodsub', 'libp2p-floodsub'], - ['libp2p/js-libp2p-kad-dht', 'libp2p-kad-dht'], - ['libp2p/js-libp2p-mdns', 'libp2p-mdns'], - ['libp2p/js-libp2p-mplex', 'libp2p-mplex'], - ['libp2p/js-libp2p-railing', 'libp2p-railing'], - ['libp2p/js-libp2p-secio', 'libp2p-secio'], - ['libp2p/js-libp2p-tcp', 'libp2p-tcp'], - ['libp2p/js-libp2p-webrtc-star', 'libp2p-webrtc-star'], - ['libp2p/js-libp2p-websocket-star', 'libp2p-websocket-star'], - ['libp2p/js-libp2p-websockets', 'libp2p-websockets'], - - 'Data Types', - ['ipfs/js-ipfs-block', 'ipfs-block'], - ['ipfs/js-ipfs-unixfs', 'ipfs-unixfs'], - ['libp2p/js-peer-id', 'peer-id'], - ['libp2p/js-peer-info', 'peer-info'], - ['multiformats/js-multiaddr', 'multiaddr'], - ['multiformats/js-multihash', 'multihashes'], - - 'Crypto', - ['libp2p/js-libp2p-crypto', 'libp2p-crypto'], - ['libp2p/js-libp2p-keychain', 'libp2p-keychain'], - - 'Generics/Utils', - ['ipfs/js-ipfs-api', 'ipfs-api'], - ['ipfs/ipfs-multipart', 'ipfs-multipart'], - ['ipfs/is-ipfs', 'is-ipfs'], - ['multiformats/js-multihashing', 'multihashing'], - ['multiformats/js-mafmt', 'mafmt'] -] - -const isItemPackage = (item) => { - return Array.isArray(item) -} - -const packageBadges = [ - // Package - (gh, npm) => `[\`${npm}\`](//github.com/${gh})`, - // Version - (gh, npm) => `[![npm](https://img.shields.io/npm/v/${npm}.svg?maxAge=86400&style=flat-square)](//github.com/${gh}/releases)`, - // Deps - (gh, npm) => `[![Deps](https://david-dm.org/${gh}.svg?style=flat-square)](https://david-dm.org/${gh})`, - // CI - (gh, npm) => { - // Need to fix the path for jenkins links, as jenkins adds `/job/` between everything - const jenkinsPath = gh.split('/').join('/job/') - return `[![jenkins](https://ci.ipfs.team/buildStatus/icon?job=${gh}/master)](https://ci.ipfs.team/job/${jenkinsPath}/job/master/)` - }, - // Coverage - (gh, npm) => `[![codecov](https://codecov.io/gh/${gh}/branch/master/graph/badge.svg)](https://codecov.io/gh/${gh})` -] - -// Creates the table row for a package -const generatePackageRow = (item) => { - const row = packageBadges.map((func) => { - // First string is GitHub path, second is npm package name - return func(item[0], item[1]) - }).join(' | ') - const fullRow = `| ${row} |` - return fullRow -} - -// Generates a row for the table, depending if it's a package or a heading -const generateRow = (item) => { - if (isItemPackage(item)) { - return generatePackageRow(item) - } else { - return `| **${item}** |` - } -} - -const header = `| ${columns.join(' | ')} |` -const hr = `| ${columns.map(() => '---------').join('|')} |` - -const toPrint = [ - header, - hr, - rows.map((row) => generateRow(row)).join('\n') -] - -toPrint.forEach((t) => console.log(t)) diff --git a/src/cli/commands/files/add.js b/src/cli/commands/files/add.js index fe2a602a41..e9f87a81d5 100644 --- a/src/cli/commands/files/add.js +++ b/src/cli/commands/files/add.js @@ -135,6 +135,10 @@ module.exports = { default: false, describe: 'Only chunk and hash, do not write' }, + chunker: { + 
default: 'size-262144', + describe: 'Chunking algorithm to use, formatted like [size-{size}, rabin, rabin-{avg}, rabin-{min}-{avg}-{max}]' + }, 'enable-sharding-experiment': { type: 'boolean', default: false @@ -145,12 +149,12 @@ module.exports = { }, 'raw-leaves': { type: 'boolean', - default: undefined, describe: 'Use raw blocks for leaf nodes. (experimental)' }, 'cid-version': { type: 'integer', - describe: 'Cid version. Non-zero value will change default of \'raw-leaves\' to true. (experimental)' + describe: 'CID version. Defaults to 0 unless an option that depends on CIDv1 is passed. (experimental)', + default: 0 }, hash: { type: 'string', @@ -194,35 +198,8 @@ module.exports = { onlyHash: argv.onlyHash, hashAlg: argv.hash, wrapWithDirectory: argv.wrapWithDirectory, - pin: argv.pin - } - - // Temporary restriction on raw-leaves: - // When cid-version=1 then raw-leaves MUST be present and false. - // - // This is because raw-leaves is not yet implemented in js-ipfs, - // and go-ipfs changes the value of raw-leaves to true when - // cid-version > 0 unless explicitly set to false. - // - // This retains feature parity without having to implement raw-leaves. - if (options.cidVersion > 0 && options.rawLeaves !== false) { - throw new Error('Implied argument raw-leaves must be passed and set to false when cid-version is > 0') - } - - // Temporary restriction on raw-leaves: - // When hash != undefined then raw-leaves MUST be present and false. - // - // This is because raw-leaves is not yet implemented in js-ipfs, - // and go-ipfs changes the value of raw-leaves to true when - // hash != undefined unless explicitly set to false. - // - // This retains feature parity without having to implement raw-leaves. - if (options.hash && options.rawLeaves !== false) { - throw new Error('Implied argument raw-leaves must be passed and set to false when hash argument is specified') - } - - if (options.rawLeaves) { - throw new Error('Not implemented: raw-leaves') + pin: argv.pin, + chunker: argv.chunker } if (options.enableShardingExperiment && utils.isDaemonOn()) { diff --git a/src/cli/commands/object/get.js b/src/cli/commands/object/get.js index 629a3a82d1..9bb9e71542 100644 --- a/src/cli/commands/object/get.js +++ b/src/cli/commands/object/get.js @@ -7,7 +7,12 @@ module.exports = { describe: 'Get and serialize the DAG node named by ', - builder: {}, + builder: { + 'data-encoding': { + type: 'string', + default: 'base64' + } + }, handler (argv) { argv.ipfs.object.get(argv.key, {enc: 'base58'}, (err, node) => { @@ -16,7 +21,9 @@ module.exports = { } const nodeJSON = node.toJSON() - nodeJSON.data = nodeJSON.data ? nodeJSON.data.toString() : '' + if (Buffer.isBuffer(node.data)) { + nodeJSON.data = node.data.toString(argv['data-encoding'] || undefined) + } const answer = { Data: nodeJSON.data, diff --git a/src/cli/commands/object/patch/rm-link.js b/src/cli/commands/object/patch/rm-link.js index cac1666ce3..dde69c30f8 100644 --- a/src/cli/commands/object/patch/rm-link.js +++ b/src/cli/commands/object/patch/rm-link.js @@ -1,6 +1,5 @@ 'use strict' -const DAGLink = require('ipld-dag-pb').DAGLink const debug = require('debug') const log = debug('cli:object') log.error = debug('cli:object:error') @@ -14,12 +13,7 @@ module.exports = { builder: {}, handler (argv) { - // TODO rmLink should support removing by name and/or multihash - // without having to know everything, which in fact it does, however, - // since it expectes a DAGLink type, we have to pass some fake size and - // hash. 
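As an aside on the `chunker` option added to `files add` above: a minimal sketch of how a string in the documented format (`size-{size}`, `rabin`, `rabin-{avg}`, `rabin-{min}-{avg}-{max}`) could be parsed into importer options. The produced option names (`chunker`, `minChunkSize`, `avgChunkSize`, `maxChunkSize`) are assumptions for illustration, not necessarily what the project's `parseChunkerString` helper emits:

```js
// Illustrative sketch only — not the project's actual parseChunkerString helper.
function parseChunker (chunker) {
  // default fixed-size chunking
  if (!chunker) {
    return { chunker: 'fixed', maxChunkSize: 262144 }
  }

  const parts = chunker.split('-')

  if (parts[0] === 'size') {
    // e.g. 'size-262144'
    return { chunker: 'fixed', maxChunkSize: parseInt(parts[1], 10) }
  }

  if (parts[0] === 'rabin') {
    // 'rabin', 'rabin-{avg}' or 'rabin-{min}-{avg}-{max}'
    if (parts.length === 1) {
      return { chunker: 'rabin' }
    }
    if (parts.length === 2) {
      return { chunker: 'rabin', avgChunkSize: parseInt(parts[1], 10) }
    }
    return {
      chunker: 'rabin',
      minChunkSize: parseInt(parts[1], 10),
      avgChunkSize: parseInt(parts[2], 10),
      maxChunkSize: parseInt(parts[3], 10)
    }
  }

  throw new Error(`Unrecognized chunker option: ${chunker}`)
}
```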
- const link = new DAGLink(argv.link, 1, 'Qm') - argv.ipfs.object.patch.rmLink(argv.root, link, { + argv.ipfs.object.patch.rmLink(argv.root, { name: argv.link }, { enc: 'base58' }, (err, node) => { if (err) { diff --git a/src/cli/commands/resolve.js b/src/cli/commands/resolve.js new file mode 100644 index 0000000000..c1b7a54991 --- /dev/null +++ b/src/cli/commands/resolve.js @@ -0,0 +1,24 @@ +'use strict' + +const print = require('../utils').print + +module.exports = { + command: 'resolve ', + + description: 'Resolve the value of names to IPFS', + + builder: { + recursive: { + alias: 'r', + type: 'boolean', + default: false + } + }, + + handler (argv) { + argv.ipfs.resolve(argv.name, { recursive: argv.recursive }, (err, res) => { + if (err) throw err + print(res) + }) + } +} diff --git a/src/core/boot.js b/src/core/boot.js index 0602cb90a0..50b2677b5c 100644 --- a/src/core/boot.js +++ b/src/core/boot.js @@ -30,7 +30,10 @@ module.exports = (self) => { (repoOpened, cb) => { // Init with existing initialized, opened, repo if (repoOpened) { - return self.init({ repo: self._repo }, (err) => cb(err)) + return self.init({ repo: self._repo }, (err) => { + if (err) return cb(Object.assign(err, { emitted: true })) + cb() + }) } if (doInit) { @@ -38,7 +41,10 @@ module.exports = (self) => { { bits: 2048, pass: self._options.pass }, typeof options.init === 'object' ? options.init : {} ) - return self.init(initOptions, (err) => cb(err)) + return self.init(initOptions, (err) => { + if (err) return cb(Object.assign(err, { emitted: true })) + cb() + }) } cb() @@ -48,11 +54,18 @@ module.exports = (self) => { if (!doStart) { return cb() } - self.start(cb) + + self.start((err) => { + if (err) return cb(Object.assign(err, { emitted: true })) + cb() + }) } ], (err) => { if (err) { - return self.emit('error', err) + if (!err.emitted) { + self.emit('error', err) + } + return } self.log('booted') self.emit('ready') diff --git a/src/core/components/bitswap.js b/src/core/components/bitswap.js index bd59eb3b16..652135c70a 100644 --- a/src/core/components/bitswap.js +++ b/src/core/components/bitswap.js @@ -6,6 +6,7 @@ const setImmediate = require('async/setImmediate') const Big = require('big.js') const CID = require('cids') const PeerId = require('peer-id') +const errCode = require('err-code') function formatWantlist (list) { return Array.from(list).map((e) => ({ '/': e[1].cid.toBaseEncodedString() })) @@ -69,12 +70,18 @@ module.exports = function bitswap (self) { if (!Array.isArray(keys)) { keys = [keys] } - keys = keys.map((key) => { - if (CID.isCID(key)) { - return key - } - return new CID(key) - }) + + try { + keys = keys.map((key) => { + if (CID.isCID(key)) { + return key + } + return new CID(key) + }) + } catch (err) { + return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + } + return setImmediate(() => callback(null, self._bitswap.unwant(keys))) }) } diff --git a/src/core/components/block.js b/src/core/components/block.js index 89b18db7d3..f350363a01 100644 --- a/src/core/components/block.js +++ b/src/core/components/block.js @@ -5,12 +5,30 @@ const multihash = require('multihashes') const multihashing = require('multihashing-async') const CID = require('cids') const waterfall = require('async/waterfall') +const setImmediate = require('async/setImmediate') const promisify = require('promisify-es6') +const errCode = require('err-code') module.exports = function block (self) { return { - get: promisify((cid, callback) => { - cid = cleanCid(cid) + get: promisify((cid, options, callback) => { + if 
(typeof options === 'function') { + callback = options + options = {} + } + + options = options || {} + + try { + cid = cleanCid(cid) + } catch (err) { + return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + } + + if (options.preload !== false) { + self._preload(cid) + } + self._blockService.get(cid, callback) }), put: promisify((block, options, callback) => { @@ -52,16 +70,38 @@ module.exports = function block (self) { if (err) { return cb(err) } + + if (options.preload !== false) { + self._preload(block.cid) + } + cb(null, block) }) ], callback) }), rm: promisify((cid, callback) => { - cid = cleanCid(cid) + try { + cid = cleanCid(cid) + } catch (err) { + return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + } self._blockService.delete(cid, callback) }), - stat: promisify((cid, callback) => { - cid = cleanCid(cid) + stat: promisify((cid, options, callback) => { + if (typeof options === 'function') { + callback = options + options = {} + } + + try { + cid = cleanCid(cid) + } catch (err) { + return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + } + + if (options.preload !== false) { + self._preload(cid) + } self._blockService.get(cid, (err, block) => { if (err) { diff --git a/src/core/components/dag.js b/src/core/components/dag.js index 88a80bdcce..317d6acb8a 100644 --- a/src/core/components/dag.js +++ b/src/core/components/dag.js @@ -4,18 +4,22 @@ const promisify = require('promisify-es6') const CID = require('cids') const pull = require('pull-stream') const mapAsync = require('async/map') +const setImmediate = require('async/setImmediate') const flattenDeep = require('lodash/flattenDeep') +const errCode = require('err-code') module.exports = function dag (self) { return { put: promisify((dagNode, options, callback) => { if (typeof options === 'function') { callback = options - } else if (options.cid && (options.format || options.hashAlg)) { + options = {} + } else if (options && options.cid && (options.format || options.hashAlg)) { return callback(new Error('Can\'t put dag node. Please provide either `cid` OR `format` and `hashAlg` options.')) - } else if ((options.format && !options.hashAlg) || (!options.format && options.hashAlg)) { + } else if (options && ((options.format && !options.hashAlg) || (!options.format && options.hashAlg))) { return callback(new Error('Can\'t put dag node. Please provide `format` AND `hashAlg` options.')) } + options = options || {} const optionDefaults = { format: 'dag-cbor', @@ -24,7 +28,15 @@ module.exports = function dag (self) { options = options.cid ? 
options : Object.assign({}, optionDefaults, options) - self._ipld.put(dagNode, options, callback) + self._ipld.put(dagNode, options, (err, cid) => { + if (err) return callback(err) + + if (options.preload !== false) { + self._preload(cid) + } + + callback(null, cid) + }) }), get: promisify((cid, path, options, callback) => { @@ -42,7 +54,13 @@ module.exports = function dag (self) { if (typeof cid === 'string') { const split = cid.split('/') - cid = new CID(split[0]) + + try { + cid = new CID(split[0]) + } catch (err) { + return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + } + split.shift() if (split.length > 0) { @@ -54,10 +72,14 @@ module.exports = function dag (self) { try { cid = new CID(cid) } catch (err) { - return callback(err) + return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) } } + if (options.preload !== false) { + self._preload(cid) + } + self._ipld.get(cid, path, options, callback) }), @@ -82,7 +104,13 @@ module.exports = function dag (self) { if (typeof cid === 'string') { const split = cid.split('/') - cid = new CID(split[0]) + + try { + cid = new CID(split[0]) + } catch (err) { + return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + } + split.shift() if (split.length > 0) { @@ -92,6 +120,10 @@ module.exports = function dag (self) { } } + if (options.preload !== false) { + self._preload(cid) + } + pull( self._ipld.treeStream(cid, path, options), pull.collect(callback) @@ -99,10 +131,25 @@ module.exports = function dag (self) { }), // TODO - use IPLD selectors once they are implemented - _getRecursive: promisify((multihash, callback) => { + _getRecursive: promisify((multihash, options, callback) => { // gets flat array of all DAGNodes in tree given by multihash - self.dag.get(new CID(multihash), (err, res) => { + if (typeof options === 'function') { + callback = options + options = {} + } + + options = options || {} + + let cid + + try { + cid = new CID(multihash) + } catch (err) { + return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + } + + self.dag.get(cid, '', options, (err, res) => { if (err) { return callback(err) } mapAsync(res.value.links, (link, cb) => { diff --git a/src/core/components/dht.js b/src/core/components/dht.js index 8504df7488..848b2e0621 100644 --- a/src/core/components/dht.js +++ b/src/core/components/dht.js @@ -5,7 +5,9 @@ const every = require('async/every') const PeerId = require('peer-id') const CID = require('cids') const each = require('async/each') +const setImmediate = require('async/setImmediate') // const bsplit = require('buffer-split') +const errCode = require('err-code') module.exports = (self) => { return { @@ -56,12 +58,30 @@ module.exports = (self) => { * @param {function(Error, Array)} [callback] * @returns {Promise|void} */ - findprovs: promisify((key, callback) => { + findprovs: promisify((key, opts, callback) => { + if (typeof opts === 'function') { + callback = opts + opts = {} + } + + opts = opts || {} + if (typeof key === 'string') { - key = new CID(key) + try { + key = new CID(key) + } catch (err) { + return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + } } - self._libp2pNode.contentRouting.findProviders(key, callback) + if (typeof opts === 'function') { + callback = opts + opts = {} + } + + opts = opts || {} + + self._libp2pNode.contentRouting.findProviders(key, opts.timeout || null, callback) }), /** diff --git a/src/core/components/files.js b/src/core/components/files.js index f69868bd77..d575d893fa 100644 --- 
a/src/core/components/files.js +++ b/src/core/components/files.js @@ -17,6 +17,8 @@ const Duplex = require('readable-stream').Duplex const OtherBuffer = require('buffer').Buffer const CID = require('cids') const toB58String = require('multihashes').toB58String +const errCode = require('err-code') +const parseChunkerString = require('../utils').parseChunkerString const WRAPPER = 'wrapper/' @@ -89,6 +91,20 @@ function normalizeContent (opts, content) { }) } +function preloadFile (self, opts, file) { + const isRootFile = opts.wrapWithDirectory + ? file.path === '' + : !file.path.includes('/') + + const shouldPreload = isRootFile && !opts.onlyHash && opts.preload !== false + + if (shouldPreload) { + self._preload(file.hash) + } + + return file +} + function pinFile (self, opts, file, cb) { // Pin a file if it is the root dir of a recursive add or the single file // of a direct add. @@ -133,12 +149,18 @@ class AddHelper extends Duplex { } module.exports = function files (self) { - function _addPullStream (options) { + function _addPullStream (options = {}) { + let chunkerOptions + try { + chunkerOptions = parseChunkerString(options.chunker) + } catch (err) { + return pull.map(() => { throw err }) + } const opts = Object.assign({}, { shardSplitThreshold: self._options.EXPERIMENTAL.sharding ? 1000 : Infinity - }, options) + }, options, chunkerOptions) if (opts.hashAlg && opts.cidVersion !== 1) { opts.cidVersion = 1 @@ -158,6 +180,7 @@ module.exports = function files (self) { pull.flatten(), importer(self._ipld, opts), pull.asyncMap(prepareFile.bind(null, self, opts)), + pull.map(preloadFile.bind(null, self, opts)), pull.asyncMap(pinFile.bind(null, self, opts)) ) } @@ -167,11 +190,17 @@ module.exports = function files (self) { throw new Error('You must supply an ipfsPath') } + options = options || {} + ipfsPath = normalizePath(ipfsPath) const pathComponents = ipfsPath.split('/') const restPath = normalizePath(pathComponents.slice(1).join('/')) const filterFile = (file) => (restPath && file.path === restPath) || (file.path === ipfsPath) + if (options.preload !== false) { + self._preload(pathComponents[0]) + } + const d = deferred.source() pull( @@ -198,16 +227,21 @@ module.exports = function files (self) { } function _lsPullStreamImmutable (ipfsPath, options) { + options = options || {} + const path = normalizePath(ipfsPath) - const recursive = options && options.recursive - const pathDepth = path.split('/').length + const recursive = options.recursive + const pathComponents = path.split('/') + const pathDepth = pathComponents.length const maxDepth = recursive ? global.Infinity : pathDepth - const opts = Object.assign({}, { - maxDepth: maxDepth - }, options) + options.maxDepth = options.maxDepth || maxDepth + + if (options.preload !== false) { + self._preload(pathComponents[0]) + } return pull( - exporter(ipfsPath, self._ipld, opts), + exporter(ipfsPath, self._ipld, options), pull.filter(node => recursive ? 
node.depth >= pathDepth : node.depth === pathDepth ), @@ -319,8 +353,18 @@ module.exports = function files (self) { options = {} } - if (typeof callback !== 'function') { - throw new Error('Please supply a callback to ipfs.files.get') + options = options || {} + + if (options.preload !== false) { + let pathComponents + + try { + pathComponents = normalizePath(ipfsPath).split('/') + } catch (err) { + return setImmediate(() => callback(errCode(err, 'ERR_INVALID_PATH'))) + } + + self._preload(pathComponents[0]) } pull( @@ -344,6 +388,20 @@ module.exports = function files (self) { }), getReadableStream: (ipfsPath, options) => { + options = options || {} + + if (options.preload !== false) { + let pathComponents + + try { + pathComponents = normalizePath(ipfsPath).split('/') + } catch (err) { + return toStream.source(pull.error(errCode(err, 'ERR_INVALID_PATH'))) + } + + self._preload(pathComponents[0]) + } + return toStream.source( pull( exporter(ipfsPath, self._ipld, options), @@ -360,6 +418,20 @@ module.exports = function files (self) { }, getPullStream: (ipfsPath, options) => { + options = options || {} + + if (options.preload !== false) { + let pathComponents + + try { + pathComponents = normalizePath(ipfsPath).split('/') + } catch (err) { + return pull.error(errCode(err, 'ERR_INVALID_PATH')) + } + + self._preload(pathComponents[0]) + } + return exporter(ipfsPath, self._ipld, options) }, @@ -369,6 +441,8 @@ module.exports = function files (self) { options = {} } + options = options || {} + pull( _lsPullStreamImmutable(ipfsPath, options), pull.collect((err, values) => { diff --git a/src/core/components/index.js b/src/core/components/index.js index 9eb36ad4c3..6fc833499d 100644 --- a/src/core/components/index.js +++ b/src/core/components/index.js @@ -26,4 +26,5 @@ exports.dht = require('./dht') exports.dns = require('./dns') exports.key = require('./key') exports.stats = require('./stats') -exports.mfs = require('ipfs-mfs/core') +exports.mfs = require('./mfs') +exports.resolve = require('./resolve') diff --git a/src/core/components/mfs.js b/src/core/components/mfs.js new file mode 100644 index 0000000000..2fd44d2979 --- /dev/null +++ b/src/core/components/mfs.js @@ -0,0 +1,40 @@ +'use strict' + +const promisify = require('promisify-es6') +const mfs = require('ipfs-mfs/core') + +module.exports = self => { + const mfsSelf = Object.assign({}, self) + + // A patched dag API to ensure preload doesn't happen for MFS operations + // (MFS is preloaded periodically) + mfsSelf.dag = Object.assign({}, self.dag, { + get: promisify((cid, path, opts, cb) => { + if (typeof path === 'function') { + cb = path + path = undefined + } + + if (typeof opts === 'function') { + cb = opts + opts = {} + } + + opts = Object.assign({}, opts, { preload: false }) + + return self.dag.get(cid, path, opts, cb) + }), + put: promisify((node, opts, cb) => { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + + opts = Object.assign({}, opts, { preload: false }) + + return self.dag.put(node, opts, cb) + }) + }) + + return mfs(mfsSelf, mfsSelf._options) +} diff --git a/src/core/components/object.js b/src/core/components/object.js index 5e8be00e4a..d83deae62a 100644 --- a/src/core/components/object.js +++ b/src/core/components/object.js @@ -1,6 +1,7 @@ 'use strict' const waterfall = require('async/waterfall') +const setImmediate = require('async/setImmediate') const promisify = require('promisify-es6') const dagPB = require('ipld-dag-pb') const DAGNode = dagPB.DAGNode @@ -8,7 +9,7 @@ const DAGLink = dagPB.DAGLink const 
CID = require('cids') const mh = require('multihashes') const Unixfs = require('ipfs-unixfs') -const assert = require('assert') +const errCode = require('err-code') function normalizeMultihash (multihash, enc) { if (typeof multihash === 'string') { @@ -80,10 +81,17 @@ module.exports = function object (self) { if (err) { return cb(err) } - self._ipld.put(node, { - cid: new CID(node.multihash) - }, (err) => { - cb(err, node) + + const cid = new CID(node.multihash) + + self._ipld.put(node, { cid }, (err) => { + if (err) return cb(err) + + if (options.preload !== false) { + self._preload(cid) + } + + cb(null, node) }) }) } @@ -92,16 +100,26 @@ module.exports = function object (self) { } return { - new: promisify((template, callback) => { + new: promisify((template, options, callback) => { if (typeof template === 'function') { callback = template template = undefined + options = {} + } + + if (typeof options === 'function') { + callback = options + options = {} } + options = options || {} + let data if (template) { - assert(template === 'unixfs-dir', 'unkown template') + if (template !== 'unixfs-dir') { + return setImmediate(() => callback(new Error('unknown template'))) + } data = (new Unixfs('directory')).marshal() } else { data = Buffer.alloc(0) @@ -111,13 +129,18 @@ module.exports = function object (self) { if (err) { return callback(err) } - self._ipld.put(node, { - cid: new CID(node.multihash) - }, (err) => { + + const cid = new CID(node.multihash) + + self._ipld.put(node, { cid }, (err) => { if (err) { return callback(err) } + if (options.preload !== false) { + self._preload(cid) + } + callback(null, node) }) }) @@ -166,13 +189,23 @@ module.exports = function object (self) { } function next () { - self._ipld.put(node, { - cid: new CID(node.multihash) - }, (err) => { + let cid + + try { + cid = new CID(node.multihash) + } catch (err) { + return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) + } + + self._ipld.put(node, { cid }, (err) => { if (err) { return callback(err) } + if (options.preload !== false) { + self._preload(cid) + } + self.object.get(node.multihash, callback) }) } @@ -184,19 +217,28 @@ module.exports = function object (self) { options = {} } - let mh + let mh, cid try { mh = normalizeMultihash(multihash, options.enc) } catch (err) { - return callback(err) + return setImmediate(() => callback(errCode(err, 'ERR_INVALID_MULTIHASH'))) + } + + try { + cid = new CID(mh) + } catch (err) { + return setImmediate(() => callback(errCode(err, 'ERR_INVALID_CID'))) } - let cid = new CID(mh) if (options.cidVersion === 1) { cid = cid.toV1() } + if (options.preload !== false) { + self._preload(cid) + } + self._ipld.get(cid, (err, result) => { if (err) { return callback(err) @@ -282,6 +324,8 @@ module.exports = function object (self) { editAndSave((node, cb) => { if (DAGLink.isDAGLink(linkRef)) { linkRef = linkRef._name + } else if (linkRef && linkRef.name) { + linkRef = linkRef.name } DAGNode.rmLink(node, linkRef, cb) })(multihash, options, callback) diff --git a/src/core/components/pin-set.js b/src/core/components/pin-set.js index f18a248604..1f31697825 100644 --- a/src/core/components/pin-set.js +++ b/src/core/components/pin-set.js @@ -6,7 +6,8 @@ const protobuf = require('protons') const fnv1a = require('fnv1a') const varint = require('varint') const { DAGNode, DAGLink } = require('ipld-dag-pb') -const async = require('async') +const some = require('async/some') +const eachOf = require('async/eachOf') const pbSchema = require('./pin.proto') @@ -67,14 +68,14 @@ exports = 
module.exports = function (dag) { return searchChildren(root, callback) function searchChildren (root, cb) { - async.some(root.links, ({ multihash }, someCb) => { + some(root.links, ({ multihash }, someCb) => { const bs58Link = toB58String(multihash) if (bs58Link === childhash) { return someCb(null, true) } if (bs58Link in seen) { return someCb(null, false) } seen[bs58Link] = true - dag.get(multihash, (err, res) => { + dag.get(multihash, '', { preload: false }, (err, res) => { if (err) { return someCb(err) } searchChildren(res.value, someCb) }) @@ -90,7 +91,7 @@ exports = module.exports = function (dag) { pinSet.storeItems(pins, (err, rootNode) => { if (err) { return callback(err) } - const opts = { cid: new CID(rootNode.multihash) } + const opts = { cid: new CID(rootNode.multihash), preload: false } dag.put(rootNode, opts, (err, cid) => { if (err) { return callback(err) } callback(null, rootNode) @@ -150,7 +151,7 @@ exports = module.exports = function (dag) { return bins }, {}) - async.eachOf(bins, (bin, idx, eachCb) => { + eachOf(bins, (bin, idx, eachCb) => { storePins( bin, depth + 1, @@ -168,7 +169,8 @@ exports = module.exports = function (dag) { function storeChild (err, child, binIdx, cb) { if (err) { return cb(err) } - dag.put(child, { cid: new CID(child._multihash) }, err => { + const opts = { cid: new CID(child.multihash), preload: false } + dag.put(child, opts, err => { if (err) { return cb(err) } fanoutLinks[binIdx] = new DAGLink('', child.size, child.multihash) cb(null) @@ -183,7 +185,7 @@ exports = module.exports = function (dag) { return callback(new Error('No link found with name ' + name)) } - dag.get(link.multihash, (err, res) => { + dag.get(link.multihash, '', { preload: false }, (err, res) => { if (err) { return callback(err) } const keys = [] const step = link => keys.push(link.multihash) @@ -202,7 +204,7 @@ exports = module.exports = function (dag) { return callback(err) } - async.eachOf(node.links, (link, idx, eachCb) => { + eachOf(node.links, (link, idx, eachCb) => { if (idx < pbh.header.fanout) { // the first pbh.header.fanout links are fanout bins // if a fanout bin is not 'empty', dig into and walk its DAGLinks @@ -210,7 +212,7 @@ exports = module.exports = function (dag) { if (!emptyKey.equals(linkHash)) { // walk the links of this fanout bin - return dag.get(linkHash, (err, res) => { + return dag.get(linkHash, '', { preload: false }, (err, res) => { if (err) { return eachCb(err) } pinSet.walkItems(res.value, step, eachCb) }) diff --git a/src/core/components/pin.js b/src/core/components/pin.js index c6f6cc9696..196ca1a67b 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -4,9 +4,16 @@ const promisify = require('promisify-es6') const { DAGNode, DAGLink } = require('ipld-dag-pb') const CID = require('cids') -const multihashes = require('multihashes') -const async = require('async') +const map = require('async/map') +const mapSeries = require('async/mapSeries') +const series = require('async/series') +const parallel = require('async/parallel') +const eachLimit = require('async/eachLimit') +const waterfall = require('async/waterfall') +const someLimit = require('async/someLimit') +const setImmediate = require('async/setImmediate') const { Key } = require('interface-datastore') +const errCode = require('err-code') const createPinSet = require('./pin-set') const { resolvePath } = require('../utils') @@ -19,6 +26,11 @@ function toB58String (hash) { return new CID(hash).toBaseEncodedString() } +function invalidPinTypeErr (type) { + const errMsg 
= `Invalid type '${type}', must be one of {direct, indirect, recursive, all}` + return errCode(new Error(errMsg), 'ERR_INVALID_PIN_TYPE') +} + module.exports = (self) => { const repo = self._repo const dag = self.dag @@ -34,13 +46,13 @@ module.exports = (self) => { let recursivePins = new Set() const directKeys = () => - Array.from(directPins).map(key => multihashes.fromB58String(key)) + Array.from(directPins).map(key => new CID(key).buffer) const recursiveKeys = () => - Array.from(recursivePins).map(key => multihashes.fromB58String(key)) + Array.from(recursivePins).map(key => new CID(key).buffer) function getIndirectKeys (callback) { const indirectKeys = new Set() - async.eachLimit(recursiveKeys(), concurrencyLimit, (multihash, cb) => { + eachLimit(recursiveKeys(), concurrencyLimit, (multihash, cb) => { dag._getRecursive(multihash, (err, nodes) => { if (err) { return cb(err) } @@ -63,16 +75,16 @@ module.exports = (self) => { // a DAGNode holding those as DAGLinks, a kind of root pin function flushPins (callback) { let dLink, rLink, root - async.series([ + series([ // create a DAGLink to the node with direct pins - cb => async.waterfall([ + cb => waterfall([ cb => pinset.storeSet(directKeys(), cb), (node, cb) => DAGLink.create(types.direct, node.size, node.multihash, cb), (link, cb) => { dLink = link; cb(null) } ], cb), // create a DAGLink to the node with recursive pins - cb => async.waterfall([ + cb => waterfall([ cb => pinset.storeSet(recursiveKeys(), cb), (node, cb) => DAGLink.create(types.recursive, node.size, node.multihash, cb), (link, cb) => { rLink = link; cb(null) } @@ -81,14 +93,14 @@ module.exports = (self) => { // the pin-set nodes link to a special 'empty' node, so make sure it exists cb => DAGNode.create(Buffer.alloc(0), (err, empty) => { if (err) { return cb(err) } - dag.put(empty, { cid: new CID(empty.multihash) }, cb) + dag.put(empty, { cid: new CID(empty.multihash), preload: false }, cb) }), // create a root node with DAGLinks to the direct and recursive DAGs cb => DAGNode.create(Buffer.alloc(0), [dLink, rLink], (err, node) => { if (err) { return cb(err) } root = node - dag.put(root, { cid: new CID(root.multihash) }, cb) + dag.put(root, { cid: new CID(root.multihash), preload: false }, cb) }), // hack for CLI tests @@ -115,7 +127,7 @@ module.exports = (self) => { if (err) { return callback(err) } // verify that each hash can be pinned - async.map(mhs, (multihash, cb) => { + map(mhs, (multihash, cb) => { const key = toB58String(multihash) if (recursive) { if (recursivePins.has(key)) { @@ -175,7 +187,7 @@ module.exports = (self) => { if (err) { return callback(err) } // verify that each hash can be unpinned - async.map(mhs, (multihash, cb) => { + map(mhs, (multihash, cb) => { pin._isPinnedWithType(multihash, types.all, (err, res) => { if (err) { return cb(err) } const { pinned, reason } = res @@ -236,12 +248,13 @@ module.exports = (self) => { paths = null } if (options && options.type) { + if (typeof options.type !== 'string') { + return setImmediate(() => callback(invalidPinTypeErr(options.type))) + } type = options.type.toLowerCase() } - if (!types[type]) { - return callback(new Error( - `Invalid type '${type}', must be one of {direct, indirect, recursive, all}` - )) + if (!Object.keys(types).includes(type)) { + return setImmediate(() => callback(invalidPinTypeErr(type))) } if (paths) { @@ -249,7 +262,7 @@ module.exports = (self) => { resolvePath(self.object, paths, (err, mhs) => { if (err) { return callback(err) } - async.mapSeries(mhs, (multihash, cb) => { + 
mapSeries(mhs, (multihash, cb) => { pin._isPinnedWithType(multihash, types.all, (err, res) => { if (err) { return cb(err) } const { pinned, reason } = res @@ -337,7 +350,7 @@ module.exports = (self) => { // check each recursive key to see if multihash is under it // arbitrary limit, enables handling 1000s of pins. let foundPin - async.someLimit(recursiveKeys(), concurrencyLimit, (key, cb) => { + someLimit(recursiveKeys(), concurrencyLimit, (key, cb) => { dag.get(new CID(key), (err, res) => { if (err) { return cb(err) } @@ -355,13 +368,13 @@ module.exports = (self) => { }), _load: promisify(callback => { - async.waterfall([ + waterfall([ // hack for CLI tests (cb) => repo.closed ? repo.datastore.open(cb) : cb(null, null), (_, cb) => repo.datastore.has(pinDataStoreKey, cb), (has, cb) => has ? cb() : cb(new Error('No pins to load')), (cb) => repo.datastore.get(pinDataStoreKey, cb), - (mh, cb) => dag.get(new CID(mh), cb) + (mh, cb) => dag.get(new CID(mh), '', { preload: false }, cb) ], (err, pinRoot) => { if (err) { if (err.message === 'No pins to load') { @@ -372,7 +385,7 @@ module.exports = (self) => { } } - async.parallel([ + parallel([ cb => pinset.loadSet(pinRoot.value, types.recursive, cb), cb => pinset.loadSet(pinRoot.value, types.direct, cb) ], (err, keys) => { diff --git a/src/core/components/resolve.js b/src/core/components/resolve.js new file mode 100644 index 0000000000..dd866cbfbb --- /dev/null +++ b/src/core/components/resolve.js @@ -0,0 +1,86 @@ +'use strict' + +const promisify = require('promisify-es6') +const isIpfs = require('is-ipfs') +const setImmediate = require('async/setImmediate') +const doUntil = require('async/doUntil') +const CID = require('cids') + +module.exports = (self) => { + return promisify((name, opts, cb) => { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + + opts = opts || {} + + if (!isIpfs.path(name)) { + return setImmediate(() => cb(new Error('invalid argument'))) + } + + // TODO remove this and update subsequent code when IPNS is implemented + if (!isIpfs.ipfsPath(name)) { + return setImmediate(() => cb(new Error('resolve non-IPFS names is not implemented'))) + } + + const split = name.split('/') // ['', 'ipfs', 'hash', ...path] + const cid = new CID(split[2]) + + if (split.length === 3) { + return setImmediate(() => cb(null, name)) + } + + const path = split.slice(3).join('/') + + resolve(cid, path, (err, cid) => { + if (err) return cb(err) + if (!cid) return cb(new Error('found non-link at given path')) + cb(null, `/ipfs/${cid.toBaseEncodedString(opts.cidBase)}`) + }) + }) + + // Resolve the given CID + path to a CID. 
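
For orientation, a minimal usage sketch (not part of the diff) of the public resolve API implemented above; the internal CID-plus-path resolver it delegates to follows below. The added file and its content are placeholders, and the flow mirrors the new CLI resolve test added later in this changeset.

```js
// Sketch only: exercising the new ipfs.resolve API on a freshly added, wrapped file.
const IPFS = require('ipfs')

const node = new IPFS()

node.on('ready', () => {
  // Placeholder content, wrapped in a directory so there is a path to resolve through
  node.files.add({ path: 'readme', content: Buffer.from('hello') }, { wrapWithDirectory: true }, (err, res) => {
    if (err) throw err

    const rootHash = res.find(f => f.path === '').hash

    // Walks the /ipfs/ path through the wrapping directory down to the file's CID.
    // An optional { cidBase } option controls the encoding of the returned CID.
    node.resolve(`/ipfs/${rootHash}/readme`, (err, resolved) => {
      if (err) throw err
      console.log(resolved) // => '/ipfs/<cid of readme>'
      node.stop()
    })
  })
})
```
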
+ function resolve (cid, path, callback) { + let value + + doUntil( + (cb) => { + self.block.get(cid, (err, block) => { + if (err) return cb(err) + + const r = self._ipld.resolvers[cid.codec] + + if (!r) { + return cb(new Error(`No resolver found for codec "${cid.codec}"`)) + } + + r.resolver.resolve(block.data, path, (err, result) => { + if (err) return cb(err) + value = result.value + path = result.remainderPath + cb() + }) + }) + }, + () => { + const endReached = !path || path === '/' + + if (endReached) { + return true + } + + if (value) { + cid = new CID(value['/']) + } + + return false + }, + (err) => { + if (err) return callback(err) + if (value && value['/']) return callback(null, new CID(value['/'])) + callback() + } + ) + } +} diff --git a/src/core/components/start.js b/src/core/components/start.js index fd4832e35a..3a7a5716ce 100644 --- a/src/core/components/start.js +++ b/src/core/components/start.js @@ -42,7 +42,9 @@ module.exports = (self) => { self._bitswap.start() self._blockService.setExchange(self._bitswap) - cb() + + self._preload.start() + self._mfsPreload.start(cb) } ], done) }) diff --git a/src/core/components/stats.js b/src/core/components/stats.js index ad87cf981e..010a01431d 100644 --- a/src/core/components/stats.js +++ b/src/core/components/stats.js @@ -5,6 +5,7 @@ const Big = require('big.js') const Pushable = require('pull-pushable') const human = require('human-to-milliseconds') const toStream = require('pull-stream-to-stream') +const errCode = require('err-code') function bandwidthStats (self, opts) { return new Promise((resolve, reject) => { @@ -49,7 +50,9 @@ module.exports = function stats (self) { if (opts.poll) { human(opts.interval || '1s', (err, value) => { - if (err) throw err + if (err) { + return stream.end(errCode(err, 'ERR_INVALID_POLL_INTERVAL')) + } interval = setInterval(() => { bandwidthStats(self, opts) diff --git a/src/core/components/stop.js b/src/core/components/stop.js index 4d35190d21..cf97b6ec6a 100644 --- a/src/core/components/stop.js +++ b/src/core/components/stop.js @@ -30,8 +30,10 @@ module.exports = (self) => { self.state.stop() self._blockService.unsetExchange() self._bitswap.stop() + self._preload.stop() series([ + (cb) => self._mfsPreload.stop(cb), (cb) => self.libp2p.stop(cb), (cb) => self._repo.close(cb) ], done) diff --git a/src/core/components/swarm.js b/src/core/components/swarm.js index 5d1c8d26b6..c6c4d3073c 100644 --- a/src/core/components/swarm.js +++ b/src/core/components/swarm.js @@ -1,6 +1,5 @@ 'use strict' -const multiaddr = require('multiaddr') const promisify = require('promisify-es6') const values = require('lodash/values') @@ -67,10 +66,6 @@ module.exports = function swarm (self) { return callback(new Error(OFFLINE_ERROR)) } - if (typeof maddr === 'string') { - maddr = multiaddr(maddr) - } - self._libp2pNode.dial(maddr, callback) }), @@ -79,10 +74,6 @@ module.exports = function swarm (self) { return callback(new Error(OFFLINE_ERROR)) } - if (typeof maddr === 'string') { - maddr = multiaddr(maddr) - } - self._libp2pNode.hangUp(maddr, callback) }), diff --git a/src/core/config.js b/src/core/config.js index 1b04d10a2f..7b16c17d06 100644 --- a/src/core/config.js +++ b/src/core/config.js @@ -8,6 +8,10 @@ const schema = Joi.object().keys({ Joi.string() ).allow(null), repoOwner: Joi.boolean().default(true), + preload: Joi.object().keys({ + enabled: Joi.boolean().default(true), + addresses: Joi.array().items(Joi.multiaddr().options({ convert: false })) + }).allow(null), init: Joi.alternatives().try( Joi.boolean(), 
Joi.object().keys({ bits: Joi.number().integer() }) diff --git a/src/core/index.js b/src/core/index.js index 8d1bbdc8b6..06cd3f97b1 100644 --- a/src/core/index.js +++ b/src/core/index.js @@ -25,6 +25,8 @@ const boot = require('./boot') const components = require('./components') // replaced by repo-browser when running in the browser const defaultRepo = require('./runtime/repo-nodejs') +const preload = require('./preload') +const mfsPreload = require('./mfs-preload') class IPFS extends EventEmitter { constructor (options) { @@ -33,7 +35,14 @@ class IPFS extends EventEmitter { this._options = { init: true, start: true, - EXPERIMENTAL: {} + EXPERIMENTAL: {}, + preload: { + enabled: true, + addresses: [ + '/dnsaddr/node0.preload.ipfs.io/https', + '/dnsaddr/node1.preload.ipfs.io/https' + ] + } } options = config.validate(options || {}) @@ -81,6 +90,8 @@ class IPFS extends EventEmitter { this._blockService = new BlockService(this._repo) this._ipld = new Ipld(this._blockService) this._pubsub = undefined + this._preload = preload(this) + this._mfsPreload = mfsPreload(this) // IPFS Core exposed components // - for booting up a node @@ -112,6 +123,8 @@ class IPFS extends EventEmitter { this.dns = components.dns(this) this.key = components.key(this) this.stats = components.stats(this) + this.resolve = components.resolve(this) + this.state = require('./state')(this) // ipfs.ls @@ -126,7 +139,7 @@ class IPFS extends EventEmitter { } // ipfs.files - const mfs = components.mfs(this, this._options) + const mfs = components.mfs(this) Object.keys(mfs).forEach(key => { this.files[key] = mfs[key] diff --git a/src/core/mfs-preload.js b/src/core/mfs-preload.js new file mode 100644 index 0000000000..4f0cfd16a5 --- /dev/null +++ b/src/core/mfs-preload.js @@ -0,0 +1,51 @@ +'use strict' + +const debug = require('debug') + +const log = debug('jsipfs:mfs-preload') +log.error = debug('jsipfs:mfs-preload:error') + +module.exports = (self, options) => { + options = options || {} + options.interval = options.interval || 30 * 1000 + + let rootCid + let timeoutId + + const preloadMfs = () => { + self.files.stat('/', (err, stats) => { + if (err) { + timeoutId = setTimeout(preloadMfs, options.interval) + return log.error('failed to stat MFS root for preload', err) + } + + if (rootCid !== stats.hash) { + log(`preloading updated MFS root ${rootCid} -> ${stats.hash}`) + + return self._preload(stats.hash, (err) => { + timeoutId = setTimeout(preloadMfs, options.interval) + if (err) return log.error(`failed to preload MFS root ${stats.hash}`, err) + rootCid = stats.hash + }) + } + + timeoutId = setTimeout(preloadMfs, options.interval) + }) + } + + return { + start (cb) { + self.files.stat('/', (err, stats) => { + if (err) return cb(err) + rootCid = stats.hash + log(`monitoring MFS root ${rootCid}`) + timeoutId = setTimeout(preloadMfs, options.interval) + cb() + }) + }, + stop (cb) { + clearTimeout(timeoutId) + cb() + } + } +} diff --git a/src/core/preload.js b/src/core/preload.js new file mode 100644 index 0000000000..2902005fc2 --- /dev/null +++ b/src/core/preload.js @@ -0,0 +1,91 @@ +'use strict' + +const setImmediate = require('async/setImmediate') +const retry = require('async/retry') +const toUri = require('multiaddr-to-uri') +const debug = require('debug') +const CID = require('cids') +const preload = require('./runtime/preload-nodejs') + +const log = debug('jsipfs:preload') +log.error = debug('jsipfs:preload:error') + +const noop = (err) => { if (err) log.error(err) } + +module.exports = self => { + const options = 
self._options.preload || {} + options.enabled = Boolean(options.enabled) + options.addresses = options.addresses || [] + + if (!options.enabled || !options.addresses.length) { + const api = (_, callback) => { + if (callback) { + setImmediate(() => callback()) + } + } + api.start = () => {} + api.stop = () => {} + return api + } + + let stopped = true + let requests = [] + const apiUris = options.addresses.map(apiAddrToUri) + + const api = (cid, callback) => { + callback = callback || noop + + if (typeof cid !== 'string') { + try { + cid = new CID(cid).toBaseEncodedString() + } catch (err) { + return setImmediate(() => callback(err)) + } + } + + const fallbackApiUris = Array.from(apiUris) + let request + const now = Date.now() + + retry({ times: fallbackApiUris.length }, (cb) => { + if (stopped) return cb(new Error(`preload aborted for ${cid}`)) + + // Remove failed request from a previous attempt + requests = requests.filter(r => r !== request) + + const apiUri = fallbackApiUris.shift() + + request = preload(`${apiUri}/api/v0/refs?r=true&arg=${cid}`, cb) + requests = requests.concat(request) + }, (err) => { + requests = requests.filter(r => r !== request) + + if (err) { + return callback(err) + } + + log(`preloaded ${cid} in ${Date.now() - now}ms`) + callback() + }) + } + + api.start = () => { + stopped = false + } + + api.stop = () => { + stopped = true + log(`canceling ${requests.length} pending preload request(s)`) + requests.forEach(r => r.cancel()) + requests = [] + } + + return api +} + +function apiAddrToUri (addr) { + if (!(addr.endsWith('http') || addr.endsWith('https'))) { + addr = addr + '/http' + } + return toUri(addr) +} diff --git a/src/core/runtime/config-browser.js b/src/core/runtime/config-browser.js index 9819c04aaa..ca8c99e153 100644 --- a/src/core/runtime/config-browser.js +++ b/src/core/runtime/config-browser.js @@ -23,7 +23,7 @@ module.exports = () => ({ '/dns4/sgp-1.bootstrap.libp2p.io/tcp/443/wss/ipfs/QmSoLSafTMBsPKadTEgaXctDQVcqN88CNLHXMkTNwMKPnu', '/dns4/nyc-1.bootstrap.libp2p.io/tcp/443/wss/ipfs/QmSoLueR4xBeUbY9WZ9xGUUxunbKWcrNFTDAadQJmocnWm', '/dns4/nyc-2.bootstrap.libp2p.io/tcp/443/wss/ipfs/QmSoLV4Bbm51jM9C4gDYZQ9Cy3U6aXMJDAbzgu2fzaDs64', - '/dns4/wss0.bootstrap.libp2p.io/tcp/443/wss/ipfs/QmZMxNdpMkewiVZLMRxaNxUeZpDUb34pWjZ1kZvsd16Zic', - '/dns4/wss1.bootstrap.libp2p.io/tcp/443/wss/ipfs/Qmbut9Ywz9YEDrz8ySBSgWyJk41Uvm2QJPhwDJzJyGFsD6' + '/dns4/node0.preload.ipfs.io/tcp/443/wss/ipfs/QmZMxNdpMkewiVZLMRxaNxUeZpDUb34pWjZ1kZvsd16Zic', + '/dns4/node1.preload.ipfs.io/tcp/443/wss/ipfs/Qmbut9Ywz9YEDrz8ySBSgWyJk41Uvm2QJPhwDJzJyGFsD6' ] }) diff --git a/src/core/runtime/config-nodejs.js b/src/core/runtime/config-nodejs.js index 995f66261d..5b301d1e20 100644 --- a/src/core/runtime/config-nodejs.js +++ b/src/core/runtime/config-nodejs.js @@ -36,7 +36,7 @@ module.exports = () => ({ '/ip6/2a03:b0c0:0:1010::23:1001/tcp/4001/ipfs/QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd', '/ip6/2a03:b0c0:1:d0::e7:1/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3pNLQpmmEk35v6wYtsMGLzSr5QBU3', '/ip6/2604:a880:1:20::1d9:6001/tcp/4001/ipfs/QmSoLju6m7xTh3DuokvT3886QRYqxAzb1kShaanJgW36yx', - '/dns4/wss0.bootstrap.libp2p.io/tcp/443/wss/ipfs/QmZMxNdpMkewiVZLMRxaNxUeZpDUb34pWjZ1kZvsd16Zic', - '/dns4/wss1.bootstrap.libp2p.io/tcp/443/wss/ipfs/Qmbut9Ywz9YEDrz8ySBSgWyJk41Uvm2QJPhwDJzJyGFsD6' + '/dns4/node0.preload.ipfs.io/tcp/443/wss/ipfs/QmZMxNdpMkewiVZLMRxaNxUeZpDUb34pWjZ1kZvsd16Zic', + '/dns4/node1.preload.ipfs.io/tcp/443/wss/ipfs/Qmbut9Ywz9YEDrz8ySBSgWyJk41Uvm2QJPhwDJzJyGFsD6' ] }) diff --git 
a/src/core/runtime/preload-browser.js b/src/core/runtime/preload-browser.js new file mode 100644 index 0000000000..ff5b9b7698 --- /dev/null +++ b/src/core/runtime/preload-browser.js @@ -0,0 +1,29 @@ +/* eslint-env browser */ +'use strict' + +const debug = require('debug') + +const log = debug('jsipfs:preload') +log.error = debug('jsipfs:preload:error') + +module.exports = function preload (url, callback) { + log(url) + + const controller = new AbortController() + const signal = controller.signal + + fetch(url, { signal }) + .then(res => { + if (!res.ok) { + log.error('failed to preload', url, res.status, res.statusText) + throw new Error(`failed to preload ${url}`) + } + return res.text() + }) + .then(() => callback()) + .catch(callback) + + return { + cancel: () => controller.abort() + } +} diff --git a/src/core/runtime/preload-nodejs.js b/src/core/runtime/preload-nodejs.js new file mode 100644 index 0000000000..405798ca34 --- /dev/null +++ b/src/core/runtime/preload-nodejs.js @@ -0,0 +1,64 @@ +'use strict' + +const http = require('http') +const https = require('https') +const { URL } = require('url') +const debug = require('debug') +const setImmediate = require('async/setImmediate') + +const log = debug('jsipfs:preload') +log.error = debug('jsipfs:preload:error') + +module.exports = function preload (url, callback) { + log(url) + + try { + url = new URL(url) + } catch (err) { + return setImmediate(() => callback(err)) + } + + const transport = url.protocol === 'https:' ? https : http + + const req = transport.get({ + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search + }, (res) => { + if (res.statusCode < 200 || res.statusCode >= 300) { + res.resume() + log.error('failed to preload', url.href, res.statusCode, res.statusMessage) + return callback(new Error(`failed to preload ${url}`)) + } + + res.on('data', chunk => log(`data ${chunk}`)) + + res.on('abort', () => { + callback(new Error('request aborted')) + }) + + res.on('error', err => { + log.error('response error preloading', url.href, err) + callback(err) + }) + + res.on('end', () => { + // If aborted, callback is called in the abort handler + if (!res.aborted) callback() + }) + }) + + req.on('error', err => { + log.error('request error preloading', url.href, err) + callback(err) + }) + + return { + cancel: () => { + // No need to call callback here + // before repsonse - called in req error handler + // after response - called in res abort hander + req.abort() + } + } +} diff --git a/src/core/utils.js b/src/core/utils.js index a0d67e449a..5c66f76c94 100644 --- a/src/core/utils.js +++ b/src/core/utils.js @@ -1,9 +1,9 @@ 'use strict' -const multihashes = require('multihashes') const promisify = require('promisify-es6') const map = require('async/map') const isIpfs = require('is-ipfs') +const CID = require('cids') exports.OFFLINE_ERROR = 'This command must be run in online mode. Try running \'ipfs daemon\' first.' 
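
The preload plumbing above is driven by new constructor options; a rough sketch of opting in or out follows, with the addresses matching the defaults wired into src/core/index.js in this changeset (the MFS root is additionally re-preloaded on an interval by src/core/mfs-preload.js). The added content is a placeholder.

```js
// Sketch only: enabling/disabling preloading via the new constructor option.
// Defaults shown match src/core/index.js in this changeset.
const IPFS = require('ipfs')

const node = new IPFS({
  preload: {
    enabled: true, // pass false to disable preloading entirely
    addresses: [
      '/dnsaddr/node0.preload.ipfs.io/https',
      '/dnsaddr/node1.preload.ipfs.io/https'
    ]
  }
})

node.on('ready', () => {
  // Content added here is also announced to the preload nodes via a
  // GET <preload-node>/api/v0/refs?r=true&arg=<cid> request (see src/core/preload.js)
  node.files.add(Buffer.from('example content'), (err, files) => {
    if (err) throw err
    console.log(files[0].hash)
    node.stop()
  })
})
```
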
@@ -61,12 +61,15 @@ const resolvePath = promisify(function (objectAPI, ipfsPaths, callback) { map(ipfsPaths, (path, cb) => { if (typeof path !== 'string') { + let cid + try { - multihashes.validate(path) + cid = new CID(path) } catch (err) { return cb(err) } - return cb(null, path) + + return cb(null, cid.buffer) } let parsedPath @@ -76,10 +79,10 @@ const resolvePath = promisify(function (objectAPI, ipfsPaths, callback) { return cb(err) } - const rootHash = multihashes.fromB58String(parsedPath.hash) + const rootHash = new CID(parsedPath.hash) const rootLinks = parsedPath.links if (!rootLinks.length) { - return cb(null, rootHash) + return cb(null, rootHash.buffer) } objectAPI.get(rootHash, follow.bind(null, rootLinks)) @@ -107,5 +110,86 @@ const resolvePath = promisify(function (objectAPI, ipfsPaths, callback) { }, callback) }) +/** + * Parses chunker string into options used by DAGBuilder in ipfs-unixfs-engine + * + * + * @param {String} chunker Chunker algorithm supported formats: + * "size-{size}" + * "rabin" + * "rabin-{avg}" + * "rabin-{min}-{avg}-{max}" + * + * @return {Object} Chunker options for DAGBuilder + */ +function parseChunkerString (chunker) { + if (!chunker) { + return { + chunker: 'fixed' + } + } else if (chunker.startsWith('size-')) { + const sizeStr = chunker.split('-')[1] + const size = parseInt(sizeStr) + if (isNaN(size)) { + throw new Error('Chunker parameter size must be an integer') + } + return { + chunker: 'fixed', + chunkerOptions: { + maxChunkSize: size + } + } + } else if (chunker.startsWith('rabin')) { + return { + chunker: 'rabin', + chunkerOptions: parseRabinString(chunker) + } + } else { + throw new Error(`Unrecognized chunker option: ${chunker}`) + } +} + +/** + * Parses rabin chunker string + * + * @param {String} chunker Chunker algorithm supported formats: + * "rabin" + * "rabin-{avg}" + * "rabin-{min}-{avg}-{max}" + * + * @return {Object} rabin chunker options + */ +function parseRabinString (chunker) { + const options = {} + const parts = chunker.split('-') + switch (parts.length) { + case 1: + options.avgChunkSize = 262144 + break + case 2: + options.avgChunkSize = parseChunkSize(parts[1], 'avg') + break + case 4: + options.minChunkSize = parseChunkSize(parts[1], 'min') + options.avgChunkSize = parseChunkSize(parts[2], 'avg') + options.maxChunkSize = parseChunkSize(parts[3], 'max') + break + default: + throw new Error('Incorrect chunker format (expected "rabin" "rabin-[avg]" or "rabin-[min]-[avg]-[max]"') + } + + return options +} + +function parseChunkSize (str, name) { + let size = parseInt(str) + if (isNaN(size)) { + throw new Error(`Chunker parameter ${name} must be an integer`) + } + + return size +} + exports.parseIpfsPath = parseIpfsPath exports.resolvePath = resolvePath +exports.parseChunkerString = parseChunkerString diff --git a/src/http/api/resources/files.js b/src/http/api/resources/files.js index cd3c2f1e4c..3e2fb28d38 100644 --- a/src/http/api/resources/files.js +++ b/src/http/api/resources/files.js @@ -154,22 +154,11 @@ exports.add = { query: Joi.object() .keys({ 'cid-version': Joi.number().integer().min(0).max(1).default(0), - // Temporary restriction on raw-leaves: - // When cid-version=1 then raw-leaves MUST be present and false. - // - // This is because raw-leaves is not yet implemented in js-ipfs, - // and go-ipfs changes the value of raw-leaves to true when - // cid-version > 0 unless explicitly set to false. - // - // This retains feature parity without having to implement raw-leaves. 
- 'raw-leaves': Joi.boolean().when('cid-version', { - is: 1, - then: Joi.boolean().valid(false).required(), - otherwise: Joi.boolean().valid(false) - }), + 'raw-leaves': Joi.boolean(), 'only-hash': Joi.boolean(), pin: Joi.boolean().default(true), - 'wrap-with-directory': Joi.boolean() + 'wrap-with-directory': Joi.boolean(), + chunker: Joi.string() }) // TODO: Necessary until validate "recursive", "stream-channels" etc. .options({ allowUnknown: true }) @@ -233,7 +222,8 @@ exports.add = { onlyHash: request.query['only-hash'], hashAlg: request.query['hash'], wrapWithDirectory: request.query['wrap-with-directory'], - pin: request.query.pin + pin: request.query.pin, + chunker: request.query.chunker } const aborter = abortable() diff --git a/src/http/api/resources/index.js b/src/http/api/resources/index.js index 59040a99d8..f937bf2e1b 100644 --- a/src/http/api/resources/index.js +++ b/src/http/api/resources/index.js @@ -18,3 +18,4 @@ exports.pubsub = require('./pubsub') exports.dns = require('./dns') exports.key = require('./key') exports.stats = require('./stats') +exports.resolve = require('./resolve') diff --git a/src/http/api/resources/object.js b/src/http/api/resources/object.js index 16f5db9c8d..dc68c71251 100644 --- a/src/http/api/resources/object.js +++ b/src/http/api/resources/object.js @@ -85,7 +85,9 @@ exports.get = { const nodeJSON = node.toJSON() - nodeJSON.data = nodeJSON.data ? nodeJSON.data.toString() : '' + if (Buffer.isBuffer(node.data)) { + nodeJSON.data = node.data.toString(request.query['data-encoding'] || undefined) + } const answer = { Data: nodeJSON.data, @@ -520,7 +522,7 @@ exports.patchRmLink = { if (!request.query.arg[1]) { return reply({ - Message: 'cannot create link with no name!', + Message: 'cannot remove link with no name!', Code: 0 }).code(500).takeover() } @@ -545,11 +547,11 @@ exports.patchRmLink = { const link = request.pre.args.link const ipfs = request.server.app.ipfs - ipfs.object.patch.rmLink(root, link, (err, node) => { + ipfs.object.patch.rmLink(root, { name: link }, (err, node) => { if (err) { log.error(err) return reply({ - Message: 'Failed to add link to object: ' + err, + Message: 'Failed to remove link from object: ' + err, Code: 0 }).code(500) } diff --git a/src/http/api/resources/resolve.js b/src/http/api/resources/resolve.js new file mode 100644 index 0000000000..417f50d8e8 --- /dev/null +++ b/src/http/api/resources/resolve.js @@ -0,0 +1,37 @@ +'use strict' + +const Joi = require('joi') +const debug = require('debug') + +const log = debug('jsipfs:http-api:resolve') +log.error = debug('jsipfs:http-api:resolve:error') + +module.exports = { + validate: { + query: Joi.object().keys({ + r: Joi.alternatives() + .when('recursive', { + is: Joi.any().exist(), + then: Joi.any().forbidden(), + otherwise: Joi.boolean() + }), + recursive: Joi.boolean(), + arg: Joi.string().required() + }).unknown() + }, + handler (request, reply) { + const ipfs = request.server.app.ipfs + const name = request.query.arg + const recursive = request.query.r || request.query.recursive || false + + log(name, { recursive }) + + ipfs.resolve(name, { recursive }, (err, res) => { + if (err) { + log.error(err) + return reply({ Message: err.message, Code: 0 }).code(500) + } + reply({ Path: res }) + }) + } +} diff --git a/src/http/api/routes/index.js b/src/http/api/routes/index.js index bfec26a460..4087299ecd 100644 --- a/src/http/api/routes/index.js +++ b/src/http/api/routes/index.js @@ -21,4 +21,5 @@ module.exports = (server) => { require('./dns')(server) require('./key')(server) 
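
A short sketch of the chunker strings parsed by parseChunkerString in src/core/utils.js and forwarded via the new HTTP `chunker` query parameter above; it assumes the core add pipeline accepts the same raw string, which is how the HTTP resource passes it on. Sizes and content are illustrative.

```js
// Sketch only: the chunker formats understood by parseChunkerString.
//   'size-<bytes>'             fixed-size chunks of <bytes>
//   'rabin'                    rabin chunking with the default 262144 average
//   'rabin-<avg>'              rabin with a custom average chunk size
//   'rabin-<min>-<avg>-<max>'  rabin with explicit min/avg/max chunk sizes
const IPFS = require('ipfs')

const node = new IPFS()

node.on('ready', () => {
  // Placeholder content; the chunker string is passed through unchanged
  node.files.add(Buffer.from('example content'), { chunker: 'size-65536' }, (err, files) => {
    if (err) throw err
    console.log(files[0].hash)
    node.stop()
  })
})
```
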
require('./stats')(server) + require('./resolve')(server) } diff --git a/src/http/api/routes/resolve.js b/src/http/api/routes/resolve.js new file mode 100644 index 0000000000..259ae3bdd1 --- /dev/null +++ b/src/http/api/routes/resolve.js @@ -0,0 +1,16 @@ +'use strict' + +const resources = require('./../resources') + +module.exports = (server) => { + const api = server.select('API') + + api.route({ + method: '*', + path: '/api/v0/resolve', + config: { + handler: resources.resolve.handler, + validate: resources.resolve.validate + } + }) +} diff --git a/src/http/gateway/resources/gateway.js b/src/http/gateway/resources/gateway.js index c3a866c359..856bf4a8d9 100644 --- a/src/http/gateway/resources/gateway.js +++ b/src/http/gateway/resources/gateway.js @@ -12,6 +12,21 @@ const Stream = require('readable-stream') const { resolver } = require('ipfs-http-response') const PathUtils = require('../utils/path') +function detectContentType (ref, chunk) { + let fileSignature + + // try to guess the filetype based on the first bytes + // note that `file-type` doesn't support svgs, therefore we assume it's a svg if ref looks like it + if (!ref.endsWith('.svg')) { + fileSignature = fileType(chunk) + } + + // if we were unable to, fallback to the `ref` which might contain the extension + const mimeType = mime.lookup(fileSignature ? fileSignature.ext : ref) + + return mime.contentType(mimeType) +} + module.exports = { checkCID: (request, reply) => { if (!request.params.cid) { @@ -97,7 +112,7 @@ module.exports = { } // response.continue() - let filetypeChecked = false + let contentTypeDetected = false let stream2 = new Stream.PassThrough({ highWaterMark: 1 }) stream2.on('error', (err) => { log.error('stream2 err: ', err) @@ -108,29 +123,20 @@ module.exports = { pull( toPull.source(stream), pull.through((chunk) => { - // Check file type. do this once. - if (chunk.length > 0 && !filetypeChecked) { - log('got first chunk') - let fileSignature = fileType(chunk) - log('file type: ', fileSignature) - - filetypeChecked = true - const mimeType = mime.lookup(fileSignature - ? 
fileSignature.ext - : null) + // Guess content-type (only once) + if (chunk.length > 0 && !contentTypeDetected) { + let contentType = detectContentType(ref, chunk) + contentTypeDetected = true log('ref ', ref) - log('mime-type ', mimeType) - - if (mimeType) { - log('writing mimeType') + log('mime-type ', contentType) - response - .header('Content-Type', mime.contentType(mimeType)) - .send() - } else { - response.send() + if (contentType) { + log('writing content-type header') + response.header('Content-Type', contentType) } + + response.send() } stream2.write(chunk) diff --git a/test/cli/bootstrap.js b/test/cli/bootstrap.js index 8807a12411..f71fee2d84 100644 --- a/test/cli/bootstrap.js +++ b/test/cli/bootstrap.js @@ -30,8 +30,8 @@ describe('bootstrap', () => runOnAndOff((thing) => { '/ip6/2a03:b0c0:0:1010::23:1001/tcp/4001/ipfs/QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd', '/ip6/2a03:b0c0:1:d0::e7:1/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3pNLQpmmEk35v6wYtsMGLzSr5QBU3', '/ip6/2604:a880:1:20::1d9:6001/tcp/4001/ipfs/QmSoLju6m7xTh3DuokvT3886QRYqxAzb1kShaanJgW36yx', - '/dns4/wss0.bootstrap.libp2p.io/tcp/443/wss/ipfs/QmZMxNdpMkewiVZLMRxaNxUeZpDUb34pWjZ1kZvsd16Zic', - '/dns4/wss1.bootstrap.libp2p.io/tcp/443/wss/ipfs/Qmbut9Ywz9YEDrz8ySBSgWyJk41Uvm2QJPhwDJzJyGFsD6' + '/dns4/node0.preload.ipfs.io/tcp/443/wss/ipfs/QmZMxNdpMkewiVZLMRxaNxUeZpDUb34pWjZ1kZvsd16Zic', + '/dns4/node1.preload.ipfs.io/tcp/443/wss/ipfs/Qmbut9Ywz9YEDrz8ySBSgWyJk41Uvm2QJPhwDJzJyGFsD6' ] const updatedList = [ @@ -52,8 +52,8 @@ describe('bootstrap', () => runOnAndOff((thing) => { '/ip6/2a03:b0c0:0:1010::23:1001/tcp/4001/ipfs/QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd', '/ip6/2a03:b0c0:1:d0::e7:1/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3pNLQpmmEk35v6wYtsMGLzSr5QBU3', '/ip6/2604:a880:1:20::1d9:6001/tcp/4001/ipfs/QmSoLju6m7xTh3DuokvT3886QRYqxAzb1kShaanJgW36yx', - '/dns4/wss0.bootstrap.libp2p.io/tcp/443/wss/ipfs/QmZMxNdpMkewiVZLMRxaNxUeZpDUb34pWjZ1kZvsd16Zic', - '/dns4/wss1.bootstrap.libp2p.io/tcp/443/wss/ipfs/Qmbut9Ywz9YEDrz8ySBSgWyJk41Uvm2QJPhwDJzJyGFsD6', + '/dns4/node0.preload.ipfs.io/tcp/443/wss/ipfs/QmZMxNdpMkewiVZLMRxaNxUeZpDUb34pWjZ1kZvsd16Zic', + '/dns4/node1.preload.ipfs.io/tcp/443/wss/ipfs/Qmbut9Ywz9YEDrz8ySBSgWyJk41Uvm2QJPhwDJzJyGFsD6', '/ip4/111.111.111.111/tcp/1001/ipfs/QmcyFFKfLDGJKwufn2GeitxvhricsBQyNKTkrD14psikoD' ] diff --git a/test/cli/commands.js b/test/cli/commands.js index 2c0c5fc033..2c52e23d74 100644 --- a/test/cli/commands.js +++ b/test/cli/commands.js @@ -4,7 +4,7 @@ const expect = require('chai').expect const runOnAndOff = require('../utils/on-and-off') -const commandCount = 77 +const commandCount = 78 describe('commands', () => runOnAndOff((thing) => { let ipfs diff --git a/test/cli/dns.js b/test/cli/dns.js index ae66c6c8c3..0c6ab4e1a5 100644 --- a/test/cli/dns.js +++ b/test/cli/dns.js @@ -16,7 +16,7 @@ describe('dns', () => runOnAndOff((thing) => { this.timeout(60 * 1000) return ipfs('dns ipfs.io').then((res) => { - expect(res.substr(0, 6)).to.eql('/ipfs/') + expect(res.substr(0, 6)).to.eql('/ipns/') }) }) })) diff --git a/test/cli/files.js b/test/cli/files.js index 2f749f1cfe..69843145e6 100644 --- a/test/cli/files.js +++ b/test/cli/files.js @@ -205,56 +205,64 @@ describe('files', () => runOnAndOff((thing) => { }) }) - // Temporarily expect to fail as raw-leaves not yet implemented. - // - // When cid-version=1 then raw-leaves MUST be present and false. 
- // - // This is because raw-leaves is not yet implemented in js-ipfs, - // and go-ipfs changes the value of raw-leaves to true when - // cid-version > 0 unless explicitly set to false. - // - // This retains feature parity without having to implement raw-leaves. - it('add with cid-version=1', function () { + it('add with cid-version=1 < default max chunk size', function () { this.timeout(30 * 1000) - return new Promise((resolve, reject) => { - ipfs('add src/init-files/init-docs/readme --cid-version=1') - .then(() => reject(new Error('Raw leaves not expected to be implemented'))) - .catch((err) => { - expect(err).to.exist() - resolve() - }) - }) + return ipfs('add test/fixtures/less-than-default-max-chunk-size --cid-version=1') + .then((out) => { + expect(out) + .to.eql('added zb2rhh5LdXumxQfNZCqV8pmcC56LX71ERgf2qCNQsmZnwYYx9 less-than-default-max-chunk-size\n') + }) }) - // TODO: this test is failing, @alanshaw? - it.skip('add with cid-version=1 and raw-leaves=false', () => { - return ipfs('add src/init-files/init-docs/readme --cid-version=1 --raw-leaves=false').then((out) => { - expect(out) - .to.eql('added zdj7WWeQ43G6JJvLWQWZpyHuAMq6uYWRjkBXFad11vE2LHhQ7 readme\n') - }) + it('add with cid-version=1 > default max chunk size', function () { + this.timeout(30 * 1000) + + return ipfs('add test/fixtures/greater-than-default-max-chunk-size --cid-version=1') + .then((out) => { + expect(out) + .to.eql('added zdj7WbyyZoWVifUHUe58SNS184PpN8qAuCP6HpAY91iA8CveT greater-than-default-max-chunk-size\n') + }) }) - // Temporarily expect to fail as raw-leaves not yet implemented - // - // When cid-version=1 then raw-leaves MUST be present and false. - // - // This is because raw-leaves is not yet implemented in js-ipfs, - // and go-ipfs changes the value of raw-leaves to true when - // cid-version > 0 unless explicitly set to false. - // - // This retains feature parity without having to implement raw-leaves. 
- it('add with cid-version=1 and raw-leaves=true', function () { + it('add with cid-version=1 and raw-leaves=false < default max chunk size', function () { this.timeout(30 * 1000) - return new Promise((resolve, reject) => { - ipfs('add src/init-files/init-docs/readme --cid-version=1 --raw-leaves=true') - .then(() => reject(new Error('Raw leaves not expected to be implemented'))) - .catch((err) => { - expect(err).to.exist() - resolve() - }) - }) + return ipfs(`add test/fixtures/less-than-default-max-chunk-size --cid-version=1 --raw-leaves=false`) + .then((out) => { + expect(out) + .to.eql('added zdj7WWPWpmpFkrWJBhUEZ4QkGumsFsEdkaaEGs7U4dzJraogp less-than-default-max-chunk-size\n') + }) + }) + + it('add with cid-version=1 and raw-leaves=false > default max chunk size', function () { + this.timeout(30 * 1000) + + return ipfs(`add test/fixtures/greater-than-default-max-chunk-size --cid-version=1 --raw-leaves=false`) + .then((out) => { + expect(out) + .to.eql('added zdj7WmYojH6vMkDQFNDNwUy2ZawrggqAhS6jjRJwb1C4KXZni greater-than-default-max-chunk-size\n') + }) + }) + + it('add with cid-version=1 and raw-leaves=true < default max chunk size', function () { + this.timeout(30 * 1000) + + return ipfs('add test/fixtures/less-than-default-max-chunk-size --cid-version=1 --raw-leaves=true') + .then((out) => { + expect(out) + .to.eql('added zb2rhh5LdXumxQfNZCqV8pmcC56LX71ERgf2qCNQsmZnwYYx9 less-than-default-max-chunk-size\n') + }) + }) + + it('add with cid-version=1 and raw-leaves=true > default max chunk size', function () { + this.timeout(30 * 1000) + + return ipfs('add test/fixtures/greater-than-default-max-chunk-size --cid-version=1 --raw-leaves=true') + .then((out) => { + expect(out) + .to.eql('added zdj7WbyyZoWVifUHUe58SNS184PpN8qAuCP6HpAY91iA8CveT greater-than-default-max-chunk-size\n') + }) }) it('add --quiet', function () { diff --git a/test/cli/object.js b/test/cli/object.js index 4173ecd816..1462d00d55 100644 --- a/test/cli/object.js +++ b/test/cli/object.js @@ -41,6 +41,34 @@ describe('object', () => runOnAndOff((thing) => { }) }) + it('get with data', function () { + this.timeout(15 * 1000) + + return ipfs('object new') + .then((out) => out.trim()) + .then((hash) => ipfs(`object patch set-data ${hash} test/fixtures/test-data/hello`)) + .then((out) => out.trim()) + .then((hash) => ipfs(`object get ${hash}`)) + .then((out) => { + const result = JSON.parse(out) + expect(result.Data).to.eql('aGVsbG8gd29ybGQK') + }) + }) + + it('get while overriding data-encoding', function () { + this.timeout(15 * 1000) + + return ipfs('object new') + .then((out) => out.trim()) + .then((hash) => ipfs(`object patch set-data ${hash} test/fixtures/test-data/hello`)) + .then((out) => out.trim()) + .then((hash) => ipfs(`object get --data-encoding=utf8 ${hash}`)) + .then((out) => { + const result = JSON.parse(out) + expect(result.Data).to.eql('hello world\n') + }) + }) + it('put', () => { return ipfs('object put test/fixtures/test-data/node.json').then((out) => { expect(out).to.eql( diff --git a/test/cli/resolve.js b/test/cli/resolve.js new file mode 100644 index 0000000000..fe670d4d15 --- /dev/null +++ b/test/cli/resolve.js @@ -0,0 +1,56 @@ +/* eslint-env mocha */ +'use strict' + +const path = require('path') +const expect = require('chai').expect +const isIpfs = require('is-ipfs') + +const runOnAndOff = require('../utils/on-and-off') + +describe('resolve', () => runOnAndOff((thing) => { + let ipfs + + before(() => { + ipfs = thing.ipfs + }) + + it('should resolve an IPFS hash', function () { + this.timeout(10 * 
1000) + + const filePath = path.join(process.cwd(), '/src/init-files/init-docs/readme') + let hash + + return ipfs(`add ${filePath}`) + .then((out) => { + hash = out.split(' ')[1] + expect(isIpfs.cid(hash)).to.be.true() + return ipfs(`resolve /ipfs/${hash}`) + }) + .then((out) => { + expect(out).to.contain(`/ipfs/${hash}`) + }) + }) + + it('should resolve an IPFS path link', function () { + this.timeout(10 * 1000) + + const filePath = path.join(process.cwd(), '/src/init-files/init-docs/readme') + let fileHash, rootHash + + return ipfs(`add ${filePath} --wrap-with-directory`) + .then((out) => { + const lines = out.split('\n') + + fileHash = lines[0].split(' ')[1] + rootHash = lines[1].split(' ')[1] + + expect(isIpfs.cid(fileHash)).to.be.true() + expect(isIpfs.cid(rootHash)).to.be.true() + + return ipfs(`resolve /ipfs/${rootHash}/readme`) + }) + .then((out) => { + expect(out).to.contain(`/ipfs/${fileHash}`) + }) + }) +})) diff --git a/test/core/bitswap.spec.js b/test/core/bitswap.spec.js index 645581f2c1..5c0ce9f976 100644 --- a/test/core/bitswap.spec.js +++ b/test/core/bitswap.spec.js @@ -20,11 +20,6 @@ const IPFSFactory = require('ipfsd-ctl') const IPFS = require('../../src/core') -// TODO bitswap tests on windows is failing, missing proper shutdown of daemon -// https://github.com/ipfs/js-ipfsd-ctl/pull/205 -const isWindows = require('../utils/platforms').isWindows -const skipOnWindows = isWindows() ? describe.skip : describe - function makeBlock (callback) { const d = Buffer.from(`IPFS is awesome ${Math.random()}`) @@ -89,7 +84,7 @@ function addNode (fDaemon, inProcNode, callback) { }) } -skipOnWindows('bitswap', function () { +describe('bitswap', function () { this.timeout(80 * 1000) let inProcNode // Node spawned inside this process @@ -244,4 +239,14 @@ skipOnWindows('bitswap', function () { }) }) }) + + describe('unwant', () => { + it('should callback with error for invalid CID input', (done) => { + inProcNode.bitswap.unwant('INVALID CID', (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_CID') + done() + }) + }) + }) }) diff --git a/test/core/block.spec.js b/test/core/block.spec.js new file mode 100644 index 0000000000..fc47d3bf39 --- /dev/null +++ b/test/core/block.spec.js @@ -0,0 +1,69 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) + +const IPFSFactory = require('ipfsd-ctl') +const IPFS = require('../../src/core') + +describe('block', () => { + let ipfsd, ipfs + + before(function (done) { + this.timeout(20 * 1000) + + const factory = IPFSFactory.create({ type: 'proc' }) + + factory.spawn({ + exec: IPFS, + initOptions: { bits: 512 } + }, (err, _ipfsd) => { + expect(err).to.not.exist() + ipfsd = _ipfsd + ipfs = _ipfsd.api + done() + }) + }) + + after((done) => { + if (ipfsd) { + ipfsd.stop(done) + } else { + done() + } + }) + + describe('get', () => { + it('should callback with error for invalid CID input', (done) => { + ipfs.block.get('INVALID CID', (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_CID') + done() + }) + }) + }) + + describe('rm', () => { + it('should callback with error for invalid CID input', (done) => { + ipfs.block.rm('INVALID CID', (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_CID') + done() + }) + }) + }) + + describe('stat', () => { + it('should callback with error for invalid CID input', (done) => { + 
ipfs.block.stat('INVALID CID', (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_CID') + done() + }) + }) + }) +}) diff --git a/test/core/bootstrap.spec.js b/test/core/bootstrap.spec.js index f092765354..72cae70840 100644 --- a/test/core/bootstrap.spec.js +++ b/test/core/bootstrap.spec.js @@ -58,8 +58,8 @@ describe('bootstrap', () => { '/ip6/2a03:b0c0:0:1010::23:1001/tcp/4001/ipfs/QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd', '/ip6/2a03:b0c0:1:d0::e7:1/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3pNLQpmmEk35v6wYtsMGLzSr5QBU3', '/ip6/2604:a880:1:20::1d9:6001/tcp/4001/ipfs/QmSoLju6m7xTh3DuokvT3886QRYqxAzb1kShaanJgW36yx', - '/dns4/wss0.bootstrap.libp2p.io/tcp/443/wss/ipfs/QmZMxNdpMkewiVZLMRxaNxUeZpDUb34pWjZ1kZvsd16Zic', - '/dns4/wss1.bootstrap.libp2p.io/tcp/443/wss/ipfs/Qmbut9Ywz9YEDrz8ySBSgWyJk41Uvm2QJPhwDJzJyGFsD6' + '/dns4/node0.preload.ipfs.io/tcp/443/wss/ipfs/QmZMxNdpMkewiVZLMRxaNxUeZpDUb34pWjZ1kZvsd16Zic', + '/dns4/node1.preload.ipfs.io/tcp/443/wss/ipfs/Qmbut9Ywz9YEDrz8ySBSgWyJk41Uvm2QJPhwDJzJyGFsD6' ] const updatedList = [ @@ -80,8 +80,8 @@ describe('bootstrap', () => { '/ip6/2a03:b0c0:0:1010::23:1001/tcp/4001/ipfs/QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd', '/ip6/2a03:b0c0:1:d0::e7:1/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3pNLQpmmEk35v6wYtsMGLzSr5QBU3', '/ip6/2604:a880:1:20::1d9:6001/tcp/4001/ipfs/QmSoLju6m7xTh3DuokvT3886QRYqxAzb1kShaanJgW36yx', - '/dns4/wss0.bootstrap.libp2p.io/tcp/443/wss/ipfs/QmZMxNdpMkewiVZLMRxaNxUeZpDUb34pWjZ1kZvsd16Zic', - '/dns4/wss1.bootstrap.libp2p.io/tcp/443/wss/ipfs/Qmbut9Ywz9YEDrz8ySBSgWyJk41Uvm2QJPhwDJzJyGFsD6', + '/dns4/node0.preload.ipfs.io/tcp/443/wss/ipfs/QmZMxNdpMkewiVZLMRxaNxUeZpDUb34pWjZ1kZvsd16Zic', + '/dns4/node1.preload.ipfs.io/tcp/443/wss/ipfs/Qmbut9Ywz9YEDrz8ySBSgWyJk41Uvm2QJPhwDJzJyGFsD6', '/ip4/111.111.111.111/tcp/1001/ipfs/QmXFX2P5ammdmXQgfqGkfswtEVFsZUJ5KeHRXQYCTdiTAb' ] diff --git a/test/core/create-node.spec.js b/test/core/create-node.spec.js index 5dcb86ace2..1d4b43ab2f 100644 --- a/test/core/create-node.spec.js +++ b/test/core/create-node.spec.js @@ -123,7 +123,7 @@ describe('create node', function () { }) }) - it('init: false errors (start default: true)', function (done) { + it('init: false errors (start default: true) and errors only once', function (done) { this.timeout(80 * 1000) const node = new IPFS({ @@ -135,10 +135,24 @@ describe('create node', function () { } } }) - node.once('error', (err) => { - expect(err).to.exist() - done() - }) + + const shouldHappenOnce = () => { + let timeoutId = null + + return (err) => { + expect(err).to.exist() + + // Bad news, this handler has been executed before + if (timeoutId) { + clearTimeout(timeoutId) + return done(new Error('error handler called multiple times')) + } + + timeoutId = setTimeout(done, 100) + } + } + + node.on('error', shouldHappenOnce()) }) it('init: false, start: false', function (done) { diff --git a/test/core/dag.spec.js b/test/core/dag.spec.js new file mode 100644 index 0000000000..1c71d79943 --- /dev/null +++ b/test/core/dag.spec.js @@ -0,0 +1,67 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) + +const IPFSFactory = require('ipfsd-ctl') +const IPFS = require('../../src/core') + +describe('dag', () => { + let ipfsd, ipfs + + before(function (done) { + this.timeout(20 * 1000) + + const factory = IPFSFactory.create({ type: 'proc' }) + + factory.spawn({ + exec: IPFS, + initOptions: { bits: 512 } + 
}, (err, _ipfsd) => { + expect(err).to.not.exist() + ipfsd = _ipfsd + ipfs = _ipfsd.api + done() + }) + }) + + after((done) => { + if (ipfsd) { + ipfsd.stop(done) + } else { + done() + } + }) + + describe('get', () => { + it('should callback with error for invalid string CID input', (done) => { + ipfs.dag.get('INVALID CID', (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_CID') + done() + }) + }) + + it('should callback with error for invalid buffer CID input', (done) => { + ipfs.dag.get(Buffer.from('INVALID CID'), (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_CID') + done() + }) + }) + }) + + describe('tree', () => { + it('should callback with error for invalid CID input', (done) => { + ipfs.dag.tree('INVALID CID', (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_CID') + done() + }) + }) + }) +}) diff --git a/test/core/dht.spec.js b/test/core/dht.spec.js new file mode 100644 index 0000000000..0f08b15bb1 --- /dev/null +++ b/test/core/dht.spec.js @@ -0,0 +1,49 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) + +const IPFSFactory = require('ipfsd-ctl') +const IPFS = require('../../src/core') + +describe('dht', () => { + let ipfsd, ipfs + + before(function (done) { + this.timeout(20 * 1000) + + const factory = IPFSFactory.create({ type: 'proc' }) + + factory.spawn({ + exec: IPFS, + initOptions: { bits: 512 } + }, (err, _ipfsd) => { + expect(err).to.not.exist() + ipfsd = _ipfsd + ipfs = _ipfsd.api + done() + }) + }) + + after((done) => { + if (ipfsd) { + ipfsd.stop(done) + } else { + done() + } + }) + + describe('findprovs', () => { + it('should callback with error for invalid CID input', (done) => { + ipfs.dht.findprovs('INVALID CID', (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_CID') + done() + }) + }) + }) +}) diff --git a/test/core/files.spec.js b/test/core/files.spec.js new file mode 100644 index 0000000000..e34ecbed2c --- /dev/null +++ b/test/core/files.spec.js @@ -0,0 +1,78 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) + +const pull = require('pull-stream') +const IPFSFactory = require('ipfsd-ctl') +const IPFS = require('../../src/core') + +describe('files', () => { + let ipfsd, ipfs + + before(function (done) { + this.timeout(20 * 1000) + + const factory = IPFSFactory.create({ type: 'proc' }) + + factory.spawn({ + exec: IPFS, + initOptions: { bits: 512 } + }, (err, _ipfsd) => { + expect(err).to.not.exist() + ipfsd = _ipfsd + ipfs = _ipfsd.api + done() + }) + }) + + after((done) => { + if (ipfsd) { + ipfsd.stop(done) + } else { + done() + } + }) + + describe('get', () => { + it('should callback with error for invalid IPFS path input', (done) => { + const invalidPath = null + ipfs.files.get(invalidPath, (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_PATH') + done() + }) + }) + }) + + describe('getReadableStream', () => { + it('should return erroring stream for invalid IPFS path input', (done) => { + const invalidPath = null + const stream = ipfs.files.getReadableStream(invalidPath) + + stream.on('error', (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_PATH') + done() + }) + }) + }) + + 
describe('getPullStream', () => { + it('should return erroring stream for invalid IPFS path input', (done) => { + const invalidPath = null + pull( + ipfs.files.getPullStream(invalidPath), + pull.collect((err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_PATH') + done() + }) + ) + }) + }) +}) diff --git a/test/core/interface.spec.js b/test/core/interface.spec.js index 73742776a7..8ac1655b97 100644 --- a/test/core/interface.spec.js +++ b/test/core/interface.spec.js @@ -1,11 +1,27 @@ -/* eslint-env mocha */ +/* eslint-env mocha, browser */ 'use strict' const tests = require('interface-ipfs-core') const CommonFactory = require('../utils/interface-common-factory') const isNode = require('detect-node') +const dnsFetchStub = require('../utils/dns-fetch-stub') describe('interface-ipfs-core tests', () => { + // ipfs.dns in the browser calls out to https://ipfs.io/api/v0/dns. + // The following code stubs self.fetch to return a static CID for calls + // to https://ipfs.io/api/v0/dns?arg=ipfs.io. + if (!isNode) { + const fetch = self.fetch + + before(() => { + self.fetch = dnsFetchStub(fetch) + }) + + after(() => { + self.fetch = fetch + }) + } + const defaultCommonFactory = CommonFactory.create() tests.bitswap(defaultCommonFactory, { skip: !isNode }) @@ -36,7 +52,18 @@ describe('interface-ipfs-core tests', () => { tests.miscellaneous(CommonFactory.create({ // No need to stop, because the test suite does a 'stop' test. createTeardown: () => cb => cb() - })) + }), { + skip: [ + { + name: 'should resolve an IPNS DNS link', + reason: 'TODO IPNS not implemented yet' + }, + { + name: 'should resolve IPNS link recursively', + reason: 'TODO IPNS not implemented yet' + } + ] + }) tests.object(defaultCommonFactory) diff --git a/test/core/mfs-preload.spec.js b/test/core/mfs-preload.spec.js new file mode 100644 index 0000000000..d35bffd628 --- /dev/null +++ b/test/core/mfs-preload.spec.js @@ -0,0 +1,56 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) + +const mfsPreload = require('../../src/core/mfs-preload') + +const createMockFilesStat = (cids = []) => { + let n = 0 + return (path, cb) => cb(null, { hash: cids[n++] || 'QmHash' }) +} + +const createMockPreload = () => { + return function preload (cid, cb) { + preload.cids = preload.cids || [] + preload.cids.push(cid) + cb() + } +} + +describe('MFS preload', () => { + it('should preload MFS root periodically', function (done) { + this.timeout(80 * 1000) + + // CIDs returned from our mock files.stat function + const statCids = ['QmInitial', 'QmSame', 'QmSame', 'QmUpdated'] + // The CIDs we expect to have been preloaded + const expectedPreloadCids = ['QmSame', 'QmUpdated'] + + const mockPreload = createMockPreload() + const mockFilesStat = createMockFilesStat(statCids) + const mockIpfs = { files: { stat: mockFilesStat }, _preload: mockPreload } + + const interval = 10 + const preloader = mfsPreload(mockIpfs, { interval }) + + preloader.start((err) => { + expect(err).to.not.exist() + + setTimeout(() => { + preloader.stop((err) => { + expect(err).to.not.exist() + expect( + // Slice off any extra CIDs it processed + mockPreload.cids.slice(0, expectedPreloadCids.length) + ).to.deep.equal(expectedPreloadCids) + done() + }) + }, statCids.length * (interval * 2)) + }) + }) +}) diff --git a/test/core/object.spec.js b/test/core/object.spec.js new file mode 100644 index 
0000000000..d126bb94b6 --- /dev/null +++ b/test/core/object.spec.js @@ -0,0 +1,59 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) + +const IPFSFactory = require('ipfsd-ctl') +const IPFS = require('../../src/core') + +describe('object', () => { + let ipfsd, ipfs + + before(function (done) { + this.timeout(20 * 1000) + + const factory = IPFSFactory.create({ type: 'proc' }) + + factory.spawn({ + exec: IPFS, + initOptions: { bits: 512 } + }, (err, _ipfsd) => { + expect(err).to.not.exist() + ipfsd = _ipfsd + ipfs = _ipfsd.api + done() + }) + }) + + after((done) => { + if (ipfsd) { + ipfsd.stop(done) + } else { + done() + } + }) + + describe('get', () => { + it('should callback with error for invalid CID input', (done) => { + ipfs.object.get('INVALID CID', (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_CID') + done() + }) + }) + }) + + describe('put', () => { + it('should callback with error for invalid CID input', (done) => { + ipfs.object.put({ multihash: 'INVALID CID' }, (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_CID') + done() + }) + }) + }) +}) diff --git a/test/core/pin-set.js b/test/core/pin-set.js index 78f30b9c65..64b8fb1389 100644 --- a/test/core/pin-set.js +++ b/test/core/pin-set.js @@ -31,7 +31,7 @@ function createNodes (num, callback) { const items = [] for (let i = 0; i < num; i++) { items.push(cb => - createNode(String(i), (err, node) => cb(err, node._multihash)) + createNode(String(i), (err, node) => cb(err, node.multihash)) ) } @@ -53,7 +53,7 @@ describe('pinSet', function () { let repo before(function (done) { - this.timeout(20 * 1000) + this.timeout(80 * 1000) repo = createTempRepo() ipfs = new IPFS({ repo }) ipfs.on('ready', () => { @@ -63,7 +63,7 @@ describe('pinSet', function () { }) after(function (done) { - this.timeout(20 * 1000) + this.timeout(80 * 1000) ipfs.stop(done) }) @@ -73,7 +73,7 @@ describe('pinSet', function () { createNode('data', (err, node) => { expect(err).to.not.exist() - const nodeHash = node._multihash + const nodeHash = node.multihash pinSet.storeSet([nodeHash], (err, rootNode) => { expect(err).to.not.exist() const node = rootNode.toJSON() diff --git a/test/core/pin.js b/test/core/pin.js index 32618422a0..2c1b87e329 100644 --- a/test/core/pin.js +++ b/test/core/pin.js @@ -81,7 +81,10 @@ describe('pin', function () { }) }) - after(done => ipfs.stop(done)) + after(function (done) { + this.timeout(20 * 1000) + ipfs.stop(done) + }) describe('isPinnedWithType', function () { beforeEach(function () { diff --git a/test/core/pin.spec.js b/test/core/pin.spec.js new file mode 100644 index 0000000000..3b530db147 --- /dev/null +++ b/test/core/pin.spec.js @@ -0,0 +1,57 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) + +const IPFSFactory = require('ipfsd-ctl') +const IPFS = require('../../src/core') + +describe('pin', () => { + let ipfsd, ipfs + + before(function (done) { + this.timeout(20 * 1000) + + const factory = IPFSFactory.create({ type: 'proc' }) + + factory.spawn({ + exec: IPFS, + initOptions: { bits: 512 } + }, (err, _ipfsd) => { + expect(err).to.not.exist() + ipfsd = _ipfsd + ipfs = _ipfsd.api + done() + }) + }) + + after((done) => { + if (ipfsd) { + ipfsd.stop(done) + } 
else { + done() + } + }) + + describe('ls', () => { + it('should callback with error for invalid non-string pin type option', (done) => { + ipfs.pin.ls({ type: 6 }, (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_PIN_TYPE') + done() + }) + }) + + it('should callback with error for invalid string pin type option', (done) => { + ipfs.pin.ls({ type: '__proto__' }, (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_PIN_TYPE') + done() + }) + }) + }) +}) diff --git a/test/core/preload.spec.js b/test/core/preload.spec.js new file mode 100644 index 0000000000..0d105f2837 --- /dev/null +++ b/test/core/preload.spec.js @@ -0,0 +1,323 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const path = require('path') +const os = require('os') +const hat = require('hat') +const CID = require('cids') +const parallel = require('async/parallel') +const waterfall = require('async/waterfall') +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) + +const MockPreloadNode = require('../utils/mock-preload-node') +const IPFS = require('../../src') + +describe('preload', () => { + let ipfs + + before(function (done) { + this.timeout(10 * 1000) + + ipfs = new IPFS({ + repo: path.join(os.tmpdir(), hat()), + config: { + Addresses: { + Swarm: [] + } + }, + preload: { + enabled: true, + addresses: [MockPreloadNode.defaultAddr] + } + }) + + ipfs.on('ready', done) + }) + + afterEach((done) => MockPreloadNode.clearPreloadCids(done)) + + after((done) => ipfs.stop(done)) + + it('should preload content added with files.add', (done) => { + ipfs.files.add(Buffer.from(hat()), (err, res) => { + expect(err).to.not.exist() + MockPreloadNode.waitForCids(res[0].hash, done) + }) + }) + + it('should preload multiple content added with files.add', (done) => { + ipfs.files.add([{ + content: Buffer.from(hat()) + }, { + content: Buffer.from(hat()) + }, { + content: Buffer.from(hat()) + }], (err, res) => { + expect(err).to.not.exist() + MockPreloadNode.waitForCids(res.map(file => file.hash), done) + }) + }) + + it('should preload multiple content and intermediate dirs added with files.add', (done) => { + ipfs.files.add([{ + path: 'dir0/dir1/file0', + content: Buffer.from(hat()) + }, { + path: 'dir0/dir1/file1', + content: Buffer.from(hat()) + }, { + path: 'dir0/file2', + content: Buffer.from(hat()) + }], (err, res) => { + expect(err).to.not.exist() + + const rootDir = res.find(file => file.path === 'dir0') + expect(rootDir).to.exist() + + MockPreloadNode.waitForCids(rootDir.hash, done) + }) + }) + + it('should preload multiple content and wrapping dir for content added with files.add and wrapWithDirectory option', (done) => { + ipfs.files.add([{ + path: 'dir0/dir1/file0', + content: Buffer.from(hat()) + }, { + path: 'dir0/dir1/file1', + content: Buffer.from(hat()) + }, { + path: 'dir0/file2', + content: Buffer.from(hat()) + }], { wrapWithDirectory: true }, (err, res) => { + expect(err).to.not.exist() + + const wrappingDir = res.find(file => file.path === '') + expect(wrappingDir).to.exist() + + MockPreloadNode.waitForCids(wrappingDir.hash, done) + }) + }) + + it('should preload content retrieved with files.cat', (done) => { + ipfs.files.add(Buffer.from(hat()), { preload: false }, (err, res) => { + expect(err).to.not.exist() + ipfs.files.cat(res[0].hash, (err) => { + expect(err).to.not.exist() + MockPreloadNode.waitForCids(res[0].hash, done) + }) + }) + }) + + it('should preload 
content retrieved with files.get', (done) => { + ipfs.files.add(Buffer.from(hat()), { preload: false }, (err, res) => { + expect(err).to.not.exist() + ipfs.files.get(res[0].hash, (err) => { + expect(err).to.not.exist() + MockPreloadNode.waitForCids(res[0].hash, done) + }) + }) + }) + + it('should preload content retrieved with ls', (done) => { + ipfs.files.add([{ + path: 'dir0/dir1/file0', + content: Buffer.from(hat()) + }, { + path: 'dir0/dir1/file1', + content: Buffer.from(hat()) + }, { + path: 'dir0/file2', + content: Buffer.from(hat()) + }], { wrapWithDirectory: true }, (err, res) => { + expect(err).to.not.exist() + + const wrappingDir = res.find(file => file.path === '') + expect(wrappingDir).to.exist() + + ipfs.ls(wrappingDir.hash, (err) => { + expect(err).to.not.exist() + MockPreloadNode.waitForCids(wrappingDir.hash, done) + }) + }) + }) + + it('should preload content added with object.new', (done) => { + ipfs.object.new((err, node) => { + expect(err).to.not.exist() + + const cid = new CID(node.multihash) + MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done) + }) + }) + + it('should preload content added with object.put', (done) => { + ipfs.object.put({ Data: Buffer.from(hat()), Links: [] }, (err, node) => { + expect(err).to.not.exist() + + const cid = new CID(node.multihash) + MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done) + }) + }) + + it('should preload content added with object.patch.addLink', (done) => { + parallel({ + parent: (cb) => ipfs.object.put({ Data: Buffer.from(hat()), Links: [] }, cb), + link: (cb) => ipfs.object.put({ Data: Buffer.from(hat()), Links: [] }, cb) + }, (err, nodes) => { + expect(err).to.not.exist() + + ipfs.object.patch.addLink(nodes.parent.multihash, { + name: 'link', + multihash: nodes.link.multihash, + size: nodes.link.size + }, (err, node) => { + expect(err).to.not.exist() + + const cid = new CID(node.multihash) + MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done) + }) + }) + }) + + it('should preload content added with object.patch.rmLink', (done) => { + waterfall([ + (cb) => ipfs.object.put({ Data: Buffer.from(hat()), Links: [] }, cb), + (link, cb) => { + ipfs.object.put({ + Data: Buffer.from(hat()), + Links: [{ + name: 'link', + multihash: link.multihash, + size: link.size + }] + }, cb) + } + ], (err, parent) => { + expect(err).to.not.exist() + + ipfs.object.patch.rmLink(parent.multihash, { name: 'link' }, (err, node) => { + expect(err).to.not.exist() + + const cid = new CID(node.multihash) + MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done) + }) + }) + }) + + it('should preload content added with object.patch.setData', (done) => { + ipfs.object.put({ Data: Buffer.from(hat()), Links: [] }, (err, node) => { + expect(err).to.not.exist() + + ipfs.object.patch.setData(node.multihash, Buffer.from(hat()), (err, node) => { + expect(err).to.not.exist() + + const cid = new CID(node.multihash) + MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done) + }) + }) + }) + + it('should preload content added with object.patch.appendData', (done) => { + ipfs.object.put({ Data: Buffer.from(hat()), Links: [] }, (err, node) => { + expect(err).to.not.exist() + + ipfs.object.patch.appendData(node.multihash, Buffer.from(hat()), (err, node) => { + expect(err).to.not.exist() + + const cid = new CID(node.multihash) + MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done) + }) + }) + }) + + it('should preload content retrieved with object.get', (done) => { + ipfs.object.new(null, { preload: false }, (err, node) => { + 
expect(err).to.not.exist() + ipfs.object.get(node.multihash, (err) => { + expect(err).to.not.exist() + const cid = new CID(node.multihash) + MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done) + }) + }) + }) + + it('should preload content added with block.put', (done) => { + ipfs.block.put(Buffer.from(hat()), (err, block) => { + expect(err).to.not.exist() + MockPreloadNode.waitForCids(block.cid.toBaseEncodedString(), done) + }) + }) + + it('should preload content retrieved with block.get', (done) => { + ipfs.block.put(Buffer.from(hat()), { preload: false }, (err, block) => { + expect(err).to.not.exist() + ipfs.block.get(block.cid, (err) => { + expect(err).to.not.exist() + MockPreloadNode.waitForCids(block.cid.toBaseEncodedString(), done) + }) + }) + }) + + it('should preload content retrieved with block.stat', (done) => { + ipfs.block.put(Buffer.from(hat()), { preload: false }, (err, block) => { + expect(err).to.not.exist() + ipfs.block.stat(block.cid, (err) => { + expect(err).to.not.exist() + MockPreloadNode.waitForCids(block.cid.toBaseEncodedString(), done) + }) + }) + }) + + it('should preload content added with dag.put', (done) => { + const obj = { test: hat() } + ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' }, (err, cid) => { + expect(err).to.not.exist() + MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done) + }) + }) + + it('should preload content retrieved with dag.get', (done) => { + const obj = { test: hat() } + const opts = { format: 'dag-cbor', hashAlg: 'sha2-256', preload: false } + ipfs.dag.put(obj, opts, (err, cid) => { + expect(err).to.not.exist() + ipfs.dag.get(cid, (err) => { + expect(err).to.not.exist() + MockPreloadNode.waitForCids(cid.toBaseEncodedString(), done) + }) + }) + }) + + it('should not preload if disabled', function (done) { + this.timeout(10 * 1000) + + const ipfs = new IPFS({ + repo: path.join(os.tmpdir(), hat()), + config: { + Addresses: { + Swarm: [] + } + }, + preload: { + enabled: false, + addresses: [MockPreloadNode.defaultAddr] + } + }) + + ipfs.on('ready', () => { + ipfs.files.add(Buffer.from(hat()), (err, res) => { + expect(err).to.not.exist() + + MockPreloadNode.waitForCids(res[0].hash, (err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_TIMEOUT') + done() + }) + }) + }) + }) +}) diff --git a/test/core/stats.spec.js b/test/core/stats.spec.js new file mode 100644 index 0000000000..15e872efaf --- /dev/null +++ b/test/core/stats.spec.js @@ -0,0 +1,53 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) + +const pull = require('pull-stream') +const IPFSFactory = require('ipfsd-ctl') +const IPFS = require('../../src/core') + +describe('stats', () => { + let ipfsd, ipfs + + before(function (done) { + this.timeout(20 * 1000) + + const factory = IPFSFactory.create({ type: 'proc' }) + + factory.spawn({ + exec: IPFS, + initOptions: { bits: 512 } + }, (err, _ipfsd) => { + expect(err).to.not.exist() + ipfsd = _ipfsd + ipfs = _ipfsd.api + done() + }) + }) + + after((done) => { + if (ipfsd) { + ipfsd.stop(done) + } else { + done() + } + }) + + describe('bwPullStream', () => { + it('should return erroring stream for invalid interval option', (done) => { + pull( + ipfs.stats.bwPullStream({ poll: true, interval: 'INVALID INTERVAL' }), + pull.collect((err) => { + expect(err).to.exist() + expect(err.code).to.equal('ERR_INVALID_POLL_INTERVAL') + done() + }) + ) + 
}) + }) +}) diff --git a/test/core/utils.js b/test/core/utils.js index b5c84b15c1..af73c30af6 100644 --- a/test/core/utils.js +++ b/test/core/utils.js @@ -157,4 +157,71 @@ describe('utils', () => { }) }) }) + + describe('parseChunkerString', () => { + it('handles an empty string', () => { + const options = utils.parseChunkerString('') + expect(options).to.have.property('chunker').to.equal('fixed') + }) + + it('handles a null chunker string', () => { + const options = utils.parseChunkerString(null) + expect(options).to.have.property('chunker').to.equal('fixed') + }) + + it('parses a fixed size string', () => { + const options = utils.parseChunkerString('size-512') + expect(options).to.have.property('chunker').to.equal('fixed') + expect(options) + .to.have.property('chunkerOptions') + .to.have.property('maxChunkSize') + .to.equal(512) + }) + + it('parses a rabin string without size', () => { + const options = utils.parseChunkerString('rabin') + expect(options).to.have.property('chunker').to.equal('rabin') + expect(options) + .to.have.property('chunkerOptions') + .to.have.property('avgChunkSize') + }) + + it('parses a rabin string with only avg size', () => { + const options = utils.parseChunkerString('rabin-512') + expect(options).to.have.property('chunker').to.equal('rabin') + expect(options) + .to.have.property('chunkerOptions') + .to.have.property('avgChunkSize') + .to.equal(512) + }) + + it('parses a rabin string with min, avg, and max', () => { + const options = utils.parseChunkerString('rabin-42-92-184') + expect(options).to.have.property('chunker').to.equal('rabin') + expect(options).to.have.property('chunkerOptions') + expect(options.chunkerOptions).to.have.property('minChunkSize').to.equal(42) + expect(options.chunkerOptions).to.have.property('avgChunkSize').to.equal(92) + expect(options.chunkerOptions).to.have.property('maxChunkSize').to.equal(184) + }) + + it('throws an error for unsupported chunker type', () => { + const fn = () => utils.parseChunkerString('fake-512') + expect(fn).to.throw(Error) + }) + + it('throws an error for incorrect format string', () => { + const fn = () => utils.parseChunkerString('fixed-abc') + expect(fn).to.throw(Error) + }) + + it('throws an error for incorrect rabin format string', () => { + let fn = () => utils.parseChunkerString('rabin-1-2-3-4') + expect(fn).to.throw(Error) + }) + + it('throws an error for non integer rabin parameters', () => { + const fn = () => utils.parseChunkerString('rabin-abc') + expect(fn).to.throw(Error) + }) + }) }) diff --git a/test/fixtures/go-ipfs-repo/config b/test/fixtures/go-ipfs-repo/config index 00f467f95f..162598cdfc 100644 --- a/test/fixtures/go-ipfs-repo/config +++ b/test/fixtures/go-ipfs-repo/config @@ -64,8 +64,8 @@ "/ip6/2a03:b0c0:0:1010::23:1001/tcp/4001/ipfs/QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd", "/ip6/2a03:b0c0:1:d0::e7:1/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3pNLQpmmEk35v6wYtsMGLzSr5QBU3", "/ip6/2604:a880:1:20::1d9:6001/tcp/4001/ipfs/QmSoLju6m7xTh3DuokvT3886QRYqxAzb1kShaanJgW36yx", - "/dns4/wss0.bootstrap.libp2p.io/tcp/443/wss/ipfs/QmZMxNdpMkewiVZLMRxaNxUeZpDUb34pWjZ1kZvsd16Zic", - "/dns4/wss1.bootstrap.libp2p.io/tcp/443/wss/ipfs/Qmbut9Ywz9YEDrz8ySBSgWyJk41Uvm2QJPhwDJzJyGFsD6" + "/dns4/node0.preload.ipfs.io/tcp/443/wss/ipfs/QmZMxNdpMkewiVZLMRxaNxUeZpDUb34pWjZ1kZvsd16Zic", + "/dns4/node1.preload.ipfs.io/tcp/443/wss/ipfs/Qmbut9Ywz9YEDrz8ySBSgWyJk41Uvm2QJPhwDJzJyGFsD6" ], "Tour": { "Last": "" @@ -106,4 +106,4 @@ "hash": "sha2-512" } } -} \ No newline at end of file +} diff --git 
a/test/fixtures/go-ipfs-repo/version b/test/fixtures/go-ipfs-repo/version index 62f9457511..c7930257df 100644 --- a/test/fixtures/go-ipfs-repo/version +++ b/test/fixtures/go-ipfs-repo/version @@ -1 +1 @@ -6 \ No newline at end of file +7 \ No newline at end of file diff --git a/test/fixtures/greater-than-default-max-chunk-size b/test/fixtures/greater-than-default-max-chunk-size new file mode 100644 index 0000000000..27347e687b Binary files /dev/null and b/test/fixtures/greater-than-default-max-chunk-size differ diff --git a/test/fixtures/less-than-default-max-chunk-size b/test/fixtures/less-than-default-max-chunk-size new file mode 100644 index 0000000000..ea6bec3f5f Binary files /dev/null and b/test/fixtures/less-than-default-max-chunk-size differ diff --git a/test/gateway/index.js b/test/gateway/index.js index 84ce39914a..1c319671e5 100644 --- a/test/gateway/index.js +++ b/test/gateway/index.js @@ -19,7 +19,9 @@ const directoryContent = { 'nested-folder/hello.txt': loadFixture('test/gateway/test-folder/nested-folder/hello.txt'), 'nested-folder/ipfs.txt': loadFixture('test/gateway/test-folder/nested-folder/ipfs.txt'), 'nested-folder/nested.html': loadFixture('test/gateway/test-folder/nested-folder/nested.html'), - 'cat-folder/cat.jpg': loadFixture('test/gateway/test-folder/cat-folder/cat.jpg') + 'cat-folder/cat.jpg': loadFixture('test/gateway/test-folder/cat-folder/cat.jpg'), + 'unsniffable-folder/hexagons-xml.svg': loadFixture('test/gateway/test-folder/unsniffable-folder/hexagons-xml.svg'), + 'unsniffable-folder/hexagons.svg': loadFixture('test/gateway/test-folder/unsniffable-folder/hexagons.svg') } describe('HTTP Gateway', function () { @@ -113,6 +115,22 @@ describe('HTTP Gateway', function () { expect(file.hash).to.equal(expectedMultihash) cb() }) + }, + (cb) => { + const expectedMultihash = 'QmVZoGxDvKM9KExc8gaL4uTbhdNtWhzQR7ndrY7J1gWs3F' + + let dir = [ + content('unsniffable-folder/hexagons-xml.svg'), + content('unsniffable-folder/hexagons.svg') + ] + + http.api.node.files.add(dir, (err, res) => { + expect(err).to.not.exist() + const file = res[res.length - 2] + expect(file.path).to.equal('test-folder/unsniffable-folder') + expect(file.hash).to.equal(expectedMultihash) + cb() + }) } ], done) }) @@ -166,7 +184,7 @@ describe('HTTP Gateway', function () { }) }) - it('load a non text file', (done) => { + it('load a jpg file', (done) => { let kitty = 'QmW2WQi7j6c7UgJTarActp7tDNikE4B2qXtFCfLPdsgaTQ/cat.jpg' gateway.inject({ @@ -184,6 +202,34 @@ describe('HTTP Gateway', function () { }) }) + it('load a svg file (unsniffable)', (done) => { + let hexagons = 'QmVZoGxDvKM9KExc8gaL4uTbhdNtWhzQR7ndrY7J1gWs3F/hexagons.svg' + + gateway.inject({ + method: 'GET', + url: '/ipfs/' + hexagons + }, (res) => { + expect(res.statusCode).to.equal(200) + expect(res.headers['content-type']).to.equal('image/svg+xml') + + done() + }) + }) + + it('load a svg file with xml leading declaration (unsniffable)', (done) => { + let hexagons = 'QmVZoGxDvKM9KExc8gaL4uTbhdNtWhzQR7ndrY7J1gWs3F/hexagons-xml.svg' + + gateway.inject({ + method: 'GET', + url: '/ipfs/' + hexagons + }, (res) => { + expect(res.statusCode).to.equal(200) + expect(res.headers['content-type']).to.equal('image/svg+xml') + + done() + }) + }) + it('load a directory', (done) => { let dir = 'QmW2WQi7j6c7UgJTarActp7tDNikE4B2qXtFCfLPdsgaTQ/' diff --git a/test/gateway/test-folder/unsniffable-folder/hexagons-xml.svg b/test/gateway/test-folder/unsniffable-folder/hexagons-xml.svg new file mode 100644 index 0000000000..fe6b79dfd2 --- /dev/null +++ 
b/test/gateway/test-folder/unsniffable-folder/hexagons-xml.svg
@@ -0,0 +1,56 @@
+[56 lines of SVG markup omitted]
diff --git a/test/gateway/test-folder/unsniffable-folder/hexagons.svg b/test/gateway/test-folder/unsniffable-folder/hexagons.svg
new file mode 100644
index 0000000000..557d927b39
--- /dev/null
+++ b/test/gateway/test-folder/unsniffable-folder/hexagons.svg
@@ -0,0 +1,55 @@
+[55 lines of SVG markup omitted]
diff --git a/test/http-api/interface.js b/test/http-api/interface.js
index e123093687..ac33dfb370 100644
--- a/test/http-api/interface.js
+++ b/test/http-api/interface.js
@@ -37,7 +37,18 @@ describe('interface-ipfs-core over ipfs-api tests', () => {
   tests.miscellaneous(CommonFactory.create({
     // No need to stop, because the test suite does a 'stop' test.
     createTeardown: () => cb => cb()
-  }))
+  }), {
+    skip: [
+      {
+        name: 'should resolve an IPNS DNS link',
+        reason: 'TODO IPNS not implemented yet'
+      },
+      {
+        name: 'should resolve IPNS link recursively',
+        reason: 'TODO IPNS not implemented yet'
+      }
+    ]
+  })
 
   tests.object(defaultCommonFactory)
 
diff --git a/test/utils/dns-fetch-stub.js b/test/utils/dns-fetch-stub.js
new file mode 100644
index 0000000000..a1e24a122c
--- /dev/null
+++ b/test/utils/dns-fetch-stub.js
@@ -0,0 +1,16 @@
+'use strict'
+
+// Create a fetch stub with a fall through to the provided fetch implementation
+// if the URL doesn't match https://ipfs.io/api/v0/dns?arg=ipfs.io.
+module.exports = (fetch) => {
+  return function () {
+    if (arguments[0].startsWith('https://ipfs.io/api/v0/dns?arg=ipfs.io')) {
+      return Promise.resolve({
+        json: () => Promise.resolve({
+          Path: '/ipfs/QmYNQJoKGNHTpPxCBPh9KkDpaExgd2duMa3aF6ytMpHdao'
+        })
+      })
+    }
+    return fetch.apply(this, arguments)
+  }
+}
diff --git a/test/utils/mock-preload-node.js b/test/utils/mock-preload-node.js
new file mode 100644
index 0000000000..795413a31d
--- /dev/null
+++ b/test/utils/mock-preload-node.js
@@ -0,0 +1,158 @@
+/* eslint-env browser */
+'use strict'
+
+const http = require('http')
+const toUri = require('multiaddr-to-uri')
+const URL = require('url').URL || self.URL
+const errCode = require('err-code')
+
+const defaultPort = 1138
+const defaultAddr = `/dnsaddr/localhost/tcp/${defaultPort}`
+
+module.exports.defaultAddr = defaultAddr
+
+// Create a mock preload IPFS node with a gateway that'll respond 200 to a
+// request for /api/v0/refs?arg=*. It remembers the preload CIDs it has been
+// called with, and you can ask it for them and also clear them by issuing a
+// GET/DELETE request to /cids.
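+//
+// Routes handled by the server created below:
+//   any method /api/v0/refs?arg=<cid>  records <cid>, responds 200
+//   GET /cids                          responds 200 with a JSON array of recorded CIDs
+//   DELETE /cids                       responds 204 and clears the recorded CIDs
+//   anything else                      responds 500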
+module.exports.createNode = () => { + let cids = [] + + const server = http.createServer((req, res) => { + if (req.url.startsWith('/api/v0/refs')) { + const arg = new URL(`https://ipfs.io${req.url}`).searchParams.get('arg') + cids = cids.concat(arg) + } else if (req.method === 'DELETE' && req.url === '/cids') { + res.statusCode = 204 + cids = [] + } else if (req.method === 'GET' && req.url === '/cids') { + res.setHeader('Content-Type', 'application/json') + res.write(JSON.stringify(cids)) + } else { + res.statusCode = 500 + } + + res.end() + }) + + server.start = (opts, cb) => { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + return server.listen(Object.assign({ port: defaultPort }, opts), cb) + } + + server.stop = (cb) => server.close(cb) + + return server +} + +function parseMultiaddr (addr) { + if (!(addr.endsWith('http') || addr.endsWith('https'))) { + addr = addr + '/http' + } + return new URL(toUri(addr)) +} + +// Get the stored preload CIDs for the server at `addr` +const getPreloadCids = (addr, cb) => { + if (typeof addr === 'function') { + cb = addr + addr = defaultAddr + } + + addr = addr || defaultAddr + + const { protocol, hostname, port } = parseMultiaddr(addr) + + const req = http.get({ protocol, hostname, port, path: '/cids' }, (res) => { + if (res.statusCode !== 200) { + res.resume() + return cb(new Error('failed to get preloaded CIDs from mock preload node')) + } + + let data = '' + + res.on('error', cb) + res.on('data', chunk => { data += chunk }) + + res.on('end', () => { + let obj + try { + obj = JSON.parse(data) + } catch (err) { + return cb(err) + } + cb(null, obj) + }) + }) + + req.on('error', cb) +} + +module.exports.getPreloadCids = getPreloadCids + +// Clear the stored preload URLs for the server at `addr` +module.exports.clearPreloadCids = (addr, cb) => { + if (typeof addr === 'function') { + cb = addr + addr = defaultAddr + } + + addr = addr || defaultAddr + + const { protocol, hostname, port } = parseMultiaddr(addr) + + const req = http.request({ + method: 'DELETE', + protocol, + hostname, + port, + path: '/cids' + }, (res) => { + res.resume() + + if (res.statusCode !== 204) { + return cb(new Error('failed to clear CIDs from mock preload node')) + } + + cb() + }) + + req.on('error', cb) + req.end() +} + +// Wait for the passed CIDs to appear in the CID list from the preload node +module.exports.waitForCids = (cids, opts, cb) => { + if (typeof opts === 'function') { + cb = opts + opts = {} + } + + opts = opts || {} + opts.timeout = opts.timeout || 1000 + + cids = Array.isArray(cids) ? cids : [cids] + + const start = Date.now() + + const checkForCid = () => { + getPreloadCids(opts.addr, (err, preloadCids) => { + if (err) return cb(err) + + if (cids.every(cid => preloadCids.includes(cid))) { + return cb() + } + + if (Date.now() > start + opts.timeout) { + return cb(errCode(new Error('Timed out waiting for CIDs to be preloaded'), 'ERR_TIMEOUT')) + } + + setTimeout(checkForCid, 5) + }) + } + + checkForCid() +}
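
// Usage sketch (not part of the diff above): how the mock preload node helpers
// fit together in the tests. The CID string below is a placeholder; the real
// specs use the hashes returned by ipfs.files.add, dag.put, etc.
'use strict'

// Path relative to the repository root
const MockPreloadNode = require('./test/utils/mock-preload-node')

const preloadNode = MockPreloadNode.createNode()

preloadNode.start((err) => {
  if (err) throw err

  // A js-ipfs node configured with
  //   preload: { enabled: true, addresses: [MockPreloadNode.defaultAddr] }
  // will now hit this server's /api/v0/refs endpoint whenever it preloads content.

  // Poll the mock node until the expected CID has been requested for preload
  MockPreloadNode.waitForCids('QmPlaceholderCid', { timeout: 1000 }, (err) => {
    // err.code === 'ERR_TIMEOUT' if the CID never showed up within the timeout
    if (err) throw err

    // Reset the recorded CIDs between tests, then shut the server down
    MockPreloadNode.clearPreloadCids((err) => {
      if (err) throw err
      preloadNode.stop(() => {})
    })
  })
})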