34 changes: 34 additions & 0 deletions API/dag/README.md
@@ -0,0 +1,34 @@
dag API
=======

#### `dag.put`

> Store an IPLD format node

##### `Go` **WIP**

##### `JavaScript` - ipfs.dag.put(dagNode, formatMulticodec, hashAlg, callback)

`dagNode` - a DAG node that follows one of the supported IPLD formats.

`formatMulticodec` - The IPLD format multicodec.

`hashAlg` - The hash algorithm to apply to the serialized `dagNode`.

`callback` must follow `function (err) {}` signature, where `err` is an error if the operation was not successful.

If no `callback` is passed, a [promise][] is returned.
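A minimal usage sketch in callback style (the node shape, the `dag-cbor` format and the `sha2-256` hash algorithm are illustrative assumptions, not part of this spec):

```js
// Illustrative only: store a plain object as a dag-cbor node,
// hashed with sha2-256.
const dagNode = {
  name: 'hello',
  payload: new Buffer('world') // any serializable field
}

ipfs.dag.put(dagNode, 'dag-cbor', 'sha2-256', (err) => {
  if (err) {
    return console.error(err)
  }
  console.log('IPLD node stored')
})
```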

#### `dag.get`

> Retrieve an IPLD format node

##### `Go` **WIP**

##### `JavaScript` - ipfs.dag.get(cid, callback)

`cid` is a [CID](https://github.com/ipfs/js-cid) instance.

`callback` must follow `function (err, dagNode) {}` signature, where `err` is an error if the operation was not successful and `dagNode` is the IPLD format DAG node retrieved.

If no `callback` is passed, a [promise][] is returned.
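A minimal retrieval sketch (assuming the `cids` module used elsewhere in this test suite; the CID string below is a placeholder, not a real DAG node):

```js
const CID = require('cids')

// Placeholder hash borrowed from the block tests; substitute the
// CID of a node previously stored with `dag.put`.
const cid = new CID('QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ')

ipfs.dag.get(cid, (err, dagNode) => {
  if (err) {
    return console.error(err)
  }
  console.log(dagNode)
})
```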
4 changes: 2 additions & 2 deletions package.json
@@ -2,7 +2,7 @@
"name": "interface-ipfs-core",
"version": "0.15.0",
"description": "A test suite and interface you can use to implement a IPFS core interface.",
"main": "lib/index.js",
"main": "src/index.js",
> **Contributor comment:** TODO: remove before merge
"jsnext:main": "src/index.js",
"scripts": {
"test": "exit(0)",
@@ -51,4 +51,4 @@
"greenkeeperio-bot <[email protected]>",
"nginnever <[email protected]>"
]
}
}
40 changes: 34 additions & 6 deletions src/block.js
@@ -6,6 +6,7 @@
const expect = require('chai').expect
const Block = require('ipfs-block')
const multihash = require('multihashes')
const CID = require('cids')

module.exports = (common) => {
describe('.block', () => {
@@ -34,23 +35,37 @@ module.exports = (common) => {
describe('callback API', () => {
it('.put a buffer', (done) => {
const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ'
const cid = new CID(expectedHash)
const blob = Buffer('blorb')

ipfs.block.put(blob, (err, block) => {
ipfs.block.put(blob, cid, (err, block) => {
expect(err).to.not.exist
expect(block.key).to.eql(multihash.fromB58String(expectedHash))
expect(block.key('sha2-256')).to.eql(multihash.fromB58String(expectedHash))
expect(block).to.have.a.property('data', blob)
done()
})
})

it('.put a block', (done) => {
const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ'
const cid = new CID(expectedHash)
const blob = new Block(new Buffer('blorb'))

ipfs.block.put(blob, cid, (err, block) => {
expect(err).to.not.exist
expect(block.key('sha2-256')).to.eql(multihash.fromB58String(expectedHash))
expect(block.data).to.eql(new Buffer('blorb'))
done()
})
})

it('.put a block (without using CID, legacy mode)', (done) => {
const expectedHash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ'
const blob = new Block(new Buffer('blorb'))

ipfs.block.put(blob, (err, block) => {
expect(err).to.not.exist
expect(block.key).to.eql(multihash.fromB58String(expectedHash))
expect(block.key('sha2-256')).to.eql(multihash.fromB58String(expectedHash))
expect(block.data).to.eql(new Buffer('blorb'))
done()
})
@@ -59,26 +74,39 @@ module.exports = (common) => {
it('.put error with array of blocks', () => {
const blob = Buffer('blorb')

ipfs.block.put([blob, blob], (err) => {
ipfs.block.put([blob, blob], 'fake cids', (err) => {
expect(err).to.be.an.instanceof(Error)
})
})

it('block.get', (done) => {
const hash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ'
const cid = new CID(hash)

ipfs.block.get(cid, (err, block) => {
expect(err).to.not.exist
expect(block.key('sha2-256')).to.eql(cid.multihash)
expect(block.data).to.eql(new Buffer('blorb'))
done()
})
})

it('block.get (without using CID, legacy mode)', (done) => {
const hash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ'

ipfs.block.get(hash, (err, block) => {
expect(err).to.not.exist
expect(block.key).to.eql(multihash.fromB58String(hash))
expect(block.key('sha2-256')).to.eql(multihash.fromB58String(hash))
expect(block.data).to.eql(new Buffer('blorb'))
done()
})
})

it('block.stat', (done) => {
const hash = 'QmPv52ekjS75L4JmHpXVeuJ5uX2ecSfSZo88NSyxwA3rAQ'
const cid = new CID(hash)

ipfs.block.stat(hash, (err, stats) => {
ipfs.block.stat(cid, (err, stats) => {
expect(err).to.not.exist
expect(stats).to.have.property('key')
expect(stats).to.have.property('size')
30 changes: 17 additions & 13 deletions src/object.js
@@ -4,7 +4,8 @@
'use strict'

const expect = require('chai').expect
const DAGNode = require('ipfs-merkle-dag').DAGNode
const dagPB = require('ipld-dag-pb')
const DAGNode = dagPB.DAGNode
const bs58 = require('bs58')

module.exports = (common) => {
@@ -89,7 +90,7 @@ module.exports = (common) => {

it('of protobuf encoded buffer', (done) => {
const dNode = new DAGNode(new Buffer('Some data'))
const buf = dNode.marshal()
const buf = dagPB.util.serialize(dNode)

ipfs.object.put(buf, { enc: 'protobuf' }, (err, node) => {
expect(err).to.not.exist
@@ -185,8 +186,8 @@ module.exports = (common) => {

ipfs.object.get(node1.multihash(), (err, node2) => {
expect(err).to.not.exist
// because js-ipfs-api can't infer if the returned Data is Buffer
// or String
// because js-ipfs-api can't infer if the
// returned Data is Buffer or String
if (typeof node2.data === 'string') {
node2.data = new Buffer(node2.data)
}
@@ -488,12 +489,13 @@ module.exports = (common) => {
let testNode
let testNodeWithLink
let testLink
before((done) => {
const obj = {
Data: new Buffer('patch test object'),
Links: []
}

const obj = {
Data: new Buffer('patch test object'),
Links: []
}

before((done) => {
ipfs.object.put(obj, (err, node) => {
expect(err).to.not.exist
testNode = node
@@ -502,12 +504,14 @@
})

it('.addLink', (done) => {
const dNode1 = testNode.copy()
const dNode1 = new DAGNode(obj.Data, obj.Links)
const dNode2 = new DAGNode(new Buffer('some other node'))
// note: we need to put the linked obj, otherwise IPFS won't timeout
// cause it needs the node to get its size

// note: we need to put the linked obj, otherwise IPFS won't
// timeout. Reason: it needs the node to get its size
ipfs.object.put(dNode2, (err) => {
expect(err).to.not.exist

dNode1.addNodeLink('link-to-node', dNode2)

ipfs.object.patch.addLink(testNode.multihash(), dNode1.links[0], (err, node3) => {
@@ -667,7 +671,7 @@
})

it('.addLink', () => {
const dNode1 = testNode.copy()
const dNode1 = dagPB.util.deserialize(dagPB.util.serialize(testNode))
const dNode2 = new DAGNode(new Buffer('some other node'))
// note: we need to put the linked obj, otherwise IPFS won't timeout
// cause it needs the node to get its size