This repository has been archived by the owner on Mar 10, 2020. It is now read-only.

feat: use new IPLD API #35

Closed
wants to merge 1 commit into from
7 changes: 4 additions & 3 deletions package.json
@@ -42,7 +42,7 @@
"detect-node": "^2.0.4",
"detect-webworker": "^1.0.0",
"dirty-chai": "^2.0.1",
"ipld": "~0.21.1",
"ipld": "~0.22.0",
"ipld-in-memory": "^2.0.0",
"multihashes": "~0.4.14",
"pull-buffer-stream": "^1.0.1",
@@ -58,14 +58,15 @@
"interface-datastore": "~0.6.0",
"ipfs-multipart": "~0.1.0",
"ipfs-unixfs": "~0.1.16",
"ipfs-unixfs-exporter": "~0.36.1",
"ipfs-unixfs-importer": "~0.38.5",
"ipfs-unixfs-exporter": "git+https:/ipfs/js-ipfs-unixfs-exporter.git#new-ipld-api",
"ipfs-unixfs-importer": "git+https:/ipfs/js-ipfs-unixfs-importer.git#new-ipld-api",
"ipld-dag-pb": "~0.15.2",
"is-pull-stream": "~0.0.0",
"is-stream": "^1.1.0",
"joi": "^14.3.0",
"joi-browser": "^13.4.0",
"mortice": "^1.2.1",
"multicodec": "~0.5.0",
"once": "^1.4.0",
"promisify-es6": "^1.0.3",
"pull-cat": "^1.1.11",
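For orientation, the remaining files all adapt to the promise-based IPLD API that ships with ipld ~0.22.0: `get` now resolves to the node itself instead of a `{ value, cid }` wrapper, and `put` takes the serialization format as a multicodec code plus an options object and resolves to the new CID. A minimal sketch of that calling convention, based on how the diffs below use it (the `context`, `cid` and `node` names are placeholders, not identifiers from this PR):

'use strict'

const multicodec = require('multicodec')

// Sketch only: the new promise-based calls, as used throughout this diff.
async function example (context, cid, node) {
  // get() resolves directly to the deserialized node
  const parent = await context.ipld.get(cid)

  // put() takes the format as a multicodec code and resolves to the CID
  const newCid = await context.ipld.put(node, multicodec.DAG_PB, {
    cidVersion: 0,
    hashAlg: multicodec.SHA2_256
  })

  return { parent, newCid }
}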
15 changes: 10 additions & 5 deletions src/core/cp.js
@@ -80,9 +80,12 @@ const copyToFile = (context, source, destination, destinationTrail, options, cal
const child = sourceTrail[sourceTrail.length - 1]

waterfall([
(next) => context.ipld.get(parent.cid, next),
(result, next) => addLink(context, {
parent: result.value,
(next) => context.ipld.get(parent.cid).then(
(node) => next(null, node),
(error) => next(error)
),
(node, next) => addLink(context, {
parent: node,
parentCid: parent.cid,
size: child.size,
cid: child.cid,
@@ -165,8 +168,10 @@ const copyToDirectory = (context, sources, destination, destinationTrail, option
const parent = destinationTrail[destinationTrail.length - 1]

waterfall([
(next) => context.ipld.get(parent.cid, next),
(result, next) => next(null, { cid: parent.cid, node: result.value })
(next) => context.ipld.get(parent.cid).then(
(node) => next(null, { cid: parent.cid, node }),
(error) => next(error)
)
].concat(
sourceTrails.map((sourceTrail, index) => {
return (parent, done) => {
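Because these modules still use `async/waterfall`, the promise-returning calls are bridged back to callbacks inline with `.then(onSuccess, onError)`. That pattern repeats in every file below; as a standalone sketch (the `toCallback` helper is hypothetical, not something this PR adds):

// Hypothetical helper showing the promise-to-callback bridge that this
// diff writes out by hand at each call site.
const toCallback = (promise, callback) => {
  promise.then(
    (result) => callback(null, result),
    (error) => callback(error)
  )
}

// e.g. a waterfall step loading a parent node:
// (next) => toCallback(context.ipld.get(parent.cid), next)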
54 changes: 30 additions & 24 deletions src/core/utils/add-link.js
@@ -15,6 +15,7 @@ const {
generatePath,
updateHamtDirectory
} = require('./hamt-utils')
const toMulticodecCode = require('./to-multicodec-code')

const defaultOptions = {
parent: undefined,
@@ -43,8 +44,10 @@ const addLink = (context, options, callback) => {
log('Loading parent node', options.parentCid.toBaseEncodedString())

return waterfall([
(cb) => context.ipld.get(options.parentCid, cb),
(result, cb) => cb(null, result.value),
(cb) => context.ipld.get(options.parentCid).then(
(node) => cb(null, node),
(error) => cb(error)
),
(node, cb) => addLink(context, {
...options,
parent: node
@@ -111,15 +114,18 @@ const addToDirectory = (context, options, callback) => {
(parent, done) => DAGNode.addLink(parent, new DAGLink(options.name, options.size, options.cid), done),
(parent, done) => {
// Persist the new parent DAGNode
context.ipld.put(parent, {
version: options.cidVersion,
format: options.codec,
hashAlg: options.hashAlg,
hashOnly: !options.flush
}, (error, cid) => done(error, {
node: parent,
cid
}))
context.ipld.put(
parent,
toMulticodecCode(options.codec),
{
cidVersion: options.cidVersion,
hashAlg: toMulticodecCode(options.hashAlg),
hashOnly: !options.flush
}
).then(
(cid) => done(null, { node: parent, cid }),
(error) => done(error)
)
}
], callback)
}
@@ -180,22 +186,22 @@ const updateShard = (context, positions, child, index, options, callback) => {

position++

context.ipld.get(shard.cid, (err, result) => {
if (err) {
return next(err)
}
const shardCid = shard.cid
context.ipld.get(shardCid).then(
(shardNode) => {
if (position < positions.length) {
const nextPrefix = positions[position].prefix
const nextShard = shardNode.links.find(link => link.name.substring(0, 2) === nextPrefix)

if (position < positions.length) {
const nextPrefix = positions[position].prefix
const nextShard = result.value.links.find(link => link.name.substring(0, 2) === nextPrefix)

if (nextShard) {
shard = nextShard
if (nextShard) {
shard = nextShard
}
}
}

next(err, { cid: result && result.cid, node: result && result.value })
})
next(null, { cid: shardCid, node: shardNode })
},
(error) => next(error)
)
},
done
)
20 changes: 12 additions & 8 deletions src/core/utils/create-node.js
@@ -5,18 +5,22 @@ const UnixFS = require('ipfs-unixfs')
const {
DAGNode
} = require('ipld-dag-pb')
const toMulticodecCode = require('./to-multicodec-code')

const createNode = (context, type, options, callback) => {
waterfall([
(done) => DAGNode.create(new UnixFS(type).marshal(), [], done),
(node, done) => context.ipld.put(node, {
version: options.cidVersion,
format: options.format,
hashAlg: options.hashAlg
}, (err, cid) => done(err, {
cid,
node
}))
(node, done) => context.ipld.put(
node,
toMulticodecCode(options.format),
{
cidVersion: options.cidVersion,
hashAlg: toMulticodecCode(options.hashAlg)
}
).then(
(cid) => done(null, { cid, node }),
(error) => done(error)
)
], callback)
}

86 changes: 44 additions & 42 deletions src/core/utils/hamt-utils.js
@@ -9,6 +9,7 @@ const Bucket = require('hamt-sharding/src/bucket')
const DirSharded = require('ipfs-unixfs-importer/src/importer/dir-sharded')
const log = require('debug')('ipfs:mfs:core:utils:hamt-utils')
const UnixFS = require('ipfs-unixfs')
const toMulticodecCode = require('./to-multicodec-code')

const updateHamtDirectory = (context, links, bucket, options, callback) => {
// update parent with new bit field
@@ -23,15 +24,18 @@ const updateHamtDirectory = (context, links, bucket, options, callback) => {
},
(parent, done) => {
// Persist the new parent DAGNode
context.ipld.put(parent, {
version: options.cidVersion,
format: options.codec,
hashAlg: options.hashAlg,
hashOnly: !options.flush
}, (error, cid) => done(error, {
node: parent,
cid
}))
context.ipld.put(
parent,
toMulticodecCode(options.codec),
{
cidVersion: options.cidVersion,
hashAlg: toMulticodecCode(options.hashAlg),
hashOnly: !options.flush
}
).then(
(cid) => done(null, { cid, node: parent }),
(error) => done(error)
)
}
], callback)
}
@@ -133,43 +137,41 @@ const generatePath = (context, fileName, rootNode, callback) => {

// found subshard
log(`Found subshard ${segment.prefix}`)
context.ipld.get(link.cid, (err, result) => {
if (err) {
return next(err)
}

// subshard hasn't been loaded, descend to the next level of the HAMT
if (!path[index - 1]) {
log(`Loaded new subshard ${segment.prefix}`)
const node = result.value

return recreateHamtLevel(node.links, rootBucket, segment.bucket, parseInt(segment.prefix, 16), async (err, bucket) => {
if (err) {
return next(err)
}

const position = await rootBucket._findNewBucketAndPos(fileName)

index++
path.unshift({
bucket: position.bucket,
prefix: toPrefix(position.pos),
node: node
context.ipld.get(link.cid).then(
(node) => {
// subshard hasn't been loaded, descend to the next level of the HAMT
if (!path[index - 1]) {
log(`Loaded new subshard ${segment.prefix}`)

return recreateHamtLevel(node.links, rootBucket, segment.bucket, parseInt(segment.prefix, 16), async (err, bucket) => {
if (err) {
return next(err)
}

const position = await rootBucket._findNewBucketAndPos(fileName)

index++
path.unshift({
bucket: position.bucket,
prefix: toPrefix(position.pos),
node: node
})

next()
})
}

next()
})
}

const nextSegment = path[index - 1]
const nextSegment = path[index - 1]

// add intermediate links to bucket
addLinksToHamtBucket(result.value.links, nextSegment.bucket, rootBucket, (error) => {
nextSegment.node = result.value
// add intermediate links to bucket
addLinksToHamtBucket(node.links, nextSegment.bucket, rootBucket, (error) => {
nextSegment.node = node

next(error)
})
})
next(error)
})
},
(error) => next(error)
)
},
async (err, path) => {
await rootBucket.put(fileName, true)
12 changes: 4 additions & 8 deletions src/core/utils/load-node.js
@@ -1,6 +1,5 @@
'use strict'

const waterfall = require('async/waterfall')
const CID = require('cids')
const log = require('debug')('ipfs:mfs:utils:load-node')

@@ -9,13 +8,10 @@ const loadNode = (context, dagLink, callback) => {

log(`Loading DAGNode for child ${cid.toBaseEncodedString()}`)

waterfall([
(cb) => context.ipld.get(cid, cb),
(result, cb) => cb(null, {
node: result.value,
cid
})
], callback)
context.ipld.get(cid).then(
(node) => callback(null, { cid, node }),
(error) => callback(error)
)
}

module.exports = loadNode
45 changes: 27 additions & 18 deletions src/core/utils/remove-link.js
@@ -12,6 +12,7 @@ const {
generatePath,
updateHamtDirectory
} = require('./hamt-utils')
const toMulticodecCode = require('./to-multicodec-code')

const defaultOptions = {
parent: undefined,
@@ -39,8 +40,10 @@ const removeLink = (context, options, callback) => {
log('Loading parent node', options.parentCid.toBaseEncodedString())

return waterfall([
(cb) => context.ipld.get(options.parentCid, cb),
(result, cb) => cb(null, result.value),
(cb) => context.ipld.get(options.parentCid).then(
(node) => cb(null, node),
(error) => cb(error)
),
(node, cb) => removeLink(context, {
...options,
parent: node
@@ -69,14 +72,17 @@ const removeFromDirectory = (context, options, callback) => {
waterfall([
(cb) => DAGNode.rmLink(options.parent, options.name, cb),
(newParentNode, cb) => {
context.ipld.put(newParentNode, {
version: options.cidVersion,
format: options.codec,
hashAlg: options.hashAlg
}, (error, cid) => cb(error, {
node: newParentNode,
cid
}))
context.ipld.put(
newParentNode,
toMulticodecCode(options.codec),
{
cidVersion: options.cidVersion,
hashAlg: toMulticodecCode(options.hashAlg)
}
).then(
(cid) => cb(null, { cid, node: newParentNode }),
(error) => cb(error)
)
},
(result, cb) => {
log('Updated regular directory', result.cid.toBaseEncodedString())
@@ -126,15 +132,18 @@ const updateShard = (context, positions, child, options, callback) => {
return waterfall([
(done) => DAGNode.rmLink(node, link.name, done),
(node, done) => {
context.ipld.put(node, {
version: options.cidVersion,
format: options.codec,
hashAlg: options.hashAlg,
hashOnly: !options.flush
}, (error, cid) => done(error, {
context.ipld.put(
node,
cid
}))
toMulticodecCode(options.codec),
{
cidVersion: options.cidVersion,
hashAlg: toMulticodecCode(options.hashAlg),
hashOnly: !options.flush
}
).then(
(cid) => done(null, { cid, node }),
(error) => done(error)
)
},
(result, done) => {
bucket.del(child.name)
16 changes: 16 additions & 0 deletions src/core/utils/to-multicodec-code.js
@@ -0,0 +1,16 @@
'use strict'

const multicodec = require('multicodec')

// Converts a multicodec name to the corresponding code if it isn't a code
// already
const toMulticodecCode = (name) => {
if (typeof name === 'string') {
const constantName = name.toUpperCase().replace(/-/g, '_')
return multicodec[constantName]
} else {
return name
}
}

module.exports = toMulticodecCode
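An illustrative use of the new helper (not part of the PR): multicodec names map to their numeric constants via the `multicodec` package, and values that are already codes pass through unchanged.

const multicodec = require('multicodec')
const toMulticodecCode = require('./to-multicodec-code')

toMulticodecCode('dag-pb')             // === multicodec.DAG_PB
toMulticodecCode('sha2-256')           // === multicodec.SHA2_256
toMulticodecCode(multicodec.DAG_PB)    // numeric code, returned as-is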