diff --git a/README.md b/README.md
index eed8e2e..88295a8 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
 # Kappa View Query
 
-`kappa-view-query` is a materialised view to be used with kappa-core. It provides an API that allows you to define your own indexes and execute custom [map-filter-reduce](https://github.com/dominictarr/map-filter-reduce) queries over a collection of hypercores.
+`kappa-view-query` is a materialised view to be used with kappa-core. It provides an API that allows you to define your own indexes and execute custom [map-filter-reduce](https://github.com/dominictarr/map-filter-reduce) queries over your indexes.
 
 `kappa-view-query` is inspired by [flumeview-query](https://github.com/flumedb/flumeview-query). It uses the same scoring system for determining the most efficient index relevant to the provided query.
 
@@ -50,93 +50,119 @@ In the case of the above dataset and query, the closest matching index is the on
 ## Usage
 
+### Hypercore
+
+This example uses a single hypercore and collects all messages at a given point in time.
+
 ```js
-const kappa = require('kappa-core')
-const Query = require('./')
+const Kappa = require('kappa-core')
+const createHypercoreSource = require('kappa-core/sources/hypercore')
+const hypercore = require('hypercore')
 const ram = require('random-access-memory')
+const collect = require('collect-stream')
 const memdb = require('memdb')
+const sub = require('subleveldown')
+
+const Query = require('./')
+const { validator, fromHypercore } = require('./util')
+const { cleanup, tmp } = require('./test/util')
+
+const seedData = require('./test/seeds.json')
 
-// Initialised your kappa-core back-end
-const core = kappa(ram, { valueEncoding: 'json' })
+const core = new Kappa()
+const feed = hypercore(ram, { valueEncoding: 'json' })
 const db = memdb()
 
-// Define a validator or a message decoder to determine if a message should be indexed or not
-function validator (msg) {
-  if (typeof msg !== 'object') return null
-  if (typeof msg.value !== 'object') return null
-  if (typeof msg.value.timestamp !== 'number') return null
-  if (typeof msg.value.type !== 'string') return null
-  return msg
-}
+core.use('query', createHypercoreSource({ feed, db: sub(db, 'state') }), Query(sub(db, 'view'), {
+  indexes: [
+    { key: 'log', value: [['value', 'timestamp']] },
+    { key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }
+  ],
+  // you can pass a custom validator function to ensure all messages entering a feed match a specific format
+  validator,
+  // implement your own getMessage function, and perform any desired validation on each message returned by the query
+  getMessage: fromHypercore(feed)
+}))
+
+feed.append(seedData, (err, _) => {
+  core.ready('query', () => {
+    const query = [{ $filter: { value: { type: 'chat/message' } } }]
+
+    // grab then log all chat/message message types up until this point
+    collect(core.view.query.read({ query }), (err, chats) => {
+      if (err) return console.error(err)
+      console.log(chats)
+
+      // grab then log all user/about message types up until this point
+      collect(core.view.query.read({ query: [{ $filter: { value: { type: 'user/about' } } }] }), (err, users) => {
+        if (err) return console.error(err)
+        console.log(users)
+      })
+    })
+  })
+})
+```
 
-// here's an alternative using protocol buffers, assuming a message schema exists
+### Multifeed
 
-const { Message } = protobuf(fs.readFileSync(path.join(path.resolve(__dirname), 'message.proto')))
-
-function validator (msg) {
-  try { msg.value = Message.decode(msg.value) }
-  catch (err) { return console.error(err) && false }
-  return msg
-}
+This example uses a multifeed instance for managing hypercores and sets up two live streams to dump messages to the console as they arrive.
 
-// Define a set of indexes under a namespace
-const indexes = [
-  { key: 'log', value: [['value', 'timestamp']] },
-  { key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] },
-  { key: 'cha', value: [['value', 'type'], ['value', 'content', 'channel']] }
-]
-
-core.use('query', Query(db, { indexes, validator }))
-
-core.writer('local', (err, feed) => {
-  // Populate our feed with some messages
-  const data = [{
-    type: 'chat/message',
-    timestamp: Date.now(),
-    content: { body: 'Hi im new here...' }
-  }, {
-    type: 'user/about',
-    timestamp: Date.now(),
-    content: { name: 'Grace' }
-  }, {
-    type: 'chat/message',
-    timestamp: Date.now(),
-    content: { body: 'Second post' }
-  }, {
-    type: 'chat/message',
-    timestamp: Date.now(),
-    content: { channel: 'dogs', body: 'Lurchers rule' }
-  }, {
-    type: 'chat/message',
-    timestamp: Date.now(),
-    content: { channel: 'dogs', body: 'But sometimes I prefer labra-doodles' }
-  }, {
-    type: 'user/about',
-    timestamp: Date.now(),
-    content: { name: 'Poison Ivy' }
-  }]
-
-  feed.append(data, (err, seq) => {
-    // Define a query: filter where the message value contains type 'chat/message', and the content references the channel 'dogs'
-    const query = [{ $filter: { value: { type: 'chat/message', content: { channel: 'dogs' } } } }]
-
-    core.ready('query', () => {
-      // For static queries
-      collect(core.api.query.read({ query }), (err, msgs) => {
-        console.log(msgs)
-
-        // Logs all messages of type chat/message that reference the dogs channel, and order by timestamp...
-        // {
-        //  type: 'chat/message',
-        //  timestamp: 1561996331743,
-        //  content: { channel: 'dogs', body: 'Lurchers rule' }
-        // }
-        // {
-        //  type: 'chat/message',
-        //  timestamp: Date.now(),
-        //  content: { channel: 'dogs', body: 'But sometimes I prefer labra-doodles' }
-        // }
-      })
-    })
+```js
+const Kappa = require('kappa-core')
+const createMultifeedSource = require('kappa-core/sources/multifeed')
+const multifeed = require('multifeed')
+const ram = require('random-access-memory')
+const collect = require('collect-stream')
+const memdb = require('memdb')
+const sub = require('subleveldown')
+
+const Query = require('./')
+const { validator, fromMultifeed } = require('./util')
+const { cleanup, tmp } = require('./test/util')
+
+const seedData = require('./test/seeds.json')
+
+const core = new Kappa()
+const feeds = multifeed(ram, { valueEncoding: 'json' })
+const db = memdb()
+
+core.use('query', createMultifeedSource({ feeds, db: sub(db, 'state') }), Query(sub(db, 'view'), {
+  indexes: [
+    { key: 'log', value: [['value', 'timestamp']] },
+    { key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }
+  ],
+  validator,
+  // make sure you define your own getMessage function, otherwise nothing will be returned by your queries
+  getMessage: fromMultifeed(feeds)
+}))
+
+core.ready('query', () => {
+  // setup a live query to first log all chat/message
+  core.view.query.read({
+    query: [{ $filter: { value: { type: 'chat/message' } } }],
+    live: true,
+    old: false
+  }).on('data', (msg) => {
+    if (msg.sync) return next()
+    console.log(msg)
+  })
+
+  function next () {
+    // then log all user/about
+    core.view.query.read({
+      query: [{ $filter: { value: { type: 'user/about' } } }],
+      live: true,
+      old: false
+    }).on('data', (msg) => {
+      console.log(msg)
+    })
+  }
+})
+
+// then append a bunch of data to two different feeds in a multifeed
+feeds.writer('one', (err, one) => {
+  feeds.writer('two', (err, two) => {
+
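+    // both writers are ready here; the live queries above will log each
+    // appended message as it is indexed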
+    one.append(seedData.slice(0, 3))
+    two.append(seedData.slice(3, 5))
   })
 })
 ```
@@ -148,6 +174,7 @@ const View = require('kappa-view-query')
 ```
 
 Expects a LevelUP or LevelDOWN instance `leveldb`.
+Expects a `getMessage` function, used to fetch the full message from its feed for each index entry matched by a query.
 
 ```js
 // returns a readable stream
@@ -180,3 +207,6 @@ kappa-view-query was built by [@kyphae](https://github.com/kyphae/) and assisted
 ### 2.0.7
 - Fixed an outstanding issue where live streams were not working. Queries with `{ live: true }` setup will now properly pipe messages through as they are indexed.
 - Fixed an outstanding issue where messages with a matching timestamp were colliding: the last message indexed would overwrite the previous one. Messages are now indexed on the sequence number and the feed id, on top of the provided values, for guaranteed uniqueness.
+
+### 3.0.0 (not yet released)
+- Updated to use the experimental version of kappa-core, which includes breaking API changes. See Frando's [kappa-core fork](https://github.com/Frando/kappa-core/tree/kappa5-new).
diff --git a/example.js b/example.js
deleted file mode 100644
index 0c06ef5..0000000
--- a/example.js
+++ /dev/null
@@ -1,63 +0,0 @@
-const kappa = require('kappa-core')
-const ram = require('random-access-memory')
-const collect = require('collect-stream')
-const memdb = require('memdb')
-
-const Query = require('./')
-const { validator } = require('./util')
-const { cleaup, tmp } = require('./test/util')
-
-const core = kappa(ram, { valueEncoding: 'json' })
-const db = memdb()
-
-const indexes = [
-  { key: 'log', value: [['value', 'timestamp']] },
-  { key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] },
-  { key: 'cha', value: [['value', 'type'], ['value', 'content', 'channel'], ['value', 'timestamp']] }
-]
-
-core.use('query', Query(db, { indexes, validator }))
-
-core.writer('local', (err, feed) => {
-  const data = [{
-    type: 'chat/message',
-    timestamp: Date.now(),
-    content: { body: 'Hi im new here...' }
-  }, {
-    type: 'user/about',
-    timestamp: Date.now(),
-    content: { name: 'Grace' }
-  }, {
-    type: 'chat/message',
-    timestamp: Date.now(),
-    content: { body: 'Second post' }
-  }, {
-    type: 'chat/message',
-    timestamp: Date.now(),
-    content: { channel: 'dogs', body: 'Lurchers rule' }
-  }, {
-    type: 'chat/message',
-    timestamp: Date.now(),
-    content: { channel: 'dogs', body: 'But sometimes I prefer labra-doodles' }
-  }, {
-    type: 'user/about',
-    timestamp: Date.now(),
-    content: { name: 'Poison Ivy' }
-  }]
-
-  feed.append(data, (err, _) => {
-    core.ready('query', () => {
-      const query = [{ $filter: { value: { type: 'chat/message', content: { channel: 'dogs' } } } }]
-
-      collect(core.api.query.read({ query }), (err, chats) => {
-        if (err) return console.error(err)
-        console.log(chats)
-
-        collect(core.api.query.read({ query: [{ $filter: { value: { type: 'user/about' } } }] }), (err, users) => {
-          if (err) return console.error(err)
-          console.log(users)
-        })
-      })
-    })
-  })
-})
diff --git a/examples/hypercore.example.js b/examples/hypercore.example.js
new file mode 100644
index 0000000..199ee72
--- /dev/null
+++ b/examples/hypercore.example.js
@@ -0,0 +1,144 @@
+const Kappa = require('kappa-core')
+const createHypercoreSource = require('kappa-core/sources/hypercore')
+const hypercore = require('hypercore')
+const multifeed = require('multifeed')
+const Indexer = require('kappa-sparse-indexer')
+const ram = require('random-access-memory')
+const collect = require('collect-stream')
+const memdb = require('memdb')
+const sub = require('subleveldown')
+
+const Query = require('../')
+const { validator, fromHypercore, fromMultifeed } = require('../util')
+
+const seedData = [{
+  type: "chat/message",
+  timestamp: 1574069723314,
+  content: {
+    body: "First message"
+  }
+}, {
+  type: "user/about",
+  timestamp: 1574069723313,
+  content: {
+    name: "Grace"
+  }
+}, {
+  type: "chat/message",
+  timestamp: 1574069723317,
+  content: {
+    body: "Third message"
+  }
+}, {
+  type: "chat/message",
+  timestamp: 1574069723316,
+  content: {
+    body: "Second message"
+  }
+},{
+  type: "user/about",
+  timestamp: 1574069723315,
+  content: {
+    name: "Poison Ivy"
+  }
+}]
+
+// An example using a single hypercore
+function HypercoreExample () {
+  const core = new Kappa()
+  const feed = hypercore(ram, { valueEncoding: 'json' })
+  const db = memdb()
+
+  core.use('query', createHypercoreSource({ feed, db: sub(db, 'state') }), Query(sub(db, 'view'), {
+    // define a set of indexes
+    indexes: [
+      { key: 'log', value: [['value', 'timestamp']] },
+      { key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }
+    ],
+    // you can pass a custom validator function to ensure all messages entering a feed match a specific format
+    validator,
+    // implement your own getMessage function, and perform any desired validation on each message returned by the query
+    getMessage: fromHypercore(feed)
+  }))
+
+  feed.append(seedData, (err, _) => {
+    core.ready('query', () => {
+      const query = [{ $filter: { value: { type: 'chat/message' } } }]
+
+      // grab then log all chat/message message types up until this point
+      collect(core.view.query.read({ query }), (err, chats) => {
+        if (err) return console.error(err)
+        console.log(chats)
+
+        // grab then log all user/about message types up until this point
+        collect(core.view.query.read({ query: [{ $filter: { value: { type: 'user/about' } } }] }), (err, users) => {
+          if (err) return console.error(err)
+          console.log(users)
+        })
+      })
+    })
+  })
+}
+
+// This example scopes the indexer to only use a chosen 'LOG' feed in our multifeed instance
+function MultifeedLimitedLogsWithSparseIndexer () {
+  const core = new Kappa()
+  const feeds = multifeed(ram, { valueEncoding: 'json' })
+  const db = memdb()
+  const idx = new Indexer({
+    db: sub(db, 'idx'),
+    name: 'example'
+  })
+
+  const header = 'LOG'
+  feeds.on('feed', (feed) => {
+    if (idx.feed(feed.key)) return
+    feed.get(0, (err, msg) => {
+      if (msg === header) idx.add(feed, { scan: true })
+    })
+  })
+
+  const indexes = [
+    { key: 'log', value: [['value', 'timestamp']] },
+    { key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }
+  ]
+  const view = Query(sub(db, 'view'), {
+    indexes,
+    validator,
+    getMessage: fromMultifeed(feeds)
+  })
+
+  core.use('query', idx.source(), view)
+
+  core.ready('query', () => {
+    // setup a live query to first log all chat/message
+    core.view.query.read({
+      query: [{ $filter: { value: { type: 'chat/message' } } }],
+      live: true,
+      old: false
+    }).on('data', (msg) => {
+      if (msg.sync) return next()
+      console.log(msg)
+    })
+
+    function next () {
+      // then log all user/about
+      core.view.query.read({
+        query: [{ $filter: { value: { type: 'user/about' } } }],
+        live: true,
+        old: false
+      }).on('data', (msg) => {
+        console.log(msg)
+      })
+    }
+  })
+
+  // create our log feed, the one we want to index
+  feeds.writer('log', (err, log) => {
+    // create a second log, the one without the header
+    feeds.writer('ignore me', (err, ignore) => {
+      // append our header
+      log.append(header, (err, seq) => {
+        log.append(seedData.slice(0, 3))
+        ignore.append(seedData.slice(3, 5))
+      })
+    })
+  })
+}
diff --git a/examples/kappa-indexer.example.js b/examples/kappa-indexer.example.js
new file mode 100644
index 0000000..0c4dfb0
--- /dev/null
+++ b/examples/kappa-indexer.example.js
@@ -0,0 +1,152 @@
+const Kappa = require('kappa-core')
+const Indexer = require('kappa-sparse-indexer')
+const multifeed = require('multifeed')
+const hypercore = require('hypercore')
+const ram = require('random-access-memory')
+const collect = require('collect-stream')
+const memdb = require('memdb')
+const sub = require('subleveldown')
+
+const Query = require('../')
+const { validator, fromMultifeed } = require('../util')
+const { tmp } = require('../test/util')
+
+// This example scopes the indexer to only use a chosen 'LOG' feed in our multifeed instance
+// When multifeed learns about a new feed, either using feeds.writer, or when emitted on replication,
+// our indexer will add the feed if it hasn't already, based on whether the first seq matches a specified header
+
+// Some dummy data
+const logMsgs = [{
+  type: "user/about",
+  timestamp: 1574069723313,
+  content: {
+    name: "Grace"
+  }
+}, {
+  type: "chat/message",
+  timestamp: 1574069723314,
+  content: {
+    body: "First message"
+  }
+}, {
+  type: "chat/message",
+  timestamp: 1574069723317,
+  content: {
+    body: "Third message"
+  }
+}]
+
+const ignoredMsgs = [{
+  type: "chat/message",
+  timestamp: 1574069723316,
+  content: {
+    body: "Second message"
+  }
+},{
+  type: "user/about",
+  timestamp: 1574069723315,
+  content: {
+    name: "Poison Ivy"
+  }
+}]
+
+Example()
+
+function Example () {
+  const core = new Kappa()
+  const feeds = multifeed(tmp(), { valueEncoding: 'json' })
+  const db = memdb()
+  const idx = new Indexer({
+    db: sub(db, 'idx'),
+    name: 'example'
+  })
+
+  const header = 'LOG'
+  feeds.on('feed', (feed) => {
+    if (idx.feed(feed.key)) return
+    feed.get(0, (err, msg) => {
+      if (msg === header) idx.add(feed, { scan: true })
+    })
+  })
+
+  const indexes = [
+    { key: 'log', value: [['value', 'timestamp']] },
+    { key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }
+  ]
+  const view = Query(sub(db, 'view'), {
+    indexes,
+    validator,
+    getMessage: fromMultifeed(feeds)
+  })
+
+  core.use('query', idx.source(), view)
+
+  setupFirstFeed((err, log) => {
+    setupQueries(() => {
+      setupSecondFeed((err, ignoredFeed) => {
+        // append the two remaining chat messages to the log
+        log.append(logMsgs)
+        // append some data to our ignored log, this won't be indexed
+        ignoredFeed.append(ignoredMsgs)
+      })
+    })
+  })
+
+  function setupFirstFeed (callback) {
+    // create a log feed, the one we want to index
+    feeds.writer('log', (err, log) => {
+      log.append(header, (err) => {
+        // append our user/about message to the log
+        log.append(logMsgs.shift(), (err, seq) => {
+          callback(err, log)
+        })
+      })
+    })
+  }
+
+  function setupSecondFeed (callback) {
+    // create a second feed, the one we want to ignore
+    feeds.writer('ignored', callback)
+  }
+
+  function setupQueries (callback) {
+    // make sure our indexes are ready before executing any queries
+    core.ready('query', () => {
+      // setup a sync query to log all user/about
+      collect(core.view.query.read({
+        query: [{ $filter: { value: { type: 'user/about' } } }],
+      }), (err, msgs) => {
+        console.log(msgs)
+        // [{
+        //   type: 'user/about',
+        //   timestamp: 1574069723313,
+        //   content: { name: 'Grace' }
+        // }]
+        next()
+      })
+
+      function next () {
+        // then listen live for all chat/message
+        core.view.query.read({
+          query: [{ $filter: { value: { type: 'chat/message', timestamp: { $gt: 0 } } } }],
+          live: true
+        }).on('data', (msg) => {
+          console.log(msg)
+          // {
+          //   type: 'chat/message',
+          //   timestamp: 1574069723314,
+          //   content: { body: 'First message' }
+          // }
+          //
+          // {
+          //   type: 'chat/message',
+          //   timestamp: 1574069723317,
+          //   content: { body: 'Third message' }
+          // }
+        })
+
+        callback()
+      }
+    })
+  }
+}
diff --git a/examples/multifeed.example.js b/examples/multifeed.example.js
new file mode 100644
index 0000000..1a1b696
--- /dev/null
+++ b/examples/multifeed.example.js
@@ -0,0 +1,90 @@
+const Kappa = require('kappa-core')
+const createMultifeedSource = require('kappa-core/sources/multifeed')
+const multifeed = require('multifeed')
+const ram = require('random-access-memory')
+const collect = require('collect-stream')
+const memdb = require('memdb')
+const sub = require('subleveldown')
+
+const Query = require('../')
+const { validator, fromMultifeed } = require('../util')
+
+const seedData = [{
+  type: "chat/message",
+  timestamp: 1574069723314,
+  content: {
+    body: "First message"
+  }
+}, {
+  type: "user/about",
+  timestamp: 1574069723313,
+  content: {
+    name: "Grace"
+  }
+}, {
+  type: "chat/message",
+  timestamp: 1574069723317,
+  content: {
+    body: "Third message"
+  }
+}, {
+  type: "chat/message",
+  timestamp: 1574069723316,
+  content: {
+    body: "Second message"
+  }
+},{
+  type: "user/about",
+  timestamp: 1574069723315,
+  content: {
+    name: "Poison Ivy"
+  }
+}]
+
+// an example using multifeed for aggregating and querying all feeds
+function MultifeedExample () {
+  const core = new Kappa()
+  const feeds = multifeed(ram, { valueEncoding: 'json' })
+  const db = memdb()
+
+  core.use('query', createMultifeedSource({ feeds, db: sub(db, 'state') }), Query(sub(db, 'view'), {
+    indexes: [
+      { key: 'log', value: [['value', 'timestamp']] },
+      { key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }
+    ],
+    validator,
+    getMessage: fromMultifeed(feeds)
+  }))
+
+  core.ready('query', () => {
+    // setup a live query to first log all chat/message
+    core.view.query.read({
+      query: [{ $filter: { value: { type: 'chat/message' } } }],
+      live: true,
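+      // with old: false, only messages indexed after this call are emitted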
+ old: false + }).on('data', (msg) => { + if (msg.sync) return next() + console.log(msg) + }) + + function next () { + // then to first log all user/about + core.view.query.read({ + query: [{ $filter: { value: { type: 'user/about' } } }], + live: true, + old: false + }).on('data', (msg) => { + console.log(msg) + }) + } + }) + + // then append a bunch of data to two different feeds in a multifeed + feeds.writer('one', (err, one) => { + feeds.writer('two', (err, two) => { + + one.append(seedData.slice(0, 3)) + two.append(seedData.slice(3, 5)) + }) + }) +} diff --git a/index.js b/index.js index 48ce68c..3b57e1a 100644 --- a/index.js +++ b/index.js @@ -4,146 +4,125 @@ const charwise = require('charwise') const { EventEmitter } = require('events') const debug = require('debug')('kappa-view-query') const liveStream = require('level-live-stream') +const { isFunction } = require('util') const Explain = require('./explain') const Filter = require('./filter') -const { isFunction } = require('./util') - module.exports = function KappaViewQuery (db = memdb(), opts = {}) { const events = new EventEmitter() const { indexes = [], validator = (msg) => msg, - keyEncoding = charwise + keyEncoding = charwise, + getMessage, } = opts - const view = { + return { maxBatch: opts.maxBatch || 100, + map, + indexed, + api: { + read, + explain, + add, + onUpdate: (core, cb) => events.on('update', cb), + events + } + } - map: (msgs, next) => { - var ops = [] + function explain (core, _opts) { + var expl = Explain(indexes.map((idx) => Object.assign(idx, { + exact: typeof idx.exact === 'boolean' ? idx.exact : false, + createStream: (__opts) => { + var thru = through.obj(function (msg, enc, next) { + if (msg.sync) { + this.push(msg) + return next() + } - msgs.forEach((msg) => { - msg = validator(msg) - if (!msg) return + getMessage(msg, (err, msg) => { + if (err) return next() + this.push(msg) + next() + }) + }) - indexes.forEach((idx) => { - var indexKeys = getIndexValues(msg, idx.value) + var streamOpts = Object.assign(__opts, { + lte: [idx.key, ...__opts.lte], + gte: [idx.key, ...__opts.gte], + keyEncoding, + keys: true, + values: true + }) - if (indexKeys.length) { - ops.push({ - type: 'put', - key: [idx.key, ...indexKeys, msg.seq, msg.key], - value: [msg.key, msg.seq].join('@'), - keyEncoding, - }) - } + var stream = __opts.live + ? 
liveStream(db, streamOpts) + : db.createReadStream(streamOpts) - function getIndexValues (msg, value) { - var child = value[0] - if (Array.isArray(child)) { - return value - .map((val) => getIndexValues(msg, val)) - .reduce((acc, arr) => [...acc, ...arr], []) - .filter(Boolean) - } else if (typeof child === 'string') { - return [value.reduce((obj, val) => obj[val], msg)] - .filter(Boolean) - } else return [] - } - }) - }) + stream.pipe(thru) - db.batch(ops, next) - }, - indexed: (msgs) => { - debug(`indexing ${JSON.stringify(msgs, null, 2)}`) + return thru + } + }))) - msgs.forEach((msg) => { - events.emit('update', msg) + return expl(_opts) + } + + function read (core, _opts) { + var __opts = explain(core, _opts) + var source = __opts.createStream(__opts) + return Filter(source, _opts) + } + + function add (core, _opts) { + var isValid = _opts && isFunction(_opts.createStream) && Array.isArray(_opts.index || _opts.value) + if(!isValid) throw new Error('kappa-view-query.add: expected opts { index, createStream }') + _opts.value = _opts.index || _opts.value + indexes.push(_opts) + } + + function map (msgs, next) { + var ops = [] + + msgs.forEach((msg) => { + msg = validator(msg) + if (!msg) return + + indexes.forEach((idx) => { + var indexKeys = getIndexValues(msg, idx.value) + + if (indexKeys.length) { + ops.push({ + type: 'put', + key: [idx.key, ...indexKeys, msg.seq, msg.key], + value: [msg.key, msg.seq].join('@'), + keyEncoding, + }) + } + + function getIndexValues (msg, value) { + var child = value[0] + if (Array.isArray(child)) { + return value + .map((val) => getIndexValues(msg, val)) + .reduce((acc, arr) => [...acc, ...arr], []) + .filter(Boolean) + } else if (typeof child === 'string') { + return [value.reduce((obj, val) => obj[val], msg)] + .filter(Boolean) + } else return [] + } }) - }, - api: { - read: (core, _opts) => { - var __opts = view.api.explain(core, _opts) - var source = __opts.createStream(__opts) - return Filter(source, _opts) - }, - explain: (core, _opts) => { - var explain = Explain(indexes.map((idx) => Object.assign(idx, { - exact: typeof idx.exact === 'boolean' ? idx.exact : false, - createStream: (__opts) => { - var thru = through.obj(function (msg, enc, next) { - if (msg.sync) { - this.push(msg) - return next() - } - - var msgId = msg.value - var [ feedId, sequence ] = msgId.split('@') - var feed = core._logs.feed(feedId) - var seq = Number(sequence) - - feed.get(seq, (err, value) => { - if (err) return next() - - var msg = validator({ - key: feed.key.toString('hex'), - seq, - value - }) - - if (!msg) return next() - this.push(msg) - next() - }) - }) - - var streamOpts = Object.assign(__opts, { - lte: [idx.key, ...__opts.lte], - gte: [idx.key, ...__opts.gte], - keyEncoding, - keys: true, - values: true - }) - - var stream = __opts.live - ? 
liveStream(db, streamOpts) - : db.createReadStream(streamOpts) - - stream.pipe(thru) - - return thru - } - }))) - - return explain(_opts) - }, - add: (core, _opts) => { - var isValid = _opts && isFunction(_opts.createStream) && Array.isArray(_opts.index || _opts.value) - if(!isValid) throw new Error('kappa-view-query.add: expected opts { index, createStream }') - _opts.value = _opts.index || _opts.value - indexes.push(_opts) - }, - onUpdate: (core, cb) => { - events.on('update', cb) - }, - storeState: (state, cb) => { - state = state.toString('base64') - db.put('state', state, cb) - }, - fetchState: (cb) => { - db.get('state', function (err, state) { - if (err && err.notFound) cb() - else if (err) cb(err) - else cb(null, Buffer.from(state, 'base64')) - }) - }, - events - } + }) + + db.batch(ops, next) } - return view + function indexed (msgs) { + msgs.forEach((msg) => { + events.emit('update', msg) + }) + } } diff --git a/package-lock.json b/package-lock.json index 98a6ff4..47f01a5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "kappa-view-query", - "version": "2.0.9", + "version": "3.0.0-alpha1", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 97586cd..e26c3db 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "kappa-view-query", - "version": "2.0.9", + "version": "3.0.0-alpha1", "description": "define your own indexes and execute map-filter-reduce queries over a set of hypercores using kappa-core", "main": "index.js", "repository": { @@ -24,13 +24,17 @@ }, "devDependencies": { "collect-stream": "^1.2.1", - "kappa-core": "^3.0.2", + "hypercore": "^8.4.1", + "kappa-core": "github:Frando/kappa-core#exp-0.1.3", + "kappa-sparse-indexer": "github:Frando/kappa-sparse-indexer#master", "level": "^5.0.1", "memdb": "^1.3.1", "mkdirp": "^0.5.1", + "multifeed": "5.1.1", "nyc": "^14.1.1", "random-access-memory": "^3.1.1", "rimraf": "^2.6.3", + "subleveldown": "^4.1.4", "tap-spec": "^5.0.0", "tape": "^4.11.0", "tape-plus": "^1.0.0", diff --git a/test/index.test.js b/test/index.test.js index a9940f7..482328f 100644 --- a/test/index.test.js +++ b/test/index.test.js @@ -1,46 +1,45 @@ const { describe } = require('tape-plus') -const kappa = require('kappa-core') -const Query = require('../') +const Kappa = require('kappa-core') +const createMultifeedSource = require('kappa-core/sources/multifeed') +const createHypercoreSource = require('kappa-core/sources/hypercore') +const multifeed = require('multifeed') +const hypercore = require('hypercore') const ram = require('random-access-memory') const memdb = require('memdb') +const sub = require('subleveldown') const level = require('level') const collect = require('collect-stream') const crypto = require('crypto') const debug = require('debug')('kappa-view-query') -const seeds = require('./seeds.json') - .sort((a, b) => a.timestamp > b.timestamp ? +1 : -1) - -const drive = require('./drive.json') - .sort((a, b) => a.timestamp > b.timestamp ? +1 : -1) +const Query = require('../') +const { fromMultifeed, fromHypercore } = require('../util') const { cleanup, tmp, replicate } = require('./util') -describe('basic', (context) => { - let core, db, indexes - - context.beforeEach((c) => { - core = kappa(ram, { valueEncoding: 'json' }) - db = memdb() +const seeds = require('./seeds.json').sort((a, b) => a.timestamp > b.timestamp ? +1 : -1) +const drive = require('./drive.json').sort((a, b) => a.timestamp > b.timestamp ? 
+1 : -1)
 
+describe('hypercore', (context) => {
+  context('perform a query', (assert, next) => {
+    var core = new Kappa()
+    var feed = hypercore(ram, { valueEncoding: 'json' })
+    var db = memdb()
 
-    indexes = [
-      { key: 'log', value: [['value', 'timestamp']] },
-      { key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] },
-      { key: 'fil', value: [['value', 'filename'], ['value', 'timestamp']] }
-    ]
+    var source = createHypercoreSource({ feed, db: sub(db, 'state') })
 
-    core.use('query', Query(db, { indexes }))
-  })
+    core.use('query', source, Query(sub(db, 'view'), {
+      indexes: [{ key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }],
+      getMessage: fromHypercore(feed)
+    }))
 
-  context('perform a query', (assert, next) => {
-    core.writer('local', (err, feed) => {
+    core.ready('query', () => {
       feed.append(seeds, (err, _) => {
         assert.error(err, 'no error')
 
         let query = [{ $filter: { value: { type: 'chat/message' } } }]
 
         core.ready('query', () => {
-          collect(core.api.query.read({ query }), (err, msgs) => {
+          collect(core.view.query.read({ query }), (err, msgs) => {
             var check = seeds.filter((msg) => msg.type === 'chat/message')
             assert.same(msgs.map((msg) => msg.value), check, 'queries messages using correct index')
 
@@ -52,45 +51,64 @@
   })
 
   context('get all messages', (assert, next) => {
-    core.writer('local', (err, feed) => {
-      feed.append(seeds, (err, _) => {
-        assert.error(err, 'no error')
+    var core = new Kappa()
+    var feed = hypercore(ram, { valueEncoding: 'json' })
+    var db = memdb()
 
-        let query = [{ $filter: { value: { timestamp: { $gt: 0 } } } }]
+    var source = createHypercoreSource({ feed, db: sub(db, 'state') })
+    core.use('query', source, Query(sub(db, 'view'), {
+      indexes: [{ key: 'log', value: [['value', 'timestamp']] }],
+      getMessage: fromHypercore(feed)
+    }))
 
-        core.ready('query', () => {
-          collect(core.api.query.read({ query }), (err, msgs) => {
-            var check = seeds
-            assert.equal(msgs.length, check.length, 'gets the same number of messages')
-            assert.same(msgs.map((msg) => msg.value), check, 'querys messages using correct index')
-            next()
-          })
+    feed.append(seeds, (err, _) => {
+      assert.error(err, 'no error')
+
+      let query = [{ $filter: { value: { timestamp: { $gt: 0 } } } }]
+
+      core.ready('query', () => {
+        collect(core.view.query.read({ query }), (err, msgs) => {
+          var check = seeds
+          assert.equal(msgs.length, check.length, 'gets the same number of messages')
+          assert.same(msgs.map((msg) => msg.value), check, 'queries messages using correct index')
+          next()
         })
       })
     })
   })
 
-  context('fil index - get all changes to a specific file, then get all changes to all files', (assert, next) => {
-    core.writer('local', (err, feed) => {
-      feed.append(drive, (err, _) => {
-        assert.error(err, 'no error')
-        let filename = 'hello.txt'
-        let helloQuery = [{ $filter: { value: { filename, timestamp: { $gt: 0 } } } }]
+  context('fil (used by cobox state feed)', (assert, next) => {
+    var core = new Kappa()
+    var feed = hypercore(ram, { valueEncoding: 'json' })
+    var db = memdb()
+
+    var source = createHypercoreSource({ feed, db: sub(db, 'state') })
+    core.use('query', source, Query(sub(db, 'view'), {
+      indexes: [
+        { key: 'log', value: [['value', 'timestamp']] },
+        { key: 'fil', value: [['value', 'filename']] },
+      ],
+      getMessage: fromHypercore(feed)
+    }))
+
+    feed.append(drive, (err, _) => {
+      assert.error(err, 'no error')
+      let filename = 'hello.txt'
+      let helloQuery = [{ $filter: { value: { filename, timestamp: { $gt: 0 } } } }]
 
-        core.ready('query', () => {
-          collect(core.api.query.read({ query: helloQuery }), (err, msgs) => {
-            var check = drive.filter((msg) => msg.filename === filename)
-            assert.equal(msgs.length, check.length, 'gets the same number of messages')
-            assert.same(msgs.map((msg) => msg.value), check, 'querys messages using correct index')
+      core.ready('query', () => {
+        collect(core.view.query.read({ query: helloQuery }), (err, msgs) => {
+          var check = drive.filter((msg) => msg.filename === filename)
+          assert.equal(msgs.length, check.length, 'gets the same number of messages')
+          assert.same(msgs.map((msg) => msg.value), check, 'queries messages using correct index')
 
-            let fileQuery = [{ $filter: { value: { timestamp: { $gt: 0 } } } }]
+          let fileQuery = [{ $filter: { value: { timestamp: { $gt: 0 } } } }]
 
-            collect(core.api.query.read({ query: fileQuery }), (err, msgs) => {
-              var check = drive
-              assert.equal(msgs.length, check.length, 'gets the same number of messages')
-              assert.same(msgs.map((msg) => msg.value), check, 'querys messages using correct index')
-              next()
-            })
+          collect(core.view.query.read({ query: fileQuery }), (err, msgs) => {
+            var check = drive
+            assert.equal(msgs.length, check.length, 'gets the same number of messages')
+            assert.same(msgs.map((msg) => msg.value), check, 'queries messages using correct index')
+            next()
           })
         })
       })
@@ -98,56 +116,60 @@
   })
 
   context('live', (assert, next) => {
-    core.writer('local', (err, feed) => {
-      assert.error(err, 'no error')
-      feed.append(seeds.slice(0, 2), (err, _) => {
-        assert.error(err, 'no error')
+    var core = new Kappa()
+    var feed = hypercore(ram, { valueEncoding: 'json' })
+    var db = memdb()
 
-        let count = 0
-        let check = seeds.filter((msg) => msg.type === 'chat/message')
+    var source = createHypercoreSource({ feed, db: sub(db, 'state') })
+    core.use('query', source, Query(sub(db, 'view'), {
+      indexes: [{ key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }],
+      getMessage: fromHypercore(feed)
+    }))
 
-        let query = [{ $filter: { value: { type: 'chat/message' } } }]
+    feed.append(seeds.slice(0, 2), (err, _) => {
+      assert.error(err, 'no error')
 
-        core.ready('query', () => {
-          var stream = core.api.query.read({ live: true, query })
+      let count = 0
+      let check = seeds.filter((msg) => msg.type === 'chat/message')
 
-          stream.on('data', (msg) => {
-            if (msg.sync) return done()
-            assert.same(check[count], msg.value, 'streams each message live')
-            ++count
-            done()
-          })
+      let query = [{ $filter: { value: { type: 'chat/message' } } }]
 
-          feed.append(seeds.slice(3, 5), (err, _) => {
-            assert.error(err, 'no error')
-          })
+      core.ready('query', () => {
+        var stream = core.view.query.read({ live: true, query })
 
-          function done (err) {
-            if (count === check.length) return next()
-          }
+        stream.on('data', (msg) => {
+          if (msg.sync) return done()
+          assert.same(check[count], msg.value, 'streams each message live')
+          ++count
+          done()
         })
+
+        feed.append(seeds.slice(3, 5), (err, _) => {
+          assert.error(err, 'no error')
+        })
+
+        function done (err) {
+          if (count === check.length) return next()
+        }
       })
     })
   })
 })
 
-describe('multiple feeds', (context) => {
-  let core, db
-  let name1, name2
-
-  context.beforeEach((c) => {
-    core = kappa(ram, { valueEncoding: 'json' })
-    db = memdb()
-
-    indexes = [{ key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }]
-
-    name1 = crypto.randomBytes(16).toString('hex')
-    name2 = crypto.randomBytes(16).toString('hex')
-
-    core.use('query', Query(db, { indexes }))
-  })
-
+describe('multifeed', (context) => {
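+  // each context below builds its own kappa core, multifeed and level instance
+  // rather than sharing state through beforeEach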
context('aggregates all feeds', (assert, next) => { + var core = new Kappa() + var feeds = multifeed(ram, { valueEncoding: 'json' }) + var db = memdb() + + var source = createMultifeedSource({ feeds, db: sub(db, 'state') }) + core.use('query', source, Query(sub(db, 'view'), { + indexes: [{ key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }], + getMessage: fromMultifeed(feeds) + })) + + var name1 = crypto.randomBytes(16).toString('hex') + var name2 = crypto.randomBytes(16).toString('hex') var query = [{ $filter: { value: { type: 'chat/message' } } }] var timestamp = Date.now() var count = 0 @@ -159,10 +181,10 @@ describe('multiple feeds', (context) => { assert.error(err, 'no error') debug(`initialised feed1: ${feed1.key.toString('hex')} feed2: ${feed2.key.toString('hex')}`) - assert.same(2, core.feeds().length, 'two local feeds') + assert.same(2, feeds.feeds().length, 'two local feeds') core.ready('query', () => { - collect(core.api.query.read({ query }), (err, msgs) => { + collect(core.view.query.read({ query }), (err, msgs) => { assert.error(err, 'no error') assert.ok(msgs.length === 2, 'returns two messages') assert.same(msgs, [ @@ -176,7 +198,7 @@ describe('multiple feeds', (context) => { }) function setup (name, cb) { - core.writer(name, (err, feed) => { + feeds.writer(name, (err, feed) => { feed.append({ type: 'chat/message', timestamp: timestamp + count, @@ -190,6 +212,18 @@ describe('multiple feeds', (context) => { }) context('aggregates all feeds, colliding timestamps', (assert, next) => { + var core = new Kappa() + var feeds = multifeed(ram, { valueEncoding: 'json' }) + var db = memdb() + + var source = createMultifeedSource({ feeds, db: sub(db, 'state') }) + core.use('query', source, Query(sub(db, 'view'), { + indexes: [{ key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }], + getMessage: fromMultifeed(feeds) + })) + + var name1 = crypto.randomBytes(16).toString('hex') + var name2 = crypto.randomBytes(16).toString('hex') var query = [{ $filter: { value: { type: 'chat/message' } } }] var timestamp = Date.now() @@ -199,7 +233,7 @@ describe('multiple feeds', (context) => { assert.error(err, 'no error') core.ready('query', () => { - collect(core.api.query.read({ query }), (err, msgs) => { + collect(core.view.query.read({ query }), (err, msgs) => { assert.error(err, 'no error') assert.ok(msgs.length === 2, 'returns two messages') assert.same(msgs, [ @@ -213,7 +247,7 @@ describe('multiple feeds', (context) => { }) function setup (name, cb) { - core.writer(name1, (err, feed) => { + feeds.writer(name1, (err, feed) => { feed.append({ type: 'chat/message', timestamp, @@ -226,13 +260,25 @@ describe('multiple feeds', (context) => { }) context('live', (assert, next) => { + var core = new Kappa() + var feeds = multifeed(ram, { valueEncoding: 'json' }) + var db = memdb() + + var source = createMultifeedSource({ feeds, db: sub(db, 'state') }) + core.use('query', source, Query(sub(db, 'view'), { + indexes: [{ key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }], + getMessage: fromMultifeed(feeds) + })) + + var name1 = crypto.randomBytes(16).toString('hex') + var name2 = crypto.randomBytes(16).toString('hex') var query = [{ $filter: { value: { type: 'chat/message' } } }] let timestamp = Date.now() - core.writer(name1, (err, feed1) => { + feeds.writer(name1, (err, feed1) => { assert.error(err, 'no error') - core.writer(name2, (err, feed2) => { + feeds.writer(name2, (err, feed2) => { assert.error(err, 'no error') let count = 0 @@ -241,7 +287,7 @@ 
describe('multiple feeds', (context) => { .filter((msg) => msg.type === 'chat/message') core.ready('query', () => { - var stream = core.api.query.read({ live: true, old: false, query }) + var stream = core.view.query.read({ live: true, old: false, query }) stream.on('data', (msg) => { assert.same(msg.value, check[count], 'streams each message live') @@ -272,46 +318,52 @@ describe('multiple feeds', (context) => { }) }) -describe('multiple cores', (context) => { - let core1, db1 - let core2, db2 - - context.beforeEach((c) => { - core1 = kappa(ram, { valueEncoding: 'json' }) - core2 = kappa(ram, { valueEncoding: 'json' }) - - indexes = [{ key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }] - - core1.use('query', Query(memdb(), { indexes })) - core2.use('query', Query(memdb(), { indexes })) - }) - +describe('multiple multifeeds', (context) => { context('aggregates all valid messages from all feeds when querying', (assert, next) => { + var core1 = new Kappa() + var core2 = new Kappa() + var feeds1 = multifeed(ram, { valueEncoding: 'json' }) + var feeds2 = multifeed(ram, { valueEncoding: 'json' }) + var db1 = memdb() + var db2 = memdb() + + var source1 = createMultifeedSource({ feeds: feeds1, db: sub(db1, 'state') }) + core1.use('query', source1, Query(sub(db1, 'view'), { + indexes: [{ key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }], + getMessage: fromMultifeed(feeds1) + })) + + var source2 = createMultifeedSource({ feeds: feeds2, db: sub(db2, 'state') }) + core2.use('query', source2, Query(sub(db2, 'view'), { + indexes: [{ key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }], + getMessage: fromMultifeed(feeds2) + })) + var query = [{ $filter: { value: { type: 'chat/message' } } }] var timestamp = Date.now() var count = 0 - setup(core1, (err, feed1) => { + setup(feeds1, (err, feed1) => { assert.error(err, 'no error') - setup(core2, (err, feed2) => { + setup(feeds2, (err, feed2) => { assert.error(err, 'no error') debug(`initialised core1: ${feed1.key.toString('hex')} core2: ${feed2.key.toString('hex')}`) - assert.same(1, core1.feeds().length, 'one feed') - assert.same(1, core2.feeds().length, 'one feed') + assert.same(1, feeds1.feeds().length, 'one feed') + assert.same(1, feeds2.feeds().length, 'one feed') core1.ready('query', () => { - collect(core1.api.query.read({ query }), (err, msgs) => { + collect(core1.view.query.read({ query }), (err, msgs) => { assert.error(err, 'no error') assert.ok(msgs.length === 1, 'returns a single message') - replicate(core1, core2, (err) => { + replicate(feeds1, feeds2, (err) => { assert.error(err, 'no error') - assert.same(2, core1.feeds().length, `first core has second core's feed`) - assert.same(2, core2.feeds().length, `second core has first core's feed`) + assert.same(2, feeds1.feeds().length, `first core has second core's feed`) + assert.same(2, feeds2.feeds().length, `second core has first core's feed`) core2.ready('query', () => { - collect(core2.api.query.read({ query }), (err, msgs) => { + collect(core2.view.query.read({ query }), (err, msgs) => { assert.error(err, 'no error') assert.ok(msgs.length === 2, 'returns two messages') assert.same(msgs, [ @@ -327,8 +379,8 @@ describe('multiple cores', (context) => { }) }) - function setup (kcore, cb) { - kcore.writer('local', (err, feed) => { + function setup (multifeed, cb) { + multifeed.writer('local', (err, feed) => { feed.append({ type: 'chat/message', timestamp: timestamp + count, @@ -341,6 +393,25 @@ describe('multiple cores', (context) => { }) context('live', 
(assert, next) => { + var core1 = new Kappa() + var core2 = new Kappa() + var feeds1 = multifeed(ram, { valueEncoding: 'json' }) + var feeds2 = multifeed(ram, { valueEncoding: 'json' }) + var db1 = memdb() + var db2 = memdb() + + var source1 = createMultifeedSource({ feeds: feeds1, db: sub(db1, 'state') }) + core1.use('query', source1, Query(sub(db1, 'view'), { + indexes: [{ key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }], + getMessage: fromMultifeed(feeds1) + })) + + var source2 = createMultifeedSource({ feeds: feeds2, db: sub(db2, 'state') }) + core2.use('query', source2, Query(sub(db2, 'view'), { + indexes: [{ key: 'typ', value: [['value', 'type'], ['value', 'timestamp']] }], + getMessage: fromMultifeed(feeds2) + })) + let query = [{ $filter: { value: { type: 'user/about' } } }] let timestamp = Date.now() let feed1Name = { type: 'user/about', timestamp, content: { name: 'Magpie' } } @@ -350,17 +421,17 @@ describe('multiple cores', (context) => { let check1 = [feed1Name, feed2Name] let check2 = [feed2Name, feed1Name] - setup(core1, (err, feed1) => { + setup(feeds1, (err, feed1) => { assert.error(err, 'no error') - setup(core2, (err, feed2) => { + setup(feeds2, (err, feed2) => { assert.error(err, 'no error') let core1ready, core2ready debug(`initialised core1: ${feed1.key.toString('hex')} core2: ${feed2.key.toString('hex')}`) core1.ready('query', () => { - let stream1 = core1.api.query.read({ live: true, old: false, query }) + let stream1 = core1.view.query.read({ live: true, old: false, query }) stream1.on('data', (msg) => { debug(`stream 1: ${JSON.stringify(msg, null, 2)}` ) @@ -374,7 +445,7 @@ describe('multiple cores', (context) => { }) core2.ready('query', () => { - let stream2 = core2.api.query.read({ live: true, old: false, query }) + let stream2 = core2.view.query.read({ live: true, old: false, query }) stream2.on('data', (msg) => { debug(`stream 2: ${JSON.stringify(msg, null, 2)}` ) @@ -397,10 +468,10 @@ describe('multiple cores', (context) => { assert.error(err, 'no error') debug('replicating') - replicate(core1, core2, (err) => { + replicate(feeds1, feeds2, (err) => { assert.error(err, 'no error') - assert.same(2, core1.feeds().length, `first core has replicated second core's feed`) - assert.same(2, core2.feeds().length, `second core has replicated first core's feed`) + assert.same(2, feeds1.feeds().length, `first core has replicated second core's feed`) + assert.same(2, feeds2.feeds().length, `second core has replicated first core's feed`) }) }) }) @@ -412,8 +483,8 @@ describe('multiple cores', (context) => { if (count1 === 2 && count2 === 2) return next() } - function setup (kcore, cb) { - kcore.writer('local', cb) + function setup (multifeed, cb) { + multifeed.writer('local', cb) } }) }) diff --git a/test/util.js b/test/util.js index f954a54..adf1449 100644 --- a/test/util.js +++ b/test/util.js @@ -33,7 +33,7 @@ function cleanup (dirs, cb) { } function tmp () { - var tmpDir = tmpdir().name + var tmpDir = './'+tmpdir().name mkdirp.sync(tmpDir) return tmpDir } @@ -43,10 +43,20 @@ function uniq (array) { return Array.from(new Set(array)) } -function replicate (core1, core2, cb) { - var stream = core1.replicate() - stream.pipe(core2.replicate()).pipe(stream) - stream.on('end', cb) +function replicate (a, b, opts, cb) { + if (typeof opts === 'function') return replicate(a, b, {}, opts) + if (!cb) cb = noop + + var s = a.replicate(true, Object.assign({ live: false }, opts)) + var d = b.replicate(false, Object.assign({ live: false }, opts)) + + s.pipe(d).pipe(s) + 
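+  // surface replication stream errors to the callback instead of throwing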
+  s.on('error', (err) => {
+    if (err) return cb(err)
+  })
+
+  s.on('end', cb)
 }
 
 function noop () {}
diff --git a/util.js b/util.js
index 7038269..4deba47 100644
--- a/util.js
+++ b/util.js
@@ -38,23 +38,60 @@ function findByPath (indexes, path) {
   })
 }
 
-function isFunction (variable) {
-  return typeof variable === 'function'
-}
-
 function validator (msg) {
   if (typeof msg !== 'object') return null
   if (typeof msg.value !== 'object') return null
   if (typeof msg.value.timestamp !== 'number') return null
-  if (typeof msg.value.type !== 'string') return null
   return msg
 }
 
+function fromMultifeed (feeds, opts = {}) {
+  var validate = opts.validator || function (msg) { return msg }
+
+  return function getMessage (msg, cb) {
+    var msgId = msg.value
+    var [ feedId, sequence ] = msgId.split('@')
+    var feed = feeds.feed(feedId)
+    var seq = Number(sequence)
+
+    feed.get(seq, (err, value) => {
+      if (err) return cb(err)
+      var msg = validate({
+        key: feed.key.toString('hex'),
+        seq,
+        value
+      })
+      if (!msg) return cb(new Error('message failed to validate'))
+      return cb(null, msg)
+    })
+  }
+}
+
+function fromHypercore (feed, opts = {}) {
+  var validate = opts.validator || function (msg) { return msg }
+
+  return function getMessage (msg, cb) {
+    var msgId = msg.value
+    var sequence = msgId.split('@')[1]
+    var seq = Number(sequence)
+
+    feed.get(seq, (err, value) => {
+      if (err) return cb(err)
+      var msg = validate({
+        key: feed.key.toString('hex'),
+        seq,
+        value
+      })
+      if (!msg) return cb(new Error('message failed to validate'))
+      return cb(null, msg)
+    })
+  }
+}
+
 module.exports = {
   has,
   get,
   set,
   findByPath,
-  isFunction,
-  validator
+  validator,
+  fromMultifeed,
+  fromHypercore
 }
diff --git a/yarn.lock b/yarn.lock
index 10dc6b8..376ca45 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -89,7 +89,14 @@
     lodash "^4.17.13"
     to-fast-properties "^2.0.0"
 
-abstract-leveldown@^6.2.1, abstract-leveldown@~6.2.1:
+abstract-extension@^3.0.1:
+  version "3.1.0"
+  resolved "https://registry.yarnpkg.com/abstract-extension/-/abstract-extension-3.1.0.tgz#104da6e40765216d60688e31ee17fed6f4ed2196"
+  integrity sha512-IhhwBFoP2l4xm0gp/YGzOfkie28OCT0X3OG4k9Zari/cM92QUU1tluUIBdhVrGXF8KrFGd8x9snuz152j6yi6A==
+  dependencies:
+    codecs "^2.0.0"
+
+abstract-leveldown@^6.1.1, abstract-leveldown@^6.2.1, abstract-leveldown@~6.2.1:
   version "6.2.2"
   resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-6.2.2.tgz#677425beeb28204367c7639e264e93ea4b49971a"
   integrity sha512-/a+Iwj0rn//CX0EJOasNyZJd2o8xur8Ce9C57Sznti/Ilt/cb6Qd8/k98A4ZOklXgTG+iAYYUs1OTG0s1eH+zQ==
@@ -226,7 +233,7 @@ brace-expansion@^1.1.7:
     balanced-match "^1.0.0"
     concat-map "0.0.1"
 
-buffer-alloc-unsafe@^1.0.0, buffer-alloc-unsafe@^1.1.0:
+buffer-alloc-unsafe@^1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0"
  integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg==
@@ -327,6 +334,11 @@ cliui@^5.0.0:
     strip-ansi "^5.2.0"
     wrap-ansi "^5.1.0"
 
+clone@^2.1.2:
+  version "2.1.2"
+  resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f"
+  integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18=
+
 codecs@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/codecs/-/codecs-2.0.0.tgz#680d1d4ac8ac3c8adbaa625c7ce06c6ee5792b50"
@@ -487,6 +499,11 @@ define-properties@^1.1.2, define-properties@^1.1.3:
   dependencies:
     object-keys "^1.0.12"
 
+defined@~0.0.0:
+  version "0.0.0"
+  resolved 
"https://registry.yarnpkg.com/defined/-/defined-0.0.0.tgz#f35eea7d705e933baf13b2f03b3f83d921403b3e" + integrity sha1-817qfXBekzuvE7LwOz+D2SFAOz4= + defined@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" @@ -502,7 +519,7 @@ emoji-regex@^7.0.1: resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== -encoding-down@^6.3.0: +encoding-down@^6.2.0, encoding-down@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/encoding-down/-/encoding-down-6.3.0.tgz#b1c4eb0e1728c146ecaef8e32963c549e76d082b" integrity sha512-QKrV0iKR6MZVJV08QY0wp1e7vF6QbhnbQhb07bwpEyuz4uZiZgPlEGdkCROuFkUwdxlFaiPIhjyarH1ee/3vhw== @@ -583,6 +600,11 @@ fast-bitfield@^1.2.2: dependencies: count-trailing-zeros "^1.0.1" +fast-fifo@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fast-fifo/-/fast-fifo-1.0.0.tgz#9bc72e6860347bb045a876d1c5c0af11e9b984e7" + integrity sha512-4VEXmjxLj7sbs8J//cn2qhRap50dGzF5n8fjay8mau+Jn4hxSeR3xPFwxMaQq/pDaq7+KQk0PAbC2+nWDkJrmQ== + fd-lock@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/fd-lock/-/fd-lock-1.0.2.tgz#fb68e9f40830f96a098e090b79ab6ee9363ea89d" @@ -727,6 +749,14 @@ hasha@^3.0.0: dependencies: is-stream "^1.0.1" +hmac-blake2b@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/hmac-blake2b/-/hmac-blake2b-0.2.0.tgz#f8c71699dc834ce8066a512ba0592eda572bff4c" + integrity sha512-cJpnWOYMtaLr+3O32OII7DSTmQh+BKoeLXw49UAIc2QU68UwD2iBjItwxRVHmu1GBTuHeqME+rq7GpW2rBncCQ== + dependencies: + nanoassert "^1.1.0" + sodium-universal "^2.0.0" + hosted-git-info@^2.1.4: version "2.8.5" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.5.tgz#759cfcf2c4d156ade59b0b2dfabddc42a6b9c70c" @@ -742,25 +772,27 @@ hypercore-crypto@^1.0.0: sodium-universal "^2.0.0" uint64be "^2.0.2" -hypercore-protocol@^6.5.0, hypercore-protocol@^6.8.0: - version "6.12.0" - resolved "https://registry.yarnpkg.com/hypercore-protocol/-/hypercore-protocol-6.12.0.tgz#0fafa7c047a7e4c35b2d77639e2489f60d7b1a0d" - integrity sha512-T3oy9/7QFejqJX2RGcCUU1944e5/eKbLlSz9JPTNN1QbYFJgat/r7eTyOO8SMSLUimUmQx6YBMKhgYbdKzp7Bw== +hypercore-protocol@^7.1.0, hypercore-protocol@^7.6.0: + version "7.7.1" + resolved "https://registry.yarnpkg.com/hypercore-protocol/-/hypercore-protocol-7.7.1.tgz#6ed232aaa88cf77978a55537cf97ce94aaed294c" + integrity sha512-Yt8FgKtjDLFGaAjtzJNSyxnKpWJvhC+UGCE2UXPDcE4KlH/R21qaMpEeBl8ng2ztCyfpneKp+IruOdC165BjrQ== dependencies: - buffer-alloc-unsafe "^1.0.0" - buffer-from "^1.0.0" - inherits "^2.0.3" - protocol-buffers-encodings "^1.1.0" - readable-stream "^2.2.6" - sodium-universal "^2.0.0" - sorted-indexof "^1.0.0" - varint "^5.0.0" + abstract-extension "^3.0.1" + debug "^4.1.1" + hypercore-crypto "^1.0.0" + inspect-custom-symbol "^1.1.0" + nanoguard "^1.2.1" + pretty-hash "^1.0.1" + simple-hypercore-protocol "^1.4.0" + streamx "^2.1.0" + timeout-refresh "^1.0.0" -hypercore@^7.2.0: - version "7.7.1" - resolved "https://registry.yarnpkg.com/hypercore/-/hypercore-7.7.1.tgz#2a6b3fd84496b562fde07e5d39bfbab096a37fbc" - integrity sha512-boEiPCK848pNGACW1j111tJApu530e/UPpwbHytJZlrVf3YdgUIP1KL3aSi5xJFLUnuO8GLGl4lIsSeH8TaQQA== +hypercore@^8.3.0, hypercore@^8.4.1: + version "8.4.1" + resolved "https://registry.yarnpkg.com/hypercore/-/hypercore-8.4.1.tgz#9f22457af7b570e942c1bd6a1ea8d64c1d74e3b5" + integrity 
sha512-rsKnTIh+vklOByWZ+V9Rs5i0be7NwmxPQcUa8drRWEPQrRpl7i6WJJKwhDgNWk6M75d4ltt7Flryfp+76zMAzA==
   dependencies:
+    abstract-extension "^3.0.1"
     array-lru "^1.1.0"
     atomic-batcher "^1.0.2"
     bitfield-rle "^2.2.1"
@@ -770,18 +802,19 @@ hypercore@^7.2.0:
     flat-tree "^1.6.0"
     from2 "^2.3.0"
     hypercore-crypto "^1.0.0"
-    hypercore-protocol "^6.5.0"
+    hypercore-protocol "^7.1.0"
     inherits "^2.0.3"
     inspect-custom-symbol "^1.1.0"
     last-one-wins "^1.0.4"
     memory-pager "^1.0.2"
     merkle-tree-stream "^3.0.3"
     nanoguard "^1.2.0"
+    nanoresource "^1.3.0"
     pretty-hash "^1.0.1"
     random-access-file "^2.1.0"
     sodium-universal "^2.0.0"
     sparse-bitfield "^3.0.0"
-    thunky "^1.0.1"
+    timeout-refresh "^1.0.1"
     uint64be "^2.0.1"
     unordered-array-remove "^1.0.2"
     unordered-set "^2.0.0"
@@ -972,15 +1005,23 @@ json-parse-better-errors@^1.0.1:
   resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9"
   integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==

-kappa-core@^3.0.2:
-  version "3.0.2"
-  resolved "https://registry.yarnpkg.com/kappa-core/-/kappa-core-3.0.2.tgz#a4e593cddcaea42bc6496d41b10cd956a0e015d1"
-  integrity sha512-7SVMh++wGVZ0Zt4oUgCKhJyLqe4QCMvNrplMQYbvVHcpVMsiND9yw1p1VDUmJeQaHCCEQeOuxFmJP3xuiJDU7A==
+"kappa-core@github:Frando/kappa-core#exp-0.1.3":
+  version "7.0.0-alpha1"
+  resolved "https://codeload.github.com/Frando/kappa-core/tar.gz/15777919bf820c7eaac6b76d1947cce93f922dac"
   dependencies:
-    hypercore "^7.2.0"
-    inherits "^2.0.3"
-    multifeed "^3.0.6"
-    multifeed-index "^3.2.2"
+    inherits "^2.0.4"
+    stream-collector "^1.0.1"
+    thunky "^1.1.0"
+
+"kappa-sparse-indexer@github:Frando/kappa-sparse-indexer#master":
+  version "0.4.0"
+  resolved "https://codeload.github.com/Frando/kappa-sparse-indexer/tar.gz/c49f3c8abbfe8a1c97c6001410e6949771343720"
+  dependencies:
+    debug "^4.1.1"
+    mutexify "^1.2.0"
+    pretty-hash "^1.0.1"
+    stream-collector "^1.0.1"
+    subleveldown "^4.1.4"

 last-one-wins@^1.0.4:
   version "1.0.4"
@@ -1062,6 +1103,13 @@ level-live-stream@^1.4.12:
     pull-level "^2.0.3"
     pull-stream-to-stream "~1.2.4"

+level-option-wrap@^1.1.0:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/level-option-wrap/-/level-option-wrap-1.1.0.tgz#ad20e68d9f3c22c8897531cc6aa7af596b1ed129"
+  integrity sha1-rSDmjZ88IsiJdTHMaqevWWse0Sk=
+  dependencies:
+    defined "~0.0.0"
+
 level-packager@^5.0.0:
   version "5.1.0"
   resolved "https://registry.yarnpkg.com/level-packager/-/level-packager-5.1.0.tgz#9c01c6c8e2380d3196d61e56bd79c2eff4a9d5c3"
@@ -1130,7 +1178,7 @@ levelup@^1.3.1:
     semver "~5.4.1"
     xtend "~4.0.0"

-levelup@^4.3.2:
+levelup@^4.3.1, levelup@^4.3.2:
   version "4.3.2"
   resolved "https://registry.yarnpkg.com/levelup/-/levelup-4.3.2.tgz#31c5b1b29f146d1d35d692e01a6da4d28fa55ebd"
   integrity sha512-cRTjU4ktWo59wf13PHEiOayHC3n0dOh4i5+FHr4tv4MX9+l7mqETicNq3Aj07HKlLdk0z5muVoDL2RD+ovgiyA==
@@ -1309,23 +1357,18 @@ ms@^2.1.1:
   resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
   integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==

-multifeed-index@^3.2.2:
-  version "3.3.2"
-  resolved "https://registry.yarnpkg.com/multifeed-index/-/multifeed-index-3.3.2.tgz#cec16c618b761c801eab9afe92acb38d0f62daf1"
-  integrity sha512-UPEpvca/hx0UqanoMs9uMEpTrFbmW8CV9V4Oww44fQxLj/m6eAqhFyVMCanKjS4daL9K8hvfPtWj5V4KsKat+Q==
-  dependencies:
-    inherits "^2.0.3"
-
-multifeed@^3.0.6:
-  version "3.0.8"
-  resolved "https://registry.yarnpkg.com/multifeed/-/multifeed-3.0.8.tgz#31c623cbb79d0b72fd4d9dec01c8cd907ba82e74"
-  integrity sha512-OsNz8PFD3YUiPycZo093a/FnO88lc5xoFUgtie3loyfsR85cn6Ui4rvYTiJWpfhJk4AruditvoyX0Rx6Z513SQ==
+multifeed@5.1.1:
+  version "5.1.1"
+  resolved "https://registry.yarnpkg.com/multifeed/-/multifeed-5.1.1.tgz#d151c6fe641ec72a2f99b839f2155e4278f1baf6"
+  integrity sha512-a/AHR/MXUF24CvCNc/lHZSw2eBrxH6gn0l2Ou95sij+pyU/AFJT70+0o1cMRnbWvMxeoC4pIKqEKSHqxY00O3w==
   dependencies:
     debug "^4.1.0"
-    hypercore-protocol "^6.8.0"
+    hypercore "^8.3.0"
+    hypercore-protocol "^7.6.0"
     inherits "^2.0.3"
     mutexify "^1.2.0"
     random-access-file "^2.0.1"
+    random-access-memory "^3.1.1"
     through2 "^3.0.0"

 mutexify@^1.2.0:
@@ -1338,16 +1381,28 @@ nan@^2.14.0:
   resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c"
   integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==

-nanoassert@^1.0.0:
+nanoassert@^1.0.0, nanoassert@^1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/nanoassert/-/nanoassert-1.1.0.tgz#4f3152e09540fde28c76f44b19bbcd1d5a42478d"
   integrity sha1-TzFS4JVA/eKMdvRLGbvNHVpCR40=

-nanoguard@^1.2.0:
+nanoassert@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/nanoassert/-/nanoassert-2.0.0.tgz#a05f86de6c7a51618038a620f88878ed1e490c09"
+  integrity sha512-7vO7n28+aYO4J+8w96AzhmU8G+Y/xpPDJz/se19ICsqj/momRbb9mh9ZUtkoJ5X3nTnPdhEJyc0qnM6yAsHBaA==
+
+nanoguard@^1.2.0, nanoguard@^1.2.1:
   version "1.2.2"
   resolved "https://registry.yarnpkg.com/nanoguard/-/nanoguard-1.2.2.tgz#a2069c563600a1853fd3ec31ed11e9681aa3dd80"
   integrity sha512-IMVIZkHP7Ep01foXurcJR59Hj/0yyApNK3JWpVHq2QVdLgo8wGU/ZsodlpL7jJ/m24+lxT0eyavrLCEuYQK2fg==

+nanoresource@^1.3.0:
+  version "1.3.0"
+  resolved "https://registry.yarnpkg.com/nanoresource/-/nanoresource-1.3.0.tgz#823945d9667ab3e81a8b2591ab8d734552878cd0"
+  integrity sha512-OI5dswqipmlYfyL3k/YMm7mbERlh4Bd1KuKdMHpeoVD1iVxqxaTMKleB4qaA2mbQZ6/zMNSxCXv9M9P/YbqTuQ==
+  dependencies:
+    inherits "^2.0.4"
+
 napi-macros@^1.8.2:
   version "1.8.2"
   resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-1.8.2.tgz#299265c1d8aa401351ad0675107d751228c03eda"
@@ -1383,6 +1438,16 @@ node-gyp-build@~4.1.0:
   resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.1.1.tgz#d7270b5d86717068d114cc57fff352f96d745feb"
   integrity sha512-dSq1xmcPDKPZ2EED2S6zw/b9NKsqzXRE6dVr8TVQnI3FJOTteUMuqF3Qqs6LZg+mLGYJWqQzMbIjMtJqTv87nQ==

+noise-protocol@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/noise-protocol/-/noise-protocol-1.0.0.tgz#260e0447ddabc6f1d40f4c5829a64aba38c08768"
+  integrity sha512-MEseV3jGZGkPPlhJMHrjFHs9XCEcnoYg72hI89GMz/JfDjWEHzhTaTGqHM5gTGtLA9Z04XoGvEI5aCEAqplQrQ==
+  dependencies:
+    clone "^2.1.2"
+    hmac-blake2b "^0.2.0"
+    nanoassert "^1.1.0"
+    sodium-native "^2.2.1"
+
 normalize-package-data@^2.3.2:
   version "2.5.0"
   resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8"
@@ -1459,7 +1524,7 @@ object-keys@^1.0.12, object-keys@^1.1.1:
   resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e"
   integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==

-once@^1.3.0:
+once@^1.3.0, once@^1.3.1:
   version "1.4.0"
   resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
   integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
@@ -1724,6 +1789,11 @@ re-emitter@1.1.3:
   resolved "https://registry.yarnpkg.com/re-emitter/-/re-emitter-1.1.3.tgz#fa9e319ffdeeeb35b27296ef0f3d374dac2f52a7"
   integrity sha1-+p4xn/3u6zWycpbvDz03TawvUqc=

+reachdown@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/reachdown/-/reachdown-1.0.0.tgz#c63715190c9a0dd108bea3610bf9690ad0983edb"
+  integrity sha512-Ty7X/t52GwgRam3SMpZC2grmutuUarkiD4sVhjM8g8/5NlX8PAEsYO/pyx6nTTqS9udee1j1BxaAS/f6Rm8SMw==
+
 read-pkg-up@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-4.0.0.tgz#1b221c6088ba7799601c808f91161c66e58f8978"
@@ -1773,7 +1843,20 @@ readable-stream@^1.0.33:
     isarray "0.0.1"
     string_decoder "~0.10.x"

-readable-stream@^2.0.0, readable-stream@^2.0.5, readable-stream@^2.1.4, readable-stream@^2.2.2, readable-stream@^2.2.6, readable-stream@~2.3.6:
+readable-stream@^2.0.0, readable-stream@^2.0.5, readable-stream@^2.1.4:
+  version "2.3.7"
+  resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
+  integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
+  dependencies:
+    core-util-is "~1.0.0"
+    inherits "~2.0.3"
+    isarray "~1.0.0"
+    process-nextick-args "~2.0.0"
+    safe-buffer "~5.1.1"
+    string_decoder "~1.1.1"
+    util-deprecate "~1.0.1"
+
+readable-stream@^2.2.2, readable-stream@~2.3.6:
   version "2.3.6"
   resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf"
   integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==
@@ -1905,6 +1988,32 @@ signed-varint@^2.0.1:
   dependencies:
     varint "~5.0.0"

+simple-handshake@^1.3.1:
+  version "1.3.1"
+  resolved "https://registry.yarnpkg.com/simple-handshake/-/simple-handshake-1.3.1.tgz#3f3d3ef103cb59e25f85c6b5952a631659e53411"
+  integrity sha512-3Q6FjXdVFCa5JiLsWFl9s/Wp9hfBI9OqGfnlA/fUqIgR8M6zykFMxgGmV7M3YFbBkXYXQYayj6D6aFDejQcPjA==
+  dependencies:
+    nanoassert "^1.1.0"
+    noise-protocol "^1.0.0"
+
+simple-hypercore-protocol@^1.4.0:
+  version "1.4.0"
+  resolved "https://registry.yarnpkg.com/simple-hypercore-protocol/-/simple-hypercore-protocol-1.4.0.tgz#43957d7b1b0a045a346f1f566845d2a79d4e4d0c"
+  integrity sha512-an2jCnYD4e/saLb4vqD8FnIus2wP6wa+C+rI2uC8DBDFok7h41wz0DOOol7h+6+9VXu7Rb2cmkyXaqvkuJDFaA==
+  dependencies:
+    protocol-buffers-encodings "^1.1.0"
+    simple-handshake "^1.3.1"
+    simple-message-channels "^1.2.1"
+    sodium-universal "^2.0.0"
+    varint "^5.0.0"
+
+simple-message-channels@^1.2.1:
+  version "1.2.1"
+  resolved "https://registry.yarnpkg.com/simple-message-channels/-/simple-message-channels-1.2.1.tgz#d827d3da0df1f862fd09b748457a01f9a0eb7ac3"
+  integrity sha512-knSr69GKW9sCjzpoy817xQelpOASUQ53TXCBcSLDKLE7GTGpUAhZzOZYrdbX2Ig//m+8AIrNp7sM7HDNHBRzXw==
+  dependencies:
+    varint "^5.0.0"
+
 siphash24@^1.0.1:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/siphash24/-/siphash24-1.1.1.tgz#94ad021a2b2c62de381b546ee02df0cf778acd50"
@@ -1922,7 +2031,7 @@ sodium-javascript@~0.5.0:
     siphash24 "^1.0.1"
     xsalsa20 "^1.0.0"

-sodium-native@^2.0.0:
+sodium-native@^2.0.0, sodium-native@^2.2.1:
   version "2.4.6"
   resolved "https://registry.yarnpkg.com/sodium-native/-/sodium-native-2.4.6.tgz#8a8173095e8cf4f997de393a2ba106c34870cac2"
   integrity sha512-Ro9lhTjot8M01nwKLXiqLSmjR7B8o+Wg4HmJUjEShw/q6XPlNMzjPkA1VJKaMH8SO8fJ/sggAKVwreTaFszS2Q==
@@ -1940,11 +2049,6 @@ sodium-universal@^2.0.0:
   optionalDependencies:
     sodium-native "^2.0.0"

-sorted-indexof@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/sorted-indexof/-/sorted-indexof-1.0.0.tgz#17c742ff7cf187e2f59a15df9b81f17a62ce0899"
-  integrity sha1-F8dC/3zxh+L1mhXfm4HxemLOCJk=
-
 source-map@^0.5.0:
   version "0.5.7"
   resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
@@ -2012,6 +2116,13 @@ sprintf-js@~1.0.2:
   resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
   integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=

+stream-collector@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/stream-collector/-/stream-collector-1.0.1.tgz#4d4e55f171356121b2c5f6559f944705ab28db15"
+  integrity sha1-TU5V8XE1YSGyxfZVn5RHBaso2xU=
+  dependencies:
+    once "^1.3.1"
+
 stream-to-pull-stream@^1.7.1, stream-to-pull-stream@^1.7.3:
   version "1.7.3"
   resolved "https://registry.yarnpkg.com/stream-to-pull-stream/-/stream-to-pull-stream-1.7.3.tgz#4161aa2d2eb9964de60bfa1af7feaf917e874ece"
@@ -2020,6 +2131,14 @@ stream-to-pull-stream@^1.7.1, stream-to-pull-stream@^1.7.3:
     looper "^3.0.0"
     pull-stream "^3.2.3"

+streamx@^2.1.0:
+  version "2.5.0"
+  resolved "https://registry.yarnpkg.com/streamx/-/streamx-2.5.0.tgz#4163d152f594cd386c4fd5ef116b8d8063f73db4"
+  integrity sha512-3HbwdI3Wnjj4dyi9Pk+rkEuQzfiUIUuCT9ayYa2Aohnxvod01TNwnC2e8KWlpjNqrlctNhgctogssukMebeFmg==
+  dependencies:
+    fast-fifo "^1.0.0"
+    nanoassert "^2.0.0"
+
 string-width@^3.0.0, string-width@^3.1.0:
   version "3.1.0"
   resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961"
@@ -2099,6 +2218,18 @@ strip-bom@^3.0.0:
   resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"
   integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=

+subleveldown@^4.1.4:
+  version "4.1.4"
+  resolved "https://registry.yarnpkg.com/subleveldown/-/subleveldown-4.1.4.tgz#3579563e4de4b811008046ad33280679bc39dba4"
+  integrity sha512-njpSBP/Bxh7EahraG6IhR6goOH2ffMTMVt7Ud+k/OhNFHrrmuvK+XYfauI8KnjCm0w381cUF43pejlWeJMZChA==
+  dependencies:
+    abstract-leveldown "^6.1.1"
+    encoding-down "^6.2.0"
+    inherits "^2.0.3"
+    level-option-wrap "^1.1.0"
+    levelup "^4.3.1"
+    reachdown "^1.0.0"
+
 supports-color@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7"
@@ -2198,11 +2329,16 @@ through@2, through@~2.3.4, through@~2.3.8:
   resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"
   integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=

-thunky@^1.0.1:
+thunky@^1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d"
   integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==

+timeout-refresh@^1.0.0, timeout-refresh@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/timeout-refresh/-/timeout-refresh-1.0.1.tgz#182b23c7cacd1420fa9c0eff2c6b31c3db7ce959"
+  integrity sha512-bW5oSShdwFCN9K7RpB5dkq5bqNlGt8Lwbfxr8vprysk8hDiK5yy7Mgf2Qlz2ssE0gfQfoYhk4VLY9Hhsnr9Ulw==
+
 tmp@^0.1.0:
   version "0.1.0"
   resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.1.0.tgz#ee434a4e22543082e294ba6201dcc6eafefa2877"