forked from public/fvtt-cthulhu-eternal
Initial import with skill sheet working

node_modules/abstract-level/CHANGELOG.md (48 lines, generated, vendored, normal file)

@@ -0,0 +1,48 @@
# Changelog

## [1.0.4] - 2024-01-20

### Fixed

- Fix TypeScript definitions of `all()` and `nextv()` ([#67](https://github.com/Level/abstract-level/issues/67)) ([`8e85993`](https://github.com/Level/abstract-level/commit/8e85993), [`9f17757`](https://github.com/Level/abstract-level/commit/9f17757)) (Bryan)

## [1.0.3] - 2022-03-20

### Added

- Document error codes of `classic-level` and `many-level` ([#20](https://github.com/Level/abstract-level/issues/20)) ([`4b3464c`](https://github.com/Level/abstract-level/commit/4b3464c)) (Vincent Weevers)

### Fixed

- Add hidden `abortOnClose` option to iterators ([`2935180`](https://github.com/Level/abstract-level/commit/2935180)) (Vincent Weevers)
- Make internal iterator decoding options enumerable ([`eb08363`](https://github.com/Level/abstract-level/commit/eb08363)) (Vincent Weevers)
- Restore Sauce Labs browser tests ([`90b8816`](https://github.com/Level/abstract-level/commit/90b8816)) (Vincent Weevers)

## [1.0.2] - 2022-03-06

### Fixed

- Fix TypeScript declaration of chained batch `write()` options ([`392b7f7`](https://github.com/Level/abstract-level/commit/392b7f7)) (Vincent Weevers)
- Document the return type of `db.batch()` and add example ([`9739bba`](https://github.com/Level/abstract-level/commit/9739bba)) (Vincent Weevers)

## [1.0.1] - 2022-02-06

### Fixed

- Add `highWaterMarkBytes` option to tests where it matters ([`6b25a91`](https://github.com/Level/abstract-level/commit/6b25a91)) (Vincent Weevers)
- Clarify the meaning of `db.status` ([`2e90b05`](https://github.com/Level/abstract-level/commit/2e90b05)) (Vincent Weevers)
- Use `new` in README examples ([`379503e`](https://github.com/Level/abstract-level/commit/379503e)) (Vincent Weevers)

## [1.0.0] - 2022-01-30

_:seedling: Initial release. If you are upgrading from `abstract-leveldown` please see [`UPGRADING.md`](UPGRADING.md)_

[1.0.4]: https://github.com/Level/abstract-level/releases/tag/v1.0.4
[1.0.3]: https://github.com/Level/abstract-level/releases/tag/v1.0.3
[1.0.2]: https://github.com/Level/abstract-level/releases/tag/v1.0.2
[1.0.1]: https://github.com/Level/abstract-level/releases/tag/v1.0.1
[1.0.0]: https://github.com/Level/abstract-level/releases/tag/v1.0.0

node_modules/abstract-level/LICENSE (21 lines, generated, vendored, normal file)

@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright © 2013 Rod Vagg and the contributors to abstract-level.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

node_modules/abstract-level/README.md (1436 lines, generated, vendored, normal file)
File diff suppressed because it is too large

node_modules/abstract-level/UPGRADING.md (479 lines, generated, vendored, normal file)

@@ -0,0 +1,479 @@
# Upgrade Guide

This document describes breaking changes and how to upgrade. For a complete list of changes including minor and patch releases, please refer to the [changelog](CHANGELOG.md).

## Table of Contents

<details><summary>Click to expand</summary>

- [1.0.0](#100)
  - [1. API parity with `levelup`](#1-api-parity-with-levelup)
    - [1.1. New: promises](#11-new-promises)
    - [1.2. New: events](#12-new-events)
    - [1.3. New: idempotent open](#13-new-idempotent-open)
    - [1.4. New: deferred open](#14-new-deferred-open)
    - [1.5. No constructor callback](#15-no-constructor-callback)
    - [1.6. New: state checks](#16-new-state-checks)
    - [1.7. New: chained batch length](#17-new-chained-batch-length)
  - [2. API parity with `level`](#2-api-parity-with-level)
    - [2.1. For consumers](#21-for-consumers)
    - [2.2. For implementors](#22-for-implementors)
    - [2.3. Other notable changes](#23-other-notable-changes)
  - [3. Streams have moved](#3-streams-have-moved)
  - [4. Zero-length keys and range options are now valid](#4-zero-length-keys-and-range-options-are-now-valid)
  - [5. Resources are auto-closed](#5-resources-are-auto-closed)
    - [5.1. Closing iterators is idempotent](#51-closing-iterators-is-idempotent)
    - [5.2. Chained batch can be closed](#52-chained-batch-can-be-closed)
  - [6. Errors now use codes](#6-errors-now-use-codes)
  - [7. Semi-private properties have been removed](#7-semi-private-properties-have-been-removed)
  - [8. Changes to test suite](#8-changes-to-test-suite)
  - [9. Sublevels are builtin](#9-sublevels-are-builtin)

</details>

## 1.0.0

**Introducing `abstract-level`: a fork of [`abstract-leveldown`](https://github.com/Level/abstract-leveldown) that removes the need for [`levelup`](https://github.com/Level/levelup), [`encoding-down`](https://github.com/Level/encoding-down) and more. An `abstract-level` database is a complete solution that doesn't need to be wrapped. It has the same API as `level(up)` including encodings, promises and events. In addition, implementations can now choose to use Uint8Array instead of Buffer. Consumers of an implementation can use both. Sublevels are builtin.**

We've put together several upgrade guides for different modules. See the [FAQ](https://github.com/Level/community#faq) to find the best upgrade guide for you. This upgrade guide describes how to replace `abstract-leveldown` with `abstract-level`. Implementations that do so can no longer be wrapped with `levelup`.

The npm package name is `abstract-level` and the main export is called `AbstractLevel` rather than `AbstractLevelDOWN`. The codebase now uses classes. Support of Node.js 10 has been dropped.

For most folks, a database that upgraded from `abstract-leveldown` to `abstract-level` can be a drop-in replacement for a `level(up)` database (with the exception of stream methods). Let's start this guide there: all methods have been enhanced to reach API parity with `levelup` and `level`.

### 1. API parity with `levelup`

#### 1.1. New: promises

Methods that take a callback now also support promises. They return a promise if no callback is provided, the same as `levelup`. Implementations that override public (non-underscored) methods _must_ do the same and any implementation _should_ do the same for any additional methods.
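
As a usage sketch (assuming `db` is any `abstract-level` database and the key exists), both call styles below are equivalent:

```js
// Callback style, as with levelup
db.get('example-key', function (err, value) {
  if (err) throw err
  console.log(value)
})

// Promise style: the same method returns a promise when no callback is given
const value = await db.get('example-key')
console.log(value)
```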

#### 1.2. New: events

An `abstract-level` database emits the same events as `levelup` would.
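
For example, the write events familiar from `levelup` are available; a minimal sketch, assuming `db` is already constructed:

```js
db.on('put', (key, value) => {
  console.log('put', key)
})

db.on('batch', (operations) => {
  console.log(`batch of ${operations.length} operations committed`)
})

await db.put('abc', 'xyz') // Emits 'put'
```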

#### 1.3. New: idempotent open

Opening and closing a database is idempotent and safe, similar to `levelup` but more precise. If `open()` and `close()` are called repeatedly, the last call dictates the final status. Callbacks are not called (or promises not resolved) until any pending state changes are done. Same for events. Unlike on `levelup` it is safe to call `open()` while status is `'closing'`: the database will wait for closing to complete and then reopen. None of these changes are likely to constitute a breaking change; they increase state consistency in edge cases.

The `open()` method has a new option called `passive`. If set to `true` the call will wait for, but not initiate, opening of the database. This has a similar effect to `db.once('open', callback)`, with the added benefit that it also works if the database is already open. Implementations that wrap another database can use the `passive` option to open themselves without taking full control of the database that they wrap.
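
A sketch of the `passive` option, for code (a plugin, say) that receives a database it does not own and only wants to wait until it is open:

```js
// Wait for the database to be open, without initiating the open itself.
// Also resolves immediately if the database is already open.
await db.open({ passive: true })

// Safe to use synchronous APIs such as db.iterator() from here on
```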

#### 1.4. New: deferred open

Deferred open is built-in. This means a database opens itself a tick after its constructor returns (unless `open()` was called manually). Any operations made until opening has completed are queued up in memory. When opening completes the operations are replayed. If opening failed (and this is a new behavior compared to `levelup`) the operations will yield errors. The `AbstractLevel` class has a new `defer()` method for an implementation to defer custom operations.

The initial `status` of a database is `'opening'` rather than `'new'`, which no longer exists. Wrapping a database with [`deferred-leveldown`](https://github.com/Level/deferred-leveldown) is not supported and will exhibit undefined behavior.

Implementations must also accept options for `open()` in their constructor, which was previously done by `levelup`. For example, usage of the [`classic-level`](https://github.com/Level/classic-level) implementation is as follows:

```js
const db = new ClassicLevel('./db', {
  createIfMissing: false,
  compression: false
})
```

This works by first forwarding options to the `AbstractLevel` constructor, which in turn forwards them to `open(options)`. If `open(options)` is called manually those options will be shallowly merged with options from the constructor:

```js
// Results in { createIfMissing: false, compression: true }
await db.open({ compression: true })
```

A database is not "patch-safe". If some form of plugin monkey-patches a database like in the following example, it must now also take the responsibility of deferring the operation (as well as handling promises and callbacks) using `db.defer()`. I.e. this example is incomplete:

```js
function plugin (db) {
  const original = db.get

  db.get = function (...args) {
    original.call(this, ...args)
  }
}
```
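
A more complete version of that plugin would defer its work while the database is opening. The sketch below handles only the callback style for brevity; a full version would also have to return a promise when no callback is given:

```js
function plugin (db) {
  const original = db.get

  db.get = function (...args) {
    if (this.status === 'opening') {
      // Replay the call once the database has finished (or failed) opening
      this.defer(() => this.get(...args))
      return
    }

    return original.call(this, ...args)
  }
}
```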

#### 1.5. No constructor callback

The database constructor does not take a callback argument, unlike `levelup`. This goes for `abstract-level` as well as implementations - which is to say, implementors don't have to (and should not) support this old pattern.

Instead call `db.open()` if you wish to wait for opening (which is not necessary to use the database) or to capture an error. If that's your reason for using the callback and you previously initialized a database like so (simplified):

```js
levelup(function (err, db) {
  // ..
})
```

You must now do:

```js
db.open(function (err) {
  // ..
})
```

Or using promises:

```js
await db.open()
```

#### 1.6. New: state checks

On any operation, an `abstract-level` database checks if it's open. If not, it will either throw an error (if the relevant API is synchronous) or asynchronously yield an error. For example:

```js
await db.close()

try {
  db.iterator()
} catch (err) {
  console.log(err.code) // LEVEL_DATABASE_NOT_OPEN
}
```

_Errors now have a `code` property. More on that below._

This may be a breaking change downstream because it changes error messages for implementations that had their own safety checks (which will now be ineffective because `abstract-level` checks are performed first) or implicitly relied on `levelup` checks. By safety we mean mainly that yielding a JavaScript error is preferred over segmentation faults, though non-native implementations also benefit from detecting incorrect usage.

Implementations that have additional methods should add or align their own safety checks for consistency. Like so:

<details>
<summary>Click to expand</summary>

```js
const ModuleError = require('module-error')

class ExampleLevel extends AbstractLevel {
  // For brevity this example does not implement promises or encodings
  approximateSize (start, end, callback) {
    if (this.status === 'opening') {
      this.defer(() => this.approximateSize(start, end, callback))
    } else if (this.status !== 'open') {
      this.nextTick(callback, new ModuleError('Database is not open', {
        code: 'LEVEL_DATABASE_NOT_OPEN'
      }))
    } else {
      // ..
    }
  }
}
```

</details>

#### 1.7. New: chained batch length

The `AbstractChainedBatch` prototype has a new `length` property that, like a chained batch in `levelup`, returns the number of queued operations in the batch. Implementations should not have to make changes for this unless they monkey-patched public methods of `AbstractChainedBatch`.
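
A quick sketch of the new property:

```js
const batch = db.batch().put('a', '1').put('b', '2').del('c')

console.log(batch.length) // 3
await batch.write()
```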

### 2. API parity with `level`

It was previously necessary to use [`level`](https://github.com/Level/level) to get the "full experience". Or similar modules like [`level-mem`](https://github.com/Level/mem), [`level-rocksdb`](https://github.com/Level/level-rocksdb) and more. These modules combined an `abstract-leveldown` implementation with [`encoding-down`](https://github.com/Level/encoding-down) and [`levelup`](https://github.com/Level/levelup). Encodings are now built into `abstract-level`, using [`level-transcoder`](https://github.com/Level/transcoder) rather than [`level-codec`](https://github.com/Level/codec). The main change is that logic from the existing public API has been expanded down into the storage layer.

The `level` module still has a place, for its support of both Node.js and browsers and for being the main entrypoint into the Level ecosystem. The next major version of `level` (v8.0.0) will likely simply export [`classic-level`](https://github.com/Level/classic-level) in Node.js and [`browser-level`](https://github.com/Level/browser-level) in browsers. To differentiate, the text below will refer to the old version as `level@7`.

#### 2.1. For consumers

All relevant methods including the database constructor now accept `keyEncoding` and `valueEncoding` options, the same as `level@7`. Read operations now yield strings rather than buffers by default, having the same default `'utf8'` encoding as `level@7` and friends.
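
A sketch of how these options combine, using a hypothetical `ExampleLevel` implementation; `valueEncoding` can be set as a database-wide default and overridden per operation:

```js
// Database-wide default
const db = new ExampleLevel('./db', { valueEncoding: 'json' })

await db.put('profile', { name: 'alice' })
console.log(await db.get('profile')) // { name: 'alice' }

// Per-operation override: read the raw stored string instead
console.log(await db.get('profile', { valueEncoding: 'utf8' })) // '{"name":"alice"}'
```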

There are a few differences from `level@7` and `encoding-down`. Some breaking:

- The lesser-used `'ascii'`, `'ucs2'` and `'utf16le'` encodings are not supported
- The `'id'` encoding, which was not supported by any active `abstract-leveldown` implementation and aliased as `'none'`, has been removed
- The undocumented `encoding` option (as an alias for `valueEncoding`) is not supported.

And some non-breaking:

- The `'binary'` encoding has been renamed to `'buffer'`, with `'binary'` as an alias
- The `'utf8'` encoding previously did not touch Buffers. Now it will call `buffer.toString('utf8')` for consistency. Consumers can use the `'buffer'` encoding to avoid this conversion.

If you previously did one of the following (on a database that's defaulting to the `'utf8'` encoding):

```js
await db.put('a', Buffer.from('x'))
await db.put('a', Buffer.from('x'), { valueEncoding: 'binary' })
```

Both examples will still work (assuming the buffer contains only UTF-8 data) but you should now do:

```js
await db.put('a', Buffer.from('x'), { valueEncoding: 'buffer' })
```

Or use the new `'view'` encoding which accepts Uint8Arrays (and therefore also Buffer):

```js
await db.put('a', new Uint8Array(...), { valueEncoding: 'view' })
```

#### 2.2. For implementors

_You can skip this section if you're consuming (rather than writing) an `abstract-level` implementation._

Both the public and private API of `abstract-level` are encoding-aware. This means that private methods receive `keyEncoding` and `valueEncoding` options too, instead of the `keyAsBuffer`, `valueAsBuffer` and `asBuffer` options that `abstract-leveldown` had. Implementations don't need to perform encoding or decoding themselves. In fact they can do less: the `_serializeKey()` and `_serializeValue()` methods are also gone and implementations are less likely to have to convert between strings and buffers.

For example: a call like `db.put(key, { x: 2 }, { valueEncoding: 'json' })` will encode the `{ x: 2 }` value and might forward it to the private API as `db._put(key, '{"x":2}', { valueEncoding: 'utf8' }, callback)`. Same for the key, omitted for brevity. We say "might" because it depends on the implementation, which can now declare which encodings it supports.

To first give a concrete example for `get()`, if your implementation previously did:

```js
class ExampleLeveldown extends AbstractLevelDOWN {
  _get (key, options, callback) {
    if (options.asBuffer) {
      this.nextTick(callback, null, Buffer.from('abc'))
    } else {
      this.nextTick(callback, null, 'abc')
    }
  }
}
```

You must now do (if still relevant):

```js
class ExampleLevel extends AbstractLevel {
  _get (key, options, callback) {
    if (options.valueEncoding === 'buffer') {
      this.nextTick(callback, null, Buffer.from('abc'))
    } else {
      this.nextTick(callback, null, 'abc')
    }
  }
}
```

The encoding options and data received by the private API depend on which encodings it supports. It must declare those via the manifest passed to the `AbstractLevel` constructor. See the [`README`](README.md) for details. For example, an implementation might only support storing data as Uint8Arrays, known here as the `'view'` encoding:

```js
class ExampleLevel extends AbstractLevel {
  constructor (location, options) {
    super({ encodings: { view: true } }, options)
  }
}
```

The earlier `put()` example would then result in `db._put(key, value, { valueEncoding: 'view' }, callback)` where `value` is a Uint8Array containing JSON in binary form. And the earlier `_get()` example can be simplified to:

```js
class ExampleLevel extends AbstractLevel {
  _get (key, options, callback) {
    // No need to check valueEncoding as it's always 'view'
    this.nextTick(callback, null, new Uint8Array(...))
  }
}
```

Implementations can also declare support of multiple encodings; keys and values will then be encoded via the most optimal path. For example:

```js
super({
  encodings: {
    view: true,
    utf8: true
  }
})
```

#### 2.3. Other notable changes

- The `AbstractIterator` constructor now requires an `options` argument, for encoding options
- The `AbstractIterator#_seek()` method got a new `options` argument, for a `keyEncoding` option
- The `db.supports.bufferKeys` property has been removed. Use `db.supports.encodings.buffer` instead (see the sketch below).
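
A small sketch of that last point; code that used to feature-detect buffer keys should consult the manifest instead:

```js
// Before (abstract-leveldown): if (db.supports.bufferKeys) { ... }

// Now
if (db.supports.encodings.buffer) {
  await db.put(Buffer.from([0xff]), 'value', { keyEncoding: 'buffer' })
}
```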

### 3. Streams have moved

Node.js readable streams must now be created with a new standalone module called [`level-read-stream`](https://github.com/Level/read-stream), rather than database methods like `db.createReadStream()`. Please see its [upgrade guide](https://github.com/Level/read-stream/blob/main/UPGRADING.md#100) for details.

To offer an alternative to `db.createKeyStream()` and `db.createValueStream()`, two new types of iterators have been added: `db.keys()` and `db.values()`. Their default implementations are functional but implementors may want to override them for optimal performance. The same goes for two new methods on iterators: `nextv()` and `all()`. To achieve this and honor the `limit` option, abstract iterators now count how many items they yielded, which may remove the need for implementations to do so on their own. Please see the README for details.
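
A sketch of the new iterator types and methods, as alternatives to the removed stream methods:

```js
// Instead of db.createKeyStream() / db.createValueStream()
const keys = await db.keys({ gt: 'a', limit: 10 }).all()
const values = await db.values({ gt: 'a', limit: 10 }).all()

// Reading entries in batches with nextv()
const iterator = db.iterator()
let entries

while ((entries = await iterator.nextv(100)).length > 0) {
  for (const [key, value] of entries) {
    console.log(key, value)
  }
}

await iterator.close()
```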

### 4. Zero-length keys and range options are now valid

These keys sort before anything else. Historically they weren't supported because they caused segmentation faults in `leveldown`. That doesn't apply to today's codebase. Implementations must now support:

```js
await db.put('', 'example')

console.log(await db.get('')) // 'example'

for await (const [key, value] of db.iterator({ lte: '' })) {
  console.log(value) // 'example'
}
```

Same goes for zero-length Buffer and Uint8Array keys. Zero-length keys would previously result in an error and never reach the private API.

### 5. Resources are auto-closed

To further improve safety and consistency, additional changes were made that make an `abstract-level` database safer to use than `abstract-leveldown` wrapped with `levelup`.

#### 5.1. Closing iterators is idempotent

The `iterator.end()` method has been renamed to `iterator.close()`, with `end()` being an alias until the next major version. The term "close" makes it easier to differentiate between the iterator having reached its natural end (data-wise) versus closing it to clean up resources. If you previously did:

```js
const iterator = db.iterator()
iterator.end(callback)
```

You should now do one of:

```js
iterator.close(callback)
await iterator.close()
```

Likewise, in the private API for implementors, `_end()` has been renamed to `_close()` but without an alias. This method is no longer allowed to yield an error.

On `db.close()`, non-closed iterators are now automatically closed. This may be a breaking change but only if an implementation has (at its own risk) overridden the public `end()` method, because `close()` or `end()` is now an idempotent operation rather than yielding an `end() already called on iterator` error. If a `next()` call is in progress, closing the iterator (or database) will wait for that.

The error message `cannot call next() after end()` has been replaced with code `LEVEL_ITERATOR_NOT_OPEN`, the error `cannot call seek() after end()` has been removed in favor of a silent return, and `cannot call next() before previous next() has completed` and `cannot call seek() before next() has completed` have been replaced with code `LEVEL_ITERATOR_BUSY`.

The `next()` method no longer returns `this` (when a callback is provided).

#### 5.2. Chained batch can be closed

Chained batch has a new method `close()` which is an idempotent operation and automatically called after `write()` (for backwards compatibility) or on `db.close()`. This ensures batches can't be used after closing and reopening a db. If a `write()` is in progress, closing will wait for that. If `write()` is never called then `close()` must be. For example:

```js
const batch = db.batch()
  .put('abc', 'zyz')
  .del('foo')

if (someCondition) {
  await batch.write()
} else {
  // Decided not to commit
  await batch.close()
}

// In either case this will throw
batch.put('more', 'data')
```

These changes could be breaking for an implementation that has (at its own risk) overridden the public `write()` method. In addition, the error message `write() already called on this batch` has been replaced with code `LEVEL_BATCH_NOT_OPEN`.

An implementation can optionally override `AbstractChainedBatch#_close()` if it has resources to free and wishes to free them earlier than GC would.

### 6. Errors now use codes

The [`level-errors`](https://github.com/Level/errors) module, as used by `levelup` and friends, is not used or exposed by `abstract-level`. Instead errors thrown or yielded from a database have a `code` property. See the [`README`](./README.md#errors) for details. Going forward, the semver contract will be on `code` and error messages will change without a semver-major bump.

To minimize breakage, the most used error, as yielded by `get()` when an entry is not found, has the same properties that `level-errors` added (`notFound` and `status`) in addition to code `LEVEL_NOT_FOUND`. Those properties will be removed in a future version. Implementations can still yield an error that matches `/NotFound/i.test(err)` or they can start using the code. Either way `abstract-level` will normalize the error.

If you previously did:

```js
db.get('abc', function (err, value) {
  if (err && err.notFound) {
    // Handle missing entry
  }
})
```

That will still work but it's preferred to do:

```js
db.get('abc', function (err, value) {
  if (err && err.code === 'LEVEL_NOT_FOUND') {
    // Handle missing entry
  }
})
```

Or using promises:

```js
try {
  const value = await db.get('abc')
} catch (err) {
  if (err.code === 'LEVEL_NOT_FOUND') {
    // Handle missing entry
  }
}
```

### 7. Semi-private properties have been removed

The following properties and methods can no longer be accessed, as they've been removed or replaced with internal [symbols](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol):

- `AbstractIterator#_nexting`
- `AbstractIterator#_ended`
- `AbstractChainedBatch#_written`
- `AbstractChainedBatch#_checkWritten()`
- `AbstractChainedBatch#_operations`
- `AbstractLevel#_setupIteratorOptions()`

### 8. Changes to test suite

_You can skip this section if you're consuming (rather than writing) an `abstract-level` implementation._

The abstract test suite of `abstract-level` has some breaking changes compared to `abstract-leveldown`:

- Options to skip tests have been removed in favor of `db.supports`
- Support of `db.clear()` and `db.getMany()` is now mandatory. The default (slow) implementation of `_clear()` has been removed.
- Added tests that `gte` and `lte` range options take precedence over `gt` and `lt` respectively. This is incompatible with [`ltgt`](https://github.com/dominictarr/ltgt) but aligns with `subleveldown`, [`level-option-wrap`](https://github.com/substack/level-option-wrap) and half of `leveldown`. There was no good choice.
- The `setUp` and `tearDown` functions have been removed from the test suite and `suite.common()`.
- Added ability to access manifests via `testCommon.supports`, by lazily copying it from `testCommon.factory().supports`. This requires that the manifest does not change during the lifetime of a `db`.
- Your `factory()` function must now accept an `options` argument (see the sketch below).
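
A sketch of wiring up the abstract test suite under the new contract, assuming the suite is exposed as `abstract-level/test` (mirroring `abstract-leveldown`) and using a hypothetical `ExampleLevel` implementation:

```js
const test = require('tape')
const suite = require('abstract-level/test')

suite({
  test,
  factory (options) {
    // The factory must forward options so the suite can exercise
    // encodings and other manifest-dependent behavior
    return new ExampleLevel(options)
  }
})
```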

Many tests were imported from `levelup`, `encoding-down`, `deferred-leveldown`, `memdown`, `level-js` and `leveldown`. They test the changes described above and improve coverage of existing behavior.

Lastly, it's recommended to revisit any custom tests of an implementation, in particular tests that relied upon the previously loose state checking of `abstract-leveldown`, for example making a `db.put()` call before `db.open()`. Such a test now has a different meaning. The previous meaning can typically be restored by inserting `db.once('open', ...)` or `await db.open()` logic.

### 9. Sublevels are builtin

_This section is only relevant if you use [`subleveldown`](https://github.com/Level/subleveldown) (which can not wrap an `abstract-level` database)._

Sublevels are now builtin. If you previously did:

```js
const sub = require('subleveldown')
const example1 = sub(db, 'example1')
const example2 = sub(db, 'example2', { valueEncoding: 'json' })
```

You must now do:

```js
const example1 = db.sublevel('example1')
const example2 = db.sublevel('example2', { valueEncoding: 'json' })
```

The key structure is the same as that of `subleveldown`. This means that an `abstract-level` sublevel can read sublevels previously created with (and populated by) `subleveldown`. There are some new features:

- `db.batch(..)` takes a `sublevel` option on operations, to atomically commit data to multiple sublevels (see the sketch after this list)
- Sublevels support Uint8Array in addition to Buffer
- `AbstractLevel#_sublevel()` can be overridden to add additional methods to sublevels.
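
A sketch of the `sublevel` option on batch operations, reusing the `example1` and `example2` sublevels created above:

```js
// Atomically commit data to multiple sublevels through the parent db
await db.batch([
  { type: 'put', sublevel: example1, key: 'a', value: 'plain text' },
  { type: 'put', sublevel: example2, key: 'b', value: { json: true } }
])
```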

To reduce function overloads, the prefix argument (`example1` above) is now required and it's called `name` here. If you previously did one of the following, resulting in an empty name:

```js
subleveldown(db)
subleveldown(db, { separator: '@' })
```

You must now use an explicit empty name:

```js
db.sublevel('')
db.sublevel('', { separator: '@' })
```

The string shorthand for `{ separator }` has also been removed. If you previously did:

```js
subleveldown(db, 'example', '@')
```

You must now do:

```js
db.sublevel('example', { separator: '@' })
```

Third, the `open` option has been removed. If you need an asynchronous open hook, feel free to open an issue to discuss restoring this API. Should it support promises? Should `abstract-level` support it on any database and not just sublevels?

Lastly, the error message `Parent database is not open` (courtesy of `subleveldown` which had to check open state to prevent segmentation faults from underlying databases) changed to error code [`LEVEL_DATABASE_NOT_OPEN`](https://github.com/Level/abstract-level#errors) (courtesy of `abstract-level` which does those checks on any database).

---

_For earlier releases, before `abstract-level` was forked from `abstract-leveldown` (v7.2.0), please see [the upgrade guide of `abstract-leveldown`](https://github.com/Level/abstract-leveldown/blob/master/UPGRADING.md)._

node_modules/abstract-level/abstract-chained-batch.js (181 lines, generated, vendored, normal file)

@@ -0,0 +1,181 @@
'use strict'

const { fromCallback } = require('catering')
const ModuleError = require('module-error')
const { getCallback, getOptions } = require('./lib/common')

const kPromise = Symbol('promise')
const kStatus = Symbol('status')
const kOperations = Symbol('operations')
const kFinishClose = Symbol('finishClose')
const kCloseCallbacks = Symbol('closeCallbacks')

class AbstractChainedBatch {
  constructor (db) {
    if (typeof db !== 'object' || db === null) {
      const hint = db === null ? 'null' : typeof db
      throw new TypeError(`The first argument must be an abstract-level database, received ${hint}`)
    }

    this[kOperations] = []
    this[kCloseCallbacks] = []
    this[kStatus] = 'open'
    this[kFinishClose] = this[kFinishClose].bind(this)

    this.db = db
    this.db.attachResource(this)
    this.nextTick = db.nextTick
  }

  get length () {
    return this[kOperations].length
  }

  put (key, value, options) {
    if (this[kStatus] !== 'open') {
      throw new ModuleError('Batch is not open: cannot call put() after write() or close()', {
        code: 'LEVEL_BATCH_NOT_OPEN'
      })
    }

    const err = this.db._checkKey(key) || this.db._checkValue(value)
    if (err) throw err

    const db = options && options.sublevel != null ? options.sublevel : this.db
    const original = options
    const keyEncoding = db.keyEncoding(options && options.keyEncoding)
    const valueEncoding = db.valueEncoding(options && options.valueEncoding)
    const keyFormat = keyEncoding.format

    // Forward encoding options
    options = { ...options, keyEncoding: keyFormat, valueEncoding: valueEncoding.format }

    // Prevent double prefixing
    if (db !== this.db) {
      options.sublevel = null
    }

    const mappedKey = db.prefixKey(keyEncoding.encode(key), keyFormat)
    const mappedValue = valueEncoding.encode(value)

    this._put(mappedKey, mappedValue, options)
    this[kOperations].push({ ...original, type: 'put', key, value })

    return this
  }

  _put (key, value, options) {}

  del (key, options) {
    if (this[kStatus] !== 'open') {
      throw new ModuleError('Batch is not open: cannot call del() after write() or close()', {
        code: 'LEVEL_BATCH_NOT_OPEN'
      })
    }

    const err = this.db._checkKey(key)
    if (err) throw err

    const db = options && options.sublevel != null ? options.sublevel : this.db
    const original = options
    const keyEncoding = db.keyEncoding(options && options.keyEncoding)
    const keyFormat = keyEncoding.format

    // Forward encoding options
    options = { ...options, keyEncoding: keyFormat }

    // Prevent double prefixing
    if (db !== this.db) {
      options.sublevel = null
    }

    this._del(db.prefixKey(keyEncoding.encode(key), keyFormat), options)
    this[kOperations].push({ ...original, type: 'del', key })

    return this
  }

  _del (key, options) {}

  clear () {
    if (this[kStatus] !== 'open') {
      throw new ModuleError('Batch is not open: cannot call clear() after write() or close()', {
        code: 'LEVEL_BATCH_NOT_OPEN'
      })
    }

    this._clear()
    this[kOperations] = []

    return this
  }

  _clear () {}

  write (options, callback) {
    callback = getCallback(options, callback)
    callback = fromCallback(callback, kPromise)
    options = getOptions(options)

    if (this[kStatus] !== 'open') {
      this.nextTick(callback, new ModuleError('Batch is not open: cannot call write() after write() or close()', {
        code: 'LEVEL_BATCH_NOT_OPEN'
      }))
    } else if (this.length === 0) {
      this.close(callback)
    } else {
      this[kStatus] = 'writing'
      this._write(options, (err) => {
        this[kStatus] = 'closing'
        this[kCloseCallbacks].push(() => callback(err))

        // Emit after setting 'closing' status, because event may trigger a
        // db close which in turn triggers (idempotently) closing this batch.
        if (!err) this.db.emit('batch', this[kOperations])

        this._close(this[kFinishClose])
      })
    }

    return callback[kPromise]
  }

  _write (options, callback) {}

  close (callback) {
    callback = fromCallback(callback, kPromise)

    if (this[kStatus] === 'closing') {
      this[kCloseCallbacks].push(callback)
    } else if (this[kStatus] === 'closed') {
      this.nextTick(callback)
    } else {
      this[kCloseCallbacks].push(callback)

      if (this[kStatus] !== 'writing') {
        this[kStatus] = 'closing'
        this._close(this[kFinishClose])
      }
    }

    return callback[kPromise]
  }

  _close (callback) {
    this.nextTick(callback)
  }

  [kFinishClose] () {
    this[kStatus] = 'closed'
    this.db.detachResource(this)

    const callbacks = this[kCloseCallbacks]
    this[kCloseCallbacks] = []

    for (const cb of callbacks) {
      cb()
    }
  }
}

exports.AbstractChainedBatch = AbstractChainedBatch

node_modules/abstract-level/abstract-iterator.js (490 lines, generated, vendored, normal file)

@@ -0,0 +1,490 @@
|
||||
'use strict'
|
||||
|
||||
const { fromCallback } = require('catering')
|
||||
const ModuleError = require('module-error')
|
||||
const { getOptions, getCallback } = require('./lib/common')
|
||||
|
||||
const kPromise = Symbol('promise')
|
||||
const kCallback = Symbol('callback')
|
||||
const kWorking = Symbol('working')
|
||||
const kHandleOne = Symbol('handleOne')
|
||||
const kHandleMany = Symbol('handleMany')
|
||||
const kAutoClose = Symbol('autoClose')
|
||||
const kFinishWork = Symbol('finishWork')
|
||||
const kReturnMany = Symbol('returnMany')
|
||||
const kClosing = Symbol('closing')
|
||||
const kHandleClose = Symbol('handleClose')
|
||||
const kClosed = Symbol('closed')
|
||||
const kCloseCallbacks = Symbol('closeCallbacks')
|
||||
const kKeyEncoding = Symbol('keyEncoding')
|
||||
const kValueEncoding = Symbol('valueEncoding')
|
||||
const kAbortOnClose = Symbol('abortOnClose')
|
||||
const kLegacy = Symbol('legacy')
|
||||
const kKeys = Symbol('keys')
|
||||
const kValues = Symbol('values')
|
||||
const kLimit = Symbol('limit')
|
||||
const kCount = Symbol('count')
|
||||
|
||||
const emptyOptions = Object.freeze({})
|
||||
const noop = () => {}
|
||||
let warnedEnd = false
|
||||
|
||||
// This class is an internal utility for common functionality between AbstractIterator,
|
||||
// AbstractKeyIterator and AbstractValueIterator. It's not exported.
|
||||
class CommonIterator {
|
||||
constructor (db, options, legacy) {
|
||||
if (typeof db !== 'object' || db === null) {
|
||||
const hint = db === null ? 'null' : typeof db
|
||||
throw new TypeError(`The first argument must be an abstract-level database, received ${hint}`)
|
||||
}
|
||||
|
||||
if (typeof options !== 'object' || options === null) {
|
||||
throw new TypeError('The second argument must be an options object')
|
||||
}
|
||||
|
||||
this[kClosed] = false
|
||||
this[kCloseCallbacks] = []
|
||||
this[kWorking] = false
|
||||
this[kClosing] = false
|
||||
this[kAutoClose] = false
|
||||
this[kCallback] = null
|
||||
this[kHandleOne] = this[kHandleOne].bind(this)
|
||||
this[kHandleMany] = this[kHandleMany].bind(this)
|
||||
this[kHandleClose] = this[kHandleClose].bind(this)
|
||||
this[kKeyEncoding] = options[kKeyEncoding]
|
||||
this[kValueEncoding] = options[kValueEncoding]
|
||||
this[kLegacy] = legacy
|
||||
this[kLimit] = Number.isInteger(options.limit) && options.limit >= 0 ? options.limit : Infinity
|
||||
this[kCount] = 0
|
||||
|
||||
// Undocumented option to abort pending work on close(). Used by the
|
||||
// many-level module as a temporary solution to a blocked close().
|
||||
// TODO (next major): consider making this the default behavior. Native
|
||||
// implementations should have their own logic to safely close iterators.
|
||||
this[kAbortOnClose] = !!options.abortOnClose
|
||||
|
||||
this.db = db
|
||||
this.db.attachResource(this)
|
||||
this.nextTick = db.nextTick
|
||||
}
|
||||
|
||||
get count () {
|
||||
return this[kCount]
|
||||
}
|
||||
|
||||
get limit () {
|
||||
return this[kLimit]
|
||||
}
|
||||
|
||||
next (callback) {
|
||||
let promise
|
||||
|
||||
if (callback === undefined) {
|
||||
promise = new Promise((resolve, reject) => {
|
||||
callback = (err, key, value) => {
|
||||
if (err) reject(err)
|
||||
else if (!this[kLegacy]) resolve(key)
|
||||
else if (key === undefined && value === undefined) resolve()
|
||||
else resolve([key, value])
|
||||
}
|
||||
})
|
||||
} else if (typeof callback !== 'function') {
|
||||
throw new TypeError('Callback must be a function')
|
||||
}
|
||||
|
||||
if (this[kClosing]) {
|
||||
this.nextTick(callback, new ModuleError('Iterator is not open: cannot call next() after close()', {
|
||||
code: 'LEVEL_ITERATOR_NOT_OPEN'
|
||||
}))
|
||||
} else if (this[kWorking]) {
|
||||
this.nextTick(callback, new ModuleError('Iterator is busy: cannot call next() until previous call has completed', {
|
||||
code: 'LEVEL_ITERATOR_BUSY'
|
||||
}))
|
||||
} else {
|
||||
this[kWorking] = true
|
||||
this[kCallback] = callback
|
||||
|
||||
if (this[kCount] >= this[kLimit]) this.nextTick(this[kHandleOne], null)
|
||||
else this._next(this[kHandleOne])
|
||||
}
|
||||
|
||||
return promise
|
||||
}
|
||||
|
||||
_next (callback) {
|
||||
this.nextTick(callback)
|
||||
}
|
||||
|
||||
nextv (size, options, callback) {
|
||||
callback = getCallback(options, callback)
|
||||
callback = fromCallback(callback, kPromise)
|
||||
options = getOptions(options, emptyOptions)
|
||||
|
||||
if (!Number.isInteger(size)) {
|
||||
this.nextTick(callback, new TypeError("The first argument 'size' must be an integer"))
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
if (this[kClosing]) {
|
||||
this.nextTick(callback, new ModuleError('Iterator is not open: cannot call nextv() after close()', {
|
||||
code: 'LEVEL_ITERATOR_NOT_OPEN'
|
||||
}))
|
||||
} else if (this[kWorking]) {
|
||||
this.nextTick(callback, new ModuleError('Iterator is busy: cannot call nextv() until previous call has completed', {
|
||||
code: 'LEVEL_ITERATOR_BUSY'
|
||||
}))
|
||||
} else {
|
||||
if (size < 1) size = 1
|
||||
if (this[kLimit] < Infinity) size = Math.min(size, this[kLimit] - this[kCount])
|
||||
|
||||
this[kWorking] = true
|
||||
this[kCallback] = callback
|
||||
|
||||
if (size <= 0) this.nextTick(this[kHandleMany], null, [])
|
||||
else this._nextv(size, options, this[kHandleMany])
|
||||
}
|
||||
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
_nextv (size, options, callback) {
|
||||
const acc = []
|
||||
const onnext = (err, key, value) => {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
} else if (this[kLegacy] ? key === undefined && value === undefined : key === undefined) {
|
||||
return callback(null, acc)
|
||||
}
|
||||
|
||||
acc.push(this[kLegacy] ? [key, value] : key)
|
||||
|
||||
if (acc.length === size) {
|
||||
callback(null, acc)
|
||||
} else {
|
||||
this._next(onnext)
|
||||
}
|
||||
}
|
||||
|
||||
this._next(onnext)
|
||||
}
|
||||
|
||||
all (options, callback) {
|
||||
callback = getCallback(options, callback)
|
||||
callback = fromCallback(callback, kPromise)
|
||||
options = getOptions(options, emptyOptions)
|
||||
|
||||
if (this[kClosing]) {
|
||||
this.nextTick(callback, new ModuleError('Iterator is not open: cannot call all() after close()', {
|
||||
code: 'LEVEL_ITERATOR_NOT_OPEN'
|
||||
}))
|
||||
} else if (this[kWorking]) {
|
||||
this.nextTick(callback, new ModuleError('Iterator is busy: cannot call all() until previous call has completed', {
|
||||
code: 'LEVEL_ITERATOR_BUSY'
|
||||
}))
|
||||
} else {
|
||||
this[kWorking] = true
|
||||
this[kCallback] = callback
|
||||
this[kAutoClose] = true
|
||||
|
||||
if (this[kCount] >= this[kLimit]) this.nextTick(this[kHandleMany], null, [])
|
||||
else this._all(options, this[kHandleMany])
|
||||
}
|
||||
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
_all (options, callback) {
|
||||
// Must count here because we're directly calling _nextv()
|
||||
let count = this[kCount]
|
||||
const acc = []
|
||||
|
||||
const nextv = () => {
|
||||
// Not configurable, because implementations should optimize _all().
|
||||
const size = this[kLimit] < Infinity ? Math.min(1e3, this[kLimit] - count) : 1e3
|
||||
|
||||
if (size <= 0) {
|
||||
this.nextTick(callback, null, acc)
|
||||
} else {
|
||||
this._nextv(size, emptyOptions, onnextv)
|
||||
}
|
||||
}
|
||||
|
||||
const onnextv = (err, items) => {
|
||||
if (err) {
|
||||
callback(err)
|
||||
} else if (items.length === 0) {
|
||||
callback(null, acc)
|
||||
} else {
|
||||
acc.push.apply(acc, items)
|
||||
count += items.length
|
||||
nextv()
|
||||
}
|
||||
}
|
||||
|
||||
nextv()
|
||||
}
|
||||
|
||||
[kFinishWork] () {
|
||||
const cb = this[kCallback]
|
||||
|
||||
// Callback will be null if work was aborted on close
|
||||
if (this[kAbortOnClose] && cb === null) return noop
|
||||
|
||||
this[kWorking] = false
|
||||
this[kCallback] = null
|
||||
|
||||
if (this[kClosing]) this._close(this[kHandleClose])
|
||||
|
||||
return cb
|
||||
}
|
||||
|
||||
[kReturnMany] (cb, err, items) {
|
||||
if (this[kAutoClose]) {
|
||||
this.close(cb.bind(null, err, items))
|
||||
} else {
|
||||
cb(err, items)
|
||||
}
|
||||
}
|
||||
|
||||
seek (target, options) {
|
||||
options = getOptions(options, emptyOptions)
|
||||
|
||||
if (this[kClosing]) {
|
||||
// Don't throw here, to be kind to implementations that wrap
|
||||
// another db and don't necessarily control when the db is closed
|
||||
} else if (this[kWorking]) {
|
||||
throw new ModuleError('Iterator is busy: cannot call seek() until next() has completed', {
|
||||
code: 'LEVEL_ITERATOR_BUSY'
|
||||
})
|
||||
} else {
|
||||
const keyEncoding = this.db.keyEncoding(options.keyEncoding || this[kKeyEncoding])
|
||||
const keyFormat = keyEncoding.format
|
||||
|
||||
if (options.keyEncoding !== keyFormat) {
|
||||
options = { ...options, keyEncoding: keyFormat }
|
||||
}
|
||||
|
||||
const mapped = this.db.prefixKey(keyEncoding.encode(target), keyFormat)
|
||||
this._seek(mapped, options)
|
||||
}
|
||||
}
|
||||
|
||||
_seek (target, options) {
|
||||
throw new ModuleError('Iterator does not support seek()', {
|
||||
code: 'LEVEL_NOT_SUPPORTED'
|
||||
})
|
||||
}
|
||||
|
||||
close (callback) {
|
||||
callback = fromCallback(callback, kPromise)
|
||||
|
||||
if (this[kClosed]) {
|
||||
this.nextTick(callback)
|
||||
} else if (this[kClosing]) {
|
||||
this[kCloseCallbacks].push(callback)
|
||||
} else {
|
||||
this[kClosing] = true
|
||||
this[kCloseCallbacks].push(callback)
|
||||
|
||||
if (!this[kWorking]) {
|
||||
this._close(this[kHandleClose])
|
||||
} else if (this[kAbortOnClose]) {
|
||||
// Don't wait for work to finish. Subsequently ignore the result.
|
||||
const cb = this[kFinishWork]()
|
||||
|
||||
cb(new ModuleError('Aborted on iterator close()', {
|
||||
code: 'LEVEL_ITERATOR_NOT_OPEN'
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
_close (callback) {
|
||||
this.nextTick(callback)
|
||||
}
|
||||
|
||||
[kHandleClose] () {
|
||||
this[kClosed] = true
|
||||
this.db.detachResource(this)
|
||||
|
||||
const callbacks = this[kCloseCallbacks]
|
||||
this[kCloseCallbacks] = []
|
||||
|
||||
for (const cb of callbacks) {
|
||||
cb()
|
||||
}
|
||||
}
|
||||
|
||||
async * [Symbol.asyncIterator] () {
|
||||
try {
|
||||
let item
|
||||
|
||||
while ((item = (await this.next())) !== undefined) {
|
||||
yield item
|
||||
}
|
||||
} finally {
|
||||
if (!this[kClosed]) await this.close()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For backwards compatibility this class is not (yet) called AbstractEntryIterator.
|
||||
class AbstractIterator extends CommonIterator {
|
||||
constructor (db, options) {
|
||||
super(db, options, true)
|
||||
this[kKeys] = options.keys !== false
|
||||
this[kValues] = options.values !== false
|
||||
}
|
||||
|
||||
[kHandleOne] (err, key, value) {
|
||||
const cb = this[kFinishWork]()
|
||||
if (err) return cb(err)
|
||||
|
||||
try {
|
||||
key = this[kKeys] && key !== undefined ? this[kKeyEncoding].decode(key) : undefined
|
||||
value = this[kValues] && value !== undefined ? this[kValueEncoding].decode(value) : undefined
|
||||
} catch (err) {
|
||||
return cb(new IteratorDecodeError('entry', err))
|
||||
}
|
||||
|
||||
if (!(key === undefined && value === undefined)) {
|
||||
this[kCount]++
|
||||
}
|
||||
|
||||
cb(null, key, value)
|
||||
}
|
||||
|
||||
[kHandleMany] (err, entries) {
|
||||
const cb = this[kFinishWork]()
|
||||
if (err) return this[kReturnMany](cb, err)
|
||||
|
||||
try {
|
||||
for (const entry of entries) {
|
||||
const key = entry[0]
|
||||
const value = entry[1]
|
||||
|
||||
entry[0] = this[kKeys] && key !== undefined ? this[kKeyEncoding].decode(key) : undefined
|
||||
entry[1] = this[kValues] && value !== undefined ? this[kValueEncoding].decode(value) : undefined
|
||||
}
|
||||
} catch (err) {
|
||||
return this[kReturnMany](cb, new IteratorDecodeError('entries', err))
|
||||
}
|
||||
|
||||
this[kCount] += entries.length
|
||||
this[kReturnMany](cb, null, entries)
|
||||
}
|
||||
|
||||
end (callback) {
|
||||
if (!warnedEnd && typeof console !== 'undefined') {
|
||||
warnedEnd = true
|
||||
console.warn(new ModuleError(
|
||||
'The iterator.end() method was renamed to close() and end() is an alias that will be removed in a future version',
|
||||
{ code: 'LEVEL_LEGACY' }
|
||||
))
|
||||
}
|
||||
|
||||
return this.close(callback)
|
||||
}
|
||||
}
|
||||
|
||||
class AbstractKeyIterator extends CommonIterator {
|
||||
constructor (db, options) {
|
||||
super(db, options, false)
|
||||
}
|
||||
|
||||
[kHandleOne] (err, key) {
|
||||
const cb = this[kFinishWork]()
|
||||
if (err) return cb(err)
|
||||
|
||||
try {
|
||||
key = key !== undefined ? this[kKeyEncoding].decode(key) : undefined
|
||||
} catch (err) {
|
||||
return cb(new IteratorDecodeError('key', err))
|
||||
}
|
||||
|
||||
if (key !== undefined) this[kCount]++
|
||||
cb(null, key)
|
||||
}
|
||||
|
||||
[kHandleMany] (err, keys) {
|
||||
const cb = this[kFinishWork]()
|
||||
if (err) return this[kReturnMany](cb, err)
|
||||
|
||||
try {
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const key = keys[i]
|
||||
keys[i] = key !== undefined ? this[kKeyEncoding].decode(key) : undefined
|
||||
}
|
||||
} catch (err) {
|
||||
return this[kReturnMany](cb, new IteratorDecodeError('keys', err))
|
||||
}
|
||||
|
||||
this[kCount] += keys.length
|
||||
this[kReturnMany](cb, null, keys)
|
||||
}
|
||||
}
|
||||
|
||||
class AbstractValueIterator extends CommonIterator {
|
||||
constructor (db, options) {
|
||||
super(db, options, false)
|
||||
}
|
||||
|
||||
[kHandleOne] (err, value) {
|
||||
const cb = this[kFinishWork]()
|
||||
if (err) return cb(err)
|
||||
|
||||
try {
|
||||
value = value !== undefined ? this[kValueEncoding].decode(value) : undefined
|
||||
} catch (err) {
|
||||
return cb(new IteratorDecodeError('value', err))
|
||||
}
|
||||
|
||||
if (value !== undefined) this[kCount]++
|
||||
cb(null, value)
|
||||
}
|
||||
|
||||
[kHandleMany] (err, values) {
|
||||
const cb = this[kFinishWork]()
|
||||
if (err) return this[kReturnMany](cb, err)
|
||||
|
||||
try {
|
||||
for (let i = 0; i < values.length; i++) {
|
||||
const value = values[i]
|
||||
values[i] = value !== undefined ? this[kValueEncoding].decode(value) : undefined
|
||||
}
|
||||
} catch (err) {
|
||||
return this[kReturnMany](cb, new IteratorDecodeError('values', err))
|
||||
}
|
||||
|
||||
this[kCount] += values.length
|
||||
this[kReturnMany](cb, null, values)
|
||||
}
|
||||
}
|
||||
|
||||
// Internal utility, not typed or exported
|
||||
class IteratorDecodeError extends ModuleError {
|
||||
constructor (subject, cause) {
|
||||
super(`Iterator could not decode ${subject}`, {
|
||||
code: 'LEVEL_DECODE_ERROR',
|
||||
cause
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// To help migrating to abstract-level
|
||||
for (const k of ['_ended property', '_nexting property', '_end method']) {
|
||||
Object.defineProperty(AbstractIterator.prototype, k.split(' ')[0], {
|
||||
get () { throw new ModuleError(`The ${k} has been removed`, { code: 'LEVEL_LEGACY' }) },
|
||||
set () { throw new ModuleError(`The ${k} has been removed`, { code: 'LEVEL_LEGACY' }) }
|
||||
})
|
||||
}
|
||||
|
||||
// Exposed so that AbstractLevel can set these options
|
||||
AbstractIterator.keyEncoding = kKeyEncoding
|
||||
AbstractIterator.valueEncoding = kValueEncoding
|
||||
|
||||
exports.AbstractIterator = AbstractIterator
|
||||
exports.AbstractKeyIterator = AbstractKeyIterator
|
||||
exports.AbstractValueIterator = AbstractValueIterator
|

node_modules/abstract-level/abstract-level.js (818 lines, generated, vendored, normal file)

@@ -0,0 +1,818 @@
|
||||
'use strict'
|
||||
|
||||
const { supports } = require('level-supports')
|
||||
const { Transcoder } = require('level-transcoder')
|
||||
const { EventEmitter } = require('events')
|
||||
const { fromCallback } = require('catering')
|
||||
const ModuleError = require('module-error')
|
||||
const { AbstractIterator } = require('./abstract-iterator')
|
||||
const { DefaultKeyIterator, DefaultValueIterator } = require('./lib/default-kv-iterator')
|
||||
const { DeferredIterator, DeferredKeyIterator, DeferredValueIterator } = require('./lib/deferred-iterator')
|
||||
const { DefaultChainedBatch } = require('./lib/default-chained-batch')
|
||||
const { getCallback, getOptions } = require('./lib/common')
|
||||
const rangeOptions = require('./lib/range-options')
|
||||
|
||||
const kPromise = Symbol('promise')
|
||||
const kLanded = Symbol('landed')
|
||||
const kResources = Symbol('resources')
|
||||
const kCloseResources = Symbol('closeResources')
|
||||
const kOperations = Symbol('operations')
|
||||
const kUndefer = Symbol('undefer')
|
||||
const kDeferOpen = Symbol('deferOpen')
|
||||
const kOptions = Symbol('options')
|
||||
const kStatus = Symbol('status')
|
||||
const kDefaultOptions = Symbol('defaultOptions')
|
||||
const kTranscoder = Symbol('transcoder')
|
||||
const kKeyEncoding = Symbol('keyEncoding')
|
||||
const kValueEncoding = Symbol('valueEncoding')
|
||||
const noop = () => {}
|
||||
|
||||
class AbstractLevel extends EventEmitter {
|
||||
constructor (manifest, options) {
|
||||
super()
|
||||
|
||||
if (typeof manifest !== 'object' || manifest === null) {
|
||||
throw new TypeError("The first argument 'manifest' must be an object")
|
||||
}
|
||||
|
||||
options = getOptions(options)
|
||||
const { keyEncoding, valueEncoding, passive, ...forward } = options
|
||||
|
||||
this[kResources] = new Set()
|
||||
this[kOperations] = []
|
||||
this[kDeferOpen] = true
|
||||
this[kOptions] = forward
|
||||
this[kStatus] = 'opening'
|
||||
|
||||
this.supports = supports(manifest, {
|
||||
status: true,
|
||||
promises: true,
|
||||
clear: true,
|
||||
getMany: true,
|
||||
deferredOpen: true,
|
||||
|
||||
// TODO (next major): add seek
|
||||
snapshots: manifest.snapshots !== false,
|
||||
permanence: manifest.permanence !== false,
|
||||
|
||||
// TODO: remove from level-supports because it's always supported
|
||||
keyIterator: true,
|
||||
valueIterator: true,
|
||||
iteratorNextv: true,
|
||||
iteratorAll: true,
|
||||
|
||||
encodings: manifest.encodings || {},
|
||||
events: Object.assign({}, manifest.events, {
|
||||
opening: true,
|
||||
open: true,
|
||||
closing: true,
|
||||
closed: true,
|
||||
put: true,
|
||||
del: true,
|
||||
batch: true,
|
||||
clear: true
|
||||
})
|
||||
})
|
||||
|
||||
this[kTranscoder] = new Transcoder(formats(this))
|
||||
this[kKeyEncoding] = this[kTranscoder].encoding(keyEncoding || 'utf8')
|
||||
this[kValueEncoding] = this[kTranscoder].encoding(valueEncoding || 'utf8')
|
||||
|
||||
// Add custom and transcoder encodings to manifest
|
||||
for (const encoding of this[kTranscoder].encodings()) {
|
||||
if (!this.supports.encodings[encoding.commonName]) {
|
||||
this.supports.encodings[encoding.commonName] = true
|
||||
}
|
||||
}
|
||||
|
||||
this[kDefaultOptions] = {
|
||||
empty: Object.freeze({}),
|
||||
entry: Object.freeze({
|
||||
keyEncoding: this[kKeyEncoding].commonName,
|
||||
valueEncoding: this[kValueEncoding].commonName
|
||||
}),
|
||||
key: Object.freeze({
|
||||
keyEncoding: this[kKeyEncoding].commonName
|
||||
})
|
||||
}
|
||||
|
||||
// Let subclass finish its constructor
|
||||
this.nextTick(() => {
|
||||
if (this[kDeferOpen]) {
|
||||
this.open({ passive: false }, noop)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
get status () {
|
||||
return this[kStatus]
|
||||
}
|
||||
|
||||
keyEncoding (encoding) {
|
||||
return this[kTranscoder].encoding(encoding != null ? encoding : this[kKeyEncoding])
|
||||
}
|
||||
|
||||
valueEncoding (encoding) {
|
||||
return this[kTranscoder].encoding(encoding != null ? encoding : this[kValueEncoding])
|
||||
}
|
||||
|
||||
open (options, callback) {
|
||||
callback = getCallback(options, callback)
|
||||
callback = fromCallback(callback, kPromise)
|
||||
|
||||
options = { ...this[kOptions], ...getOptions(options) }
|
||||
|
||||
options.createIfMissing = options.createIfMissing !== false
|
||||
options.errorIfExists = !!options.errorIfExists
|
||||
|
||||
const maybeOpened = (err) => {
|
||||
if (this[kStatus] === 'closing' || this[kStatus] === 'opening') {
|
||||
// Wait until pending state changes are done
|
||||
this.once(kLanded, err ? () => maybeOpened(err) : maybeOpened)
|
||||
} else if (this[kStatus] !== 'open') {
|
||||
callback(new ModuleError('Database is not open', {
|
||||
code: 'LEVEL_DATABASE_NOT_OPEN',
|
||||
cause: err
|
||||
}))
|
||||
} else {
|
||||
callback()
|
||||
}
|
||||
}
|
||||
|
||||
if (options.passive) {
|
||||
if (this[kStatus] === 'opening') {
|
||||
this.once(kLanded, maybeOpened)
|
||||
} else {
|
||||
this.nextTick(maybeOpened)
|
||||
}
|
||||
} else if (this[kStatus] === 'closed' || this[kDeferOpen]) {
|
||||
this[kDeferOpen] = false
|
||||
this[kStatus] = 'opening'
|
||||
this.emit('opening')
|
||||
|
||||
this._open(options, (err) => {
|
||||
if (err) {
|
||||
this[kStatus] = 'closed'
|
||||
|
||||
// Resources must be safe to close in any db state
|
||||
this[kCloseResources](() => {
|
||||
this.emit(kLanded)
|
||||
maybeOpened(err)
|
||||
})
|
||||
|
||||
this[kUndefer]()
|
||||
return
|
||||
}
|
||||
|
||||
this[kStatus] = 'open'
|
||||
this[kUndefer]()
|
||||
this.emit(kLanded)
|
||||
|
||||
// Only emit public event if pending state changes are done
|
||||
if (this[kStatus] === 'open') this.emit('open')
|
||||
|
||||
// TODO (next major): remove this alias
|
||||
if (this[kStatus] === 'open') this.emit('ready')
|
||||
|
||||
maybeOpened()
|
||||
})
|
||||
} else if (this[kStatus] === 'open') {
|
||||
this.nextTick(maybeOpened)
|
||||
} else {
|
||||
this.once(kLanded, () => this.open(options, callback))
|
||||
}
|
||||
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
_open (options, callback) {
|
||||
this.nextTick(callback)
|
||||
}
|
||||
|
||||
close (callback) {
|
||||
callback = fromCallback(callback, kPromise)
|
||||
|
||||
const maybeClosed = (err) => {
|
||||
if (this[kStatus] === 'opening' || this[kStatus] === 'closing') {
|
||||
// Wait until pending state changes are done
|
||||
this.once(kLanded, err ? () => maybeClosed(err) : maybeClosed)
|
||||
} else if (this[kStatus] !== 'closed') {
|
||||
callback(new ModuleError('Database is not closed', {
|
||||
code: 'LEVEL_DATABASE_NOT_CLOSED',
|
||||
cause: err
|
||||
}))
|
||||
} else {
|
||||
callback()
|
||||
}
|
||||
}
|
||||
|
||||
if (this[kStatus] === 'open') {
|
||||
this[kStatus] = 'closing'
|
||||
this.emit('closing')
|
||||
|
||||
const cancel = (err) => {
|
||||
this[kStatus] = 'open'
|
||||
this[kUndefer]()
|
||||
this.emit(kLanded)
|
||||
maybeClosed(err)
|
||||
}
|
||||
|
||||
this[kCloseResources](() => {
|
||||
this._close((err) => {
|
||||
if (err) return cancel(err)
|
||||
|
||||
this[kStatus] = 'closed'
|
||||
this[kUndefer]()
|
||||
this.emit(kLanded)
|
||||
|
||||
// Only emit public event if pending state changes are done
|
||||
if (this[kStatus] === 'closed') this.emit('closed')
|
||||
|
||||
maybeClosed()
|
||||
})
|
||||
})
|
||||
} else if (this[kStatus] === 'closed') {
|
||||
this.nextTick(maybeClosed)
|
||||
} else {
|
||||
this.once(kLanded, () => this.close(callback))
|
||||
}
|
||||
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
[kCloseResources] (callback) {
|
||||
if (this[kResources].size === 0) {
|
||||
return this.nextTick(callback)
|
||||
}
|
||||
|
||||
let pending = this[kResources].size
|
||||
let sync = true
|
||||
|
||||
const next = () => {
|
||||
if (--pending === 0) {
|
||||
// We don't have tests for generic resources, so dezalgo
|
||||
if (sync) this.nextTick(callback)
|
||||
else callback()
|
||||
}
|
||||
}
|
||||
|
||||
// In parallel so that all resources know they are closed
|
||||
for (const resource of this[kResources]) {
|
||||
resource.close(next)
|
||||
}
|
||||
|
||||
sync = false
|
||||
this[kResources].clear()
|
||||
}
|
||||
|
||||
_close (callback) {
|
||||
this.nextTick(callback)
|
||||
}
|
||||
|
||||
get (key, options, callback) {
|
||||
callback = getCallback(options, callback)
|
||||
callback = fromCallback(callback, kPromise)
|
||||
options = getOptions(options, this[kDefaultOptions].entry)
|
||||
|
||||
if (this[kStatus] === 'opening') {
|
||||
this.defer(() => this.get(key, options, callback))
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
if (maybeError(this, callback)) {
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
const err = this._checkKey(key)
|
||||
|
||||
if (err) {
|
||||
this.nextTick(callback, err)
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
const keyEncoding = this.keyEncoding(options.keyEncoding)
|
||||
const valueEncoding = this.valueEncoding(options.valueEncoding)
|
||||
const keyFormat = keyEncoding.format
|
||||
const valueFormat = valueEncoding.format
|
||||
|
||||
// Forward encoding options to the underlying store
|
||||
if (options.keyEncoding !== keyFormat || options.valueEncoding !== valueFormat) {
|
||||
// Avoid spread operator because of https://bugs.chromium.org/p/chromium/issues/detail?id=1204540
|
||||
options = Object.assign({}, options, { keyEncoding: keyFormat, valueEncoding: valueFormat })
|
||||
}
|
||||
|
||||
this._get(this.prefixKey(keyEncoding.encode(key), keyFormat), options, (err, value) => {
|
||||
if (err) {
|
||||
// Normalize not found error for backwards compatibility with abstract-leveldown and level(up)
|
||||
if (err.code === 'LEVEL_NOT_FOUND' || err.notFound || /NotFound/i.test(err)) {
|
||||
if (!err.code) err.code = 'LEVEL_NOT_FOUND' // Preferred way going forward
|
||||
if (!err.notFound) err.notFound = true // Same as level-errors
|
||||
if (!err.status) err.status = 404 // Same as level-errors
|
||||
}
|
||||
|
||||
return callback(err)
|
||||
}
|
||||
|
||||
try {
|
||||
value = valueEncoding.decode(value)
|
||||
} catch (err) {
|
||||
return callback(new ModuleError('Could not decode value', {
|
||||
code: 'LEVEL_DECODE_ERROR',
|
||||
cause: err
|
||||
}))
|
||||
}
|
||||
|
||||
callback(null, value)
|
||||
})
|
||||
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
_get (key, options, callback) {
|
||||
this.nextTick(callback, new Error('NotFound'))
|
||||
}
|
||||
|
||||
getMany (keys, options, callback) {
|
||||
callback = getCallback(options, callback)
|
||||
callback = fromCallback(callback, kPromise)
|
||||
options = getOptions(options, this[kDefaultOptions].entry)
|
||||
|
||||
if (this[kStatus] === 'opening') {
|
||||
this.defer(() => this.getMany(keys, options, callback))
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
if (maybeError(this, callback)) {
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
if (!Array.isArray(keys)) {
|
||||
this.nextTick(callback, new TypeError("The first argument 'keys' must be an array"))
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
if (keys.length === 0) {
|
||||
this.nextTick(callback, null, [])
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
const keyEncoding = this.keyEncoding(options.keyEncoding)
|
||||
const valueEncoding = this.valueEncoding(options.valueEncoding)
|
||||
const keyFormat = keyEncoding.format
|
||||
const valueFormat = valueEncoding.format
|
||||
|
||||
// Forward encoding options
|
||||
if (options.keyEncoding !== keyFormat || options.valueEncoding !== valueFormat) {
|
||||
options = Object.assign({}, options, { keyEncoding: keyFormat, valueEncoding: valueFormat })
|
||||
}
|
||||
|
||||
const mappedKeys = new Array(keys.length)
|
||||
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const key = keys[i]
|
||||
const err = this._checkKey(key)
|
||||
|
||||
if (err) {
|
||||
this.nextTick(callback, err)
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
mappedKeys[i] = this.prefixKey(keyEncoding.encode(key), keyFormat)
|
||||
}
|
||||
|
||||
this._getMany(mappedKeys, options, (err, values) => {
|
||||
if (err) return callback(err)
|
||||
|
||||
try {
|
||||
for (let i = 0; i < values.length; i++) {
|
||||
if (values[i] !== undefined) {
|
||||
values[i] = valueEncoding.decode(values[i])
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
return callback(new ModuleError(`Could not decode one or more of ${values.length} value(s)`, {
|
||||
code: 'LEVEL_DECODE_ERROR',
|
||||
cause: err
|
||||
}))
|
||||
}
|
||||
|
||||
callback(null, values)
|
||||
})
|
||||
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
_getMany (keys, options, callback) {
|
||||
this.nextTick(callback, null, new Array(keys.length).fill(undefined))
|
||||
}
|
||||
|
||||
put (key, value, options, callback) {
|
||||
callback = getCallback(options, callback)
|
||||
callback = fromCallback(callback, kPromise)
|
||||
options = getOptions(options, this[kDefaultOptions].entry)
|
||||
|
||||
if (this[kStatus] === 'opening') {
|
||||
this.defer(() => this.put(key, value, options, callback))
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
if (maybeError(this, callback)) {
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
const err = this._checkKey(key) || this._checkValue(value)
|
||||
|
||||
if (err) {
|
||||
this.nextTick(callback, err)
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
const keyEncoding = this.keyEncoding(options.keyEncoding)
|
||||
const valueEncoding = this.valueEncoding(options.valueEncoding)
|
||||
const keyFormat = keyEncoding.format
|
||||
const valueFormat = valueEncoding.format
|
||||
|
||||
// Forward encoding options
|
||||
if (options.keyEncoding !== keyFormat || options.valueEncoding !== valueFormat) {
|
||||
options = Object.assign({}, options, { keyEncoding: keyFormat, valueEncoding: valueFormat })
|
||||
}
|
||||
|
||||
const mappedKey = this.prefixKey(keyEncoding.encode(key), keyFormat)
|
||||
const mappedValue = valueEncoding.encode(value)
|
||||
|
||||
this._put(mappedKey, mappedValue, options, (err) => {
|
||||
if (err) return callback(err)
|
||||
this.emit('put', key, value)
|
||||
callback()
|
||||
})
|
||||
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
_put (key, value, options, callback) {
|
||||
this.nextTick(callback)
|
||||
}
|
||||
|
||||
del (key, options, callback) {
|
||||
callback = getCallback(options, callback)
|
||||
callback = fromCallback(callback, kPromise)
|
||||
options = getOptions(options, this[kDefaultOptions].key)
|
||||
|
||||
if (this[kStatus] === 'opening') {
|
||||
this.defer(() => this.del(key, options, callback))
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
if (maybeError(this, callback)) {
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
const err = this._checkKey(key)
|
||||
|
||||
if (err) {
|
||||
this.nextTick(callback, err)
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
const keyEncoding = this.keyEncoding(options.keyEncoding)
|
||||
const keyFormat = keyEncoding.format
|
||||
|
||||
// Forward encoding options
|
||||
if (options.keyEncoding !== keyFormat) {
|
||||
options = Object.assign({}, options, { keyEncoding: keyFormat })
|
||||
}
|
||||
|
||||
this._del(this.prefixKey(keyEncoding.encode(key), keyFormat), options, (err) => {
|
||||
if (err) return callback(err)
|
||||
this.emit('del', key)
|
||||
callback()
|
||||
})
|
||||
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
_del (key, options, callback) {
|
||||
this.nextTick(callback)
|
||||
}
|
||||
|
||||
batch (operations, options, callback) {
|
||||
if (!arguments.length) {
|
||||
if (this[kStatus] === 'opening') return new DefaultChainedBatch(this)
|
||||
if (this[kStatus] !== 'open') {
|
||||
throw new ModuleError('Database is not open', {
|
||||
code: 'LEVEL_DATABASE_NOT_OPEN'
|
||||
})
|
||||
}
|
||||
return this._chainedBatch()
|
||||
}
|
||||
|
||||
if (typeof operations === 'function') callback = operations
|
||||
else callback = getCallback(options, callback)
|
||||
|
||||
callback = fromCallback(callback, kPromise)
|
||||
options = getOptions(options, this[kDefaultOptions].empty)
|
||||
|
||||
if (this[kStatus] === 'opening') {
|
||||
this.defer(() => this.batch(operations, options, callback))
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
if (maybeError(this, callback)) {
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
if (!Array.isArray(operations)) {
|
||||
this.nextTick(callback, new TypeError("The first argument 'operations' must be an array"))
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
if (operations.length === 0) {
|
||||
this.nextTick(callback)
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
const mapped = new Array(operations.length)
|
||||
const { keyEncoding: ke, valueEncoding: ve, ...forward } = options
|
||||
|
||||
for (let i = 0; i < operations.length; i++) {
|
||||
if (typeof operations[i] !== 'object' || operations[i] === null) {
|
||||
this.nextTick(callback, new TypeError('A batch operation must be an object'))
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
const op = Object.assign({}, operations[i])
|
||||
|
||||
if (op.type !== 'put' && op.type !== 'del') {
|
||||
this.nextTick(callback, new TypeError("A batch operation must have a type property that is 'put' or 'del'"))
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
const err = this._checkKey(op.key)
|
||||
|
||||
if (err) {
|
||||
this.nextTick(callback, err)
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
const db = op.sublevel != null ? op.sublevel : this
|
||||
const keyEncoding = db.keyEncoding(op.keyEncoding || ke)
|
||||
const keyFormat = keyEncoding.format
|
||||
|
||||
op.key = db.prefixKey(keyEncoding.encode(op.key), keyFormat)
|
||||
op.keyEncoding = keyFormat
|
||||
|
||||
if (op.type === 'put') {
|
||||
const valueErr = this._checkValue(op.value)
|
||||
|
||||
if (valueErr) {
|
||||
this.nextTick(callback, valueErr)
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
const valueEncoding = db.valueEncoding(op.valueEncoding || ve)
|
||||
|
||||
op.value = valueEncoding.encode(op.value)
|
||||
op.valueEncoding = valueEncoding.format
|
||||
}
|
||||
|
||||
// Prevent double prefixing
|
||||
if (db !== this) {
|
||||
op.sublevel = null
|
||||
}
|
||||
|
||||
mapped[i] = op
|
||||
}
|
||||
|
||||
this._batch(mapped, forward, (err) => {
|
||||
if (err) return callback(err)
|
||||
this.emit('batch', operations)
|
||||
callback()
|
||||
})
|
||||
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
_batch (operations, options, callback) {
|
||||
this.nextTick(callback)
|
||||
}
|
||||
|
||||
sublevel (name, options) {
|
||||
return this._sublevel(name, AbstractSublevel.defaults(options))
|
||||
}
|
||||
|
||||
_sublevel (name, options) {
|
||||
return new AbstractSublevel(this, name, options)
|
||||
}
|
||||
|
||||
prefixKey (key, keyFormat) {
|
||||
return key
|
||||
}
|
||||
|
||||
clear (options, callback) {
|
||||
callback = getCallback(options, callback)
|
||||
callback = fromCallback(callback, kPromise)
|
||||
options = getOptions(options, this[kDefaultOptions].empty)
|
||||
|
||||
if (this[kStatus] === 'opening') {
|
||||
this.defer(() => this.clear(options, callback))
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
if (maybeError(this, callback)) {
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
const original = options
|
||||
const keyEncoding = this.keyEncoding(options.keyEncoding)
|
||||
|
||||
options = rangeOptions(options, keyEncoding)
|
||||
options.keyEncoding = keyEncoding.format
|
||||
|
||||
if (options.limit === 0) {
|
||||
this.nextTick(callback)
|
||||
} else {
|
||||
this._clear(options, (err) => {
|
||||
if (err) return callback(err)
|
||||
this.emit('clear', original)
|
||||
callback()
|
||||
})
|
||||
}
|
||||
|
||||
return callback[kPromise]
|
||||
}
|
||||
|
||||
_clear (options, callback) {
|
||||
this.nextTick(callback)
|
||||
}
|
||||
|
||||
iterator (options) {
|
||||
const keyEncoding = this.keyEncoding(options && options.keyEncoding)
|
||||
const valueEncoding = this.valueEncoding(options && options.valueEncoding)
|
||||
|
||||
options = rangeOptions(options, keyEncoding)
|
||||
options.keys = options.keys !== false
|
||||
options.values = options.values !== false
|
||||
|
||||
// We need the original encoding options in AbstractIterator in order to decode data
|
||||
options[AbstractIterator.keyEncoding] = keyEncoding
|
||||
options[AbstractIterator.valueEncoding] = valueEncoding
|
||||
|
||||
// Forward encoding options to private API
|
||||
options.keyEncoding = keyEncoding.format
|
||||
options.valueEncoding = valueEncoding.format
|
||||
|
||||
if (this[kStatus] === 'opening') {
|
||||
return new DeferredIterator(this, options)
|
||||
} else if (this[kStatus] !== 'open') {
|
||||
throw new ModuleError('Database is not open', {
|
||||
code: 'LEVEL_DATABASE_NOT_OPEN'
|
||||
})
|
||||
}
|
||||
|
||||
return this._iterator(options)
|
||||
}
|
||||
|
||||
_iterator (options) {
|
||||
return new AbstractIterator(this, options)
|
||||
}
|
||||
|
||||
keys (options) {
|
||||
// Also include valueEncoding (though unused) because we may fallback to _iterator()
|
||||
const keyEncoding = this.keyEncoding(options && options.keyEncoding)
|
||||
const valueEncoding = this.valueEncoding(options && options.valueEncoding)
|
||||
|
||||
options = rangeOptions(options, keyEncoding)
|
||||
|
||||
// We need the original encoding options in AbstractKeyIterator in order to decode data
|
||||
options[AbstractIterator.keyEncoding] = keyEncoding
|
||||
options[AbstractIterator.valueEncoding] = valueEncoding
|
||||
|
||||
// Forward encoding options to private API
|
||||
options.keyEncoding = keyEncoding.format
|
||||
options.valueEncoding = valueEncoding.format
|
||||
|
||||
if (this[kStatus] === 'opening') {
|
||||
return new DeferredKeyIterator(this, options)
|
||||
} else if (this[kStatus] !== 'open') {
|
||||
throw new ModuleError('Database is not open', {
|
||||
code: 'LEVEL_DATABASE_NOT_OPEN'
|
||||
})
|
||||
}
|
||||
|
||||
return this._keys(options)
|
||||
}
|
||||
|
||||
_keys (options) {
|
||||
return new DefaultKeyIterator(this, options)
|
||||
}
|
||||
|
||||
values (options) {
|
||||
const keyEncoding = this.keyEncoding(options && options.keyEncoding)
|
||||
const valueEncoding = this.valueEncoding(options && options.valueEncoding)
|
||||
|
||||
options = rangeOptions(options, keyEncoding)
|
||||
|
||||
// We need the original encoding options in AbstractValueIterator in order to decode data
|
||||
options[AbstractIterator.keyEncoding] = keyEncoding
|
||||
options[AbstractIterator.valueEncoding] = valueEncoding
|
||||
|
||||
// Forward encoding options to private API
|
||||
options.keyEncoding = keyEncoding.format
|
||||
options.valueEncoding = valueEncoding.format
|
||||
|
||||
if (this[kStatus] === 'opening') {
|
||||
return new DeferredValueIterator(this, options)
|
||||
} else if (this[kStatus] !== 'open') {
|
||||
throw new ModuleError('Database is not open', {
|
||||
code: 'LEVEL_DATABASE_NOT_OPEN'
|
||||
})
|
||||
}
|
||||
|
||||
return this._values(options)
|
||||
}
|
||||
|
||||
_values (options) {
|
||||
return new DefaultValueIterator(this, options)
|
||||
}
|
||||
|
||||
defer (fn) {
|
||||
if (typeof fn !== 'function') {
|
||||
throw new TypeError('The first argument must be a function')
|
||||
}
|
||||
|
||||
this[kOperations].push(fn)
|
||||
}
|
||||
|
||||
[kUndefer] () {
|
||||
if (this[kOperations].length === 0) {
|
||||
return
|
||||
}
|
||||
|
||||
const operations = this[kOperations]
|
||||
this[kOperations] = []
|
||||
|
||||
for (const op of operations) {
|
||||
op()
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: docs and types
|
||||
attachResource (resource) {
|
||||
if (typeof resource !== 'object' || resource === null ||
|
||||
typeof resource.close !== 'function') {
|
||||
throw new TypeError('The first argument must be a resource object')
|
||||
}
|
||||
|
||||
this[kResources].add(resource)
|
||||
}
|
||||
|
||||
// TODO: docs and types
|
||||
detachResource (resource) {
|
||||
this[kResources].delete(resource)
|
||||
}
|
||||
|
||||
_chainedBatch () {
|
||||
return new DefaultChainedBatch(this)
|
||||
}
|
||||
|
||||
_checkKey (key) {
|
||||
if (key === null || key === undefined) {
|
||||
return new ModuleError('Key cannot be null or undefined', {
|
||||
code: 'LEVEL_INVALID_KEY'
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
_checkValue (value) {
|
||||
if (value === null || value === undefined) {
|
||||
return new ModuleError('Value cannot be null or undefined', {
|
||||
code: 'LEVEL_INVALID_VALUE'
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Expose browser-compatible nextTick for dependents
|
||||
// TODO: after we drop node 10, also use queueMicrotask in node
|
||||
AbstractLevel.prototype.nextTick = require('./lib/next-tick')
|
||||
|
||||
const { AbstractSublevel } = require('./lib/abstract-sublevel')({ AbstractLevel })
|
||||
|
||||
exports.AbstractLevel = AbstractLevel
|
||||
exports.AbstractSublevel = AbstractSublevel
|
||||
|
||||
const maybeError = function (db, callback) {
|
||||
if (db[kStatus] !== 'open') {
|
||||
db.nextTick(callback, new ModuleError('Database is not open', {
|
||||
code: 'LEVEL_DATABASE_NOT_OPEN'
|
||||
}))
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
const formats = function (db) {
|
||||
return Object.keys(db.supports.encodings)
|
||||
.filter(k => !!db.supports.encodings[k])
|
||||
}
|
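A minimal sketch of how an implementation plugs into the private hooks above, assuming a plain in-memory Map store and a utf8-only manifest; ExampleLevel is a hypothetical name, not part of this package:

// Hypothetical subclass: the public get()/put()/del() above delegate to these
// underscored hooks after encoding keys and values.
const { AbstractLevel } = require('abstract-level')

class ExampleLevel extends AbstractLevel {
  constructor (options) {
    // The manifest declares which encodings the store accepts natively
    super({ encodings: { utf8: true } }, options)
    this._store = new Map()
  }

  _put (key, value, options, callback) {
    this._store.set(key, value)
    this.nextTick(callback)
  }

  _get (key, options, callback) {
    if (this._store.has(key)) {
      this.nextTick(callback, null, this._store.get(key))
    } else {
      // get() above normalizes this into a LEVEL_NOT_FOUND error
      this.nextTick(callback, Object.assign(new Error('NotFound'), { code: 'LEVEL_NOT_FOUND' }))
    }
  }

  _del (key, options, callback) {
    this._store.delete(key)
    this.nextTick(callback)
  }
}

// Usage: operations made while status is 'opening' are deferred automatically
const db = new ExampleLevel()
db.put('greeting', 'hello', (err) => {
  if (err) throw err
  db.get('greeting', console.log) // null 'hello'
})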
42
node_modules/abstract-level/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,42 @@
|
||||
export {
|
||||
AbstractLevel,
|
||||
AbstractDatabaseOptions,
|
||||
AbstractOpenOptions,
|
||||
AbstractGetOptions,
|
||||
AbstractGetManyOptions,
|
||||
AbstractPutOptions,
|
||||
AbstractDelOptions,
|
||||
AbstractBatchOptions,
|
||||
AbstractBatchOperation,
|
||||
AbstractBatchPutOperation,
|
||||
AbstractBatchDelOperation,
|
||||
AbstractClearOptions
|
||||
} from './types/abstract-level'
|
||||
|
||||
export {
|
||||
AbstractIterator,
|
||||
AbstractIteratorOptions,
|
||||
AbstractSeekOptions,
|
||||
AbstractKeyIterator,
|
||||
AbstractKeyIteratorOptions,
|
||||
AbstractValueIterator,
|
||||
AbstractValueIteratorOptions
|
||||
} from './types/abstract-iterator'
|
||||
|
||||
export {
|
||||
AbstractChainedBatch,
|
||||
AbstractChainedBatchPutOptions,
|
||||
AbstractChainedBatchDelOptions,
|
||||
AbstractChainedBatchWriteOptions
|
||||
} from './types/abstract-chained-batch'
|
||||
|
||||
export {
|
||||
AbstractSublevel,
|
||||
AbstractSublevelOptions
|
||||
} from './types/abstract-sublevel'
|
||||
|
||||
export {
|
||||
NodeCallback
|
||||
} from './types/interfaces'
|
||||
|
||||
export * as Transcoder from 'level-transcoder'
|
8
node_modules/abstract-level/index.js
generated
vendored
Normal file
@ -0,0 +1,8 @@
'use strict'

exports.AbstractLevel = require('./abstract-level').AbstractLevel
exports.AbstractSublevel = require('./abstract-level').AbstractSublevel
exports.AbstractIterator = require('./abstract-iterator').AbstractIterator
exports.AbstractKeyIterator = require('./abstract-iterator').AbstractKeyIterator
exports.AbstractValueIterator = require('./abstract-iterator').AbstractValueIterator
exports.AbstractChainedBatch = require('./abstract-chained-batch').AbstractChainedBatch
124
node_modules/abstract-level/lib/abstract-sublevel-iterator.js
generated
vendored
Normal file
@ -0,0 +1,124 @@
|
||||
'use strict'
|
||||
|
||||
const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../abstract-iterator')
|
||||
|
||||
const kUnfix = Symbol('unfix')
|
||||
const kIterator = Symbol('iterator')
|
||||
const kHandleOne = Symbol('handleOne')
|
||||
const kHandleMany = Symbol('handleMany')
|
||||
const kCallback = Symbol('callback')
|
||||
|
||||
// TODO: unfix natively if db supports it
|
||||
class AbstractSublevelIterator extends AbstractIterator {
|
||||
constructor (db, options, iterator, unfix) {
|
||||
super(db, options)
|
||||
|
||||
this[kIterator] = iterator
|
||||
this[kUnfix] = unfix
|
||||
this[kHandleOne] = this[kHandleOne].bind(this)
|
||||
this[kHandleMany] = this[kHandleMany].bind(this)
|
||||
this[kCallback] = null
|
||||
}
|
||||
|
||||
[kHandleOne] (err, key, value) {
|
||||
const callback = this[kCallback]
|
||||
if (err) return callback(err)
|
||||
if (key !== undefined) key = this[kUnfix](key)
|
||||
callback(err, key, value)
|
||||
}
|
||||
|
||||
[kHandleMany] (err, entries) {
|
||||
const callback = this[kCallback]
|
||||
if (err) return callback(err)
|
||||
|
||||
for (const entry of entries) {
|
||||
const key = entry[0]
|
||||
if (key !== undefined) entry[0] = this[kUnfix](key)
|
||||
}
|
||||
|
||||
callback(err, entries)
|
||||
}
|
||||
}
|
||||
|
||||
class AbstractSublevelKeyIterator extends AbstractKeyIterator {
|
||||
constructor (db, options, iterator, unfix) {
|
||||
super(db, options)
|
||||
|
||||
this[kIterator] = iterator
|
||||
this[kUnfix] = unfix
|
||||
this[kHandleOne] = this[kHandleOne].bind(this)
|
||||
this[kHandleMany] = this[kHandleMany].bind(this)
|
||||
this[kCallback] = null
|
||||
}
|
||||
|
||||
[kHandleOne] (err, key) {
|
||||
const callback = this[kCallback]
|
||||
if (err) return callback(err)
|
||||
if (key !== undefined) key = this[kUnfix](key)
|
||||
callback(err, key)
|
||||
}
|
||||
|
||||
[kHandleMany] (err, keys) {
|
||||
const callback = this[kCallback]
|
||||
if (err) return callback(err)
|
||||
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const key = keys[i]
|
||||
if (key !== undefined) keys[i] = this[kUnfix](key)
|
||||
}
|
||||
|
||||
callback(err, keys)
|
||||
}
|
||||
}
|
||||
|
||||
class AbstractSublevelValueIterator extends AbstractValueIterator {
|
||||
constructor (db, options, iterator) {
|
||||
super(db, options)
|
||||
this[kIterator] = iterator
|
||||
}
|
||||
}
|
||||
|
||||
for (const Iterator of [AbstractSublevelIterator, AbstractSublevelKeyIterator]) {
|
||||
Iterator.prototype._next = function (callback) {
|
||||
this[kCallback] = callback
|
||||
this[kIterator].next(this[kHandleOne])
|
||||
}
|
||||
|
||||
Iterator.prototype._nextv = function (size, options, callback) {
|
||||
this[kCallback] = callback
|
||||
this[kIterator].nextv(size, options, this[kHandleMany])
|
||||
}
|
||||
|
||||
Iterator.prototype._all = function (options, callback) {
|
||||
this[kCallback] = callback
|
||||
this[kIterator].all(options, this[kHandleMany])
|
||||
}
|
||||
}
|
||||
|
||||
for (const Iterator of [AbstractSublevelValueIterator]) {
|
||||
Iterator.prototype._next = function (callback) {
|
||||
this[kIterator].next(callback)
|
||||
}
|
||||
|
||||
Iterator.prototype._nextv = function (size, options, callback) {
|
||||
this[kIterator].nextv(size, options, callback)
|
||||
}
|
||||
|
||||
Iterator.prototype._all = function (options, callback) {
|
||||
this[kIterator].all(options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
for (const Iterator of [AbstractSublevelIterator, AbstractSublevelKeyIterator, AbstractSublevelValueIterator]) {
|
||||
Iterator.prototype._seek = function (target, options) {
|
||||
this[kIterator].seek(target, options)
|
||||
}
|
||||
|
||||
Iterator.prototype._close = function (callback) {
|
||||
this[kIterator].close(callback)
|
||||
}
|
||||
}
|
||||
|
||||
exports.AbstractSublevelIterator = AbstractSublevelIterator
|
||||
exports.AbstractSublevelKeyIterator = AbstractSublevelKeyIterator
|
||||
exports.AbstractSublevelValueIterator = AbstractSublevelValueIterator
|
258
node_modules/abstract-level/lib/abstract-sublevel.js
generated
vendored
Normal file
@ -0,0 +1,258 @@
|
||||
'use strict'
|
||||
|
||||
const ModuleError = require('module-error')
|
||||
const { Buffer } = require('buffer') || {}
|
||||
const {
|
||||
AbstractSublevelIterator,
|
||||
AbstractSublevelKeyIterator,
|
||||
AbstractSublevelValueIterator
|
||||
} = require('./abstract-sublevel-iterator')
|
||||
|
||||
const kPrefix = Symbol('prefix')
|
||||
const kUpperBound = Symbol('upperBound')
|
||||
const kPrefixRange = Symbol('prefixRange')
|
||||
const kParent = Symbol('parent')
|
||||
const kUnfix = Symbol('unfix')
|
||||
|
||||
const textEncoder = new TextEncoder()
|
||||
const defaults = { separator: '!' }
|
||||
|
||||
// Wrapped to avoid circular dependency
|
||||
module.exports = function ({ AbstractLevel }) {
|
||||
class AbstractSublevel extends AbstractLevel {
|
||||
static defaults (options) {
|
||||
// To help migrating from subleveldown to abstract-level
|
||||
if (typeof options === 'string') {
|
||||
throw new ModuleError('The subleveldown string shorthand for { separator } has been removed', {
|
||||
code: 'LEVEL_LEGACY'
|
||||
})
|
||||
} else if (options && options.open) {
|
||||
throw new ModuleError('The subleveldown open option has been removed', {
|
||||
code: 'LEVEL_LEGACY'
|
||||
})
|
||||
}
|
||||
|
||||
if (options == null) {
|
||||
return defaults
|
||||
} else if (!options.separator) {
|
||||
return { ...options, separator: '!' }
|
||||
} else {
|
||||
return options
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: add autoClose option, which if true, does parent.attachResource(this)
|
||||
constructor (db, name, options) {
|
||||
// Don't forward AbstractSublevel options to AbstractLevel
|
||||
const { separator, manifest, ...forward } = AbstractSublevel.defaults(options)
|
||||
name = trim(name, separator)
|
||||
|
||||
// Reserve one character between separator and name to give us an upper bound
|
||||
const reserved = separator.charCodeAt(0) + 1
|
||||
const parent = db[kParent] || db
|
||||
|
||||
// Keys should sort like ['!a!', '!a!!a!', '!a"', '!aa!', '!b!'].
|
||||
// Use ASCII for consistent length between string, Buffer and Uint8Array
|
||||
if (!textEncoder.encode(name).every(x => x > reserved && x < 127)) {
|
||||
throw new ModuleError(`Prefix must use bytes > ${reserved} < ${127}`, {
|
||||
code: 'LEVEL_INVALID_PREFIX'
|
||||
})
|
||||
}
|
||||
|
||||
super(mergeManifests(parent, manifest), forward)
|
||||
|
||||
const prefix = (db.prefix || '') + separator + name + separator
|
||||
const upperBound = prefix.slice(0, -1) + String.fromCharCode(reserved)
|
||||
|
||||
this[kParent] = parent
|
||||
this[kPrefix] = new MultiFormat(prefix)
|
||||
this[kUpperBound] = new MultiFormat(upperBound)
|
||||
this[kUnfix] = new Unfixer()
|
||||
|
||||
this.nextTick = parent.nextTick
|
||||
}
|
||||
|
||||
prefixKey (key, keyFormat) {
|
||||
if (keyFormat === 'utf8') {
|
||||
return this[kPrefix].utf8 + key
|
||||
} else if (key.byteLength === 0) {
|
||||
// Fast path for empty key (no copy)
|
||||
return this[kPrefix][keyFormat]
|
||||
} else if (keyFormat === 'view') {
|
||||
const view = this[kPrefix].view
|
||||
const result = new Uint8Array(view.byteLength + key.byteLength)
|
||||
|
||||
result.set(view, 0)
|
||||
result.set(key, view.byteLength)
|
||||
|
||||
return result
|
||||
} else {
|
||||
const buffer = this[kPrefix].buffer
|
||||
return Buffer.concat([buffer, key], buffer.byteLength + key.byteLength)
|
||||
}
|
||||
}
|
||||
|
||||
// Not exposed for now.
|
||||
[kPrefixRange] (range, keyFormat) {
|
||||
if (range.gte !== undefined) {
|
||||
range.gte = this.prefixKey(range.gte, keyFormat)
|
||||
} else if (range.gt !== undefined) {
|
||||
range.gt = this.prefixKey(range.gt, keyFormat)
|
||||
} else {
|
||||
range.gte = this[kPrefix][keyFormat]
|
||||
}
|
||||
|
||||
if (range.lte !== undefined) {
|
||||
range.lte = this.prefixKey(range.lte, keyFormat)
|
||||
} else if (range.lt !== undefined) {
|
||||
range.lt = this.prefixKey(range.lt, keyFormat)
|
||||
} else {
|
||||
range.lte = this[kUpperBound][keyFormat]
|
||||
}
|
||||
}
|
||||
|
||||
get prefix () {
|
||||
return this[kPrefix].utf8
|
||||
}
|
||||
|
||||
get db () {
|
||||
return this[kParent]
|
||||
}
|
||||
|
||||
_open (options, callback) {
|
||||
// The parent db must open itself or be (re)opened by the user because
|
||||
// a sublevel should not initiate state changes on the rest of the db.
|
||||
this[kParent].open({ passive: true }, callback)
|
||||
}
|
||||
|
||||
_put (key, value, options, callback) {
|
||||
this[kParent].put(key, value, options, callback)
|
||||
}
|
||||
|
||||
_get (key, options, callback) {
|
||||
this[kParent].get(key, options, callback)
|
||||
}
|
||||
|
||||
_getMany (keys, options, callback) {
|
||||
this[kParent].getMany(keys, options, callback)
|
||||
}
|
||||
|
||||
_del (key, options, callback) {
|
||||
this[kParent].del(key, options, callback)
|
||||
}
|
||||
|
||||
_batch (operations, options, callback) {
|
||||
this[kParent].batch(operations, options, callback)
|
||||
}
|
||||
|
||||
_clear (options, callback) {
|
||||
// TODO (refactor): move to AbstractLevel
|
||||
this[kPrefixRange](options, options.keyEncoding)
|
||||
this[kParent].clear(options, callback)
|
||||
}
|
||||
|
||||
_iterator (options) {
|
||||
// TODO (refactor): move to AbstractLevel
|
||||
this[kPrefixRange](options, options.keyEncoding)
|
||||
const iterator = this[kParent].iterator(options)
|
||||
const unfix = this[kUnfix].get(this[kPrefix].utf8.length, options.keyEncoding)
|
||||
return new AbstractSublevelIterator(this, options, iterator, unfix)
|
||||
}
|
||||
|
||||
_keys (options) {
|
||||
this[kPrefixRange](options, options.keyEncoding)
|
||||
const iterator = this[kParent].keys(options)
|
||||
const unfix = this[kUnfix].get(this[kPrefix].utf8.length, options.keyEncoding)
|
||||
return new AbstractSublevelKeyIterator(this, options, iterator, unfix)
|
||||
}
|
||||
|
||||
_values (options) {
|
||||
this[kPrefixRange](options, options.keyEncoding)
|
||||
const iterator = this[kParent].values(options)
|
||||
return new AbstractSublevelValueIterator(this, options, iterator)
|
||||
}
|
||||
}
|
||||
|
||||
return { AbstractSublevel }
|
||||
}
|
||||
|
||||
const mergeManifests = function (parent, manifest) {
|
||||
return {
|
||||
// Inherit manifest of parent db
|
||||
...parent.supports,
|
||||
|
||||
// Disable unsupported features
|
||||
createIfMissing: false,
|
||||
errorIfExists: false,
|
||||
|
||||
// Unset additional events because we're not forwarding them
|
||||
events: {},
|
||||
|
||||
// Unset additional methods (like approximateSize) which we can't support here unless
|
||||
// the AbstractSublevel class is overridden by an implementation of `abstract-level`.
|
||||
additionalMethods: {},
|
||||
|
||||
// Inherit manifest of custom AbstractSublevel subclass. Such a class is not
|
||||
// allowed to override encodings.
|
||||
...manifest,
|
||||
|
||||
encodings: {
|
||||
utf8: supportsEncoding(parent, 'utf8'),
|
||||
buffer: supportsEncoding(parent, 'buffer'),
|
||||
view: supportsEncoding(parent, 'view')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const supportsEncoding = function (parent, encoding) {
|
||||
// Prefer a non-transcoded encoding for optimal performance
|
||||
return parent.supports.encodings[encoding]
|
||||
? parent.keyEncoding(encoding).name === encoding
|
||||
: false
|
||||
}
|
||||
|
||||
class MultiFormat {
|
||||
constructor (key) {
|
||||
this.utf8 = key
|
||||
this.view = textEncoder.encode(key)
|
||||
this.buffer = Buffer ? Buffer.from(this.view.buffer, 0, this.view.byteLength) : {}
|
||||
}
|
||||
}
|
||||
|
||||
class Unfixer {
|
||||
constructor () {
|
||||
this.cache = new Map()
|
||||
}
|
||||
|
||||
get (prefixLength, keyFormat) {
|
||||
let unfix = this.cache.get(keyFormat)
|
||||
|
||||
if (unfix === undefined) {
|
||||
if (keyFormat === 'view') {
|
||||
unfix = function (prefixLength, key) {
|
||||
// Avoid Uint8Array#slice() because it copies
|
||||
return key.subarray(prefixLength)
|
||||
}.bind(null, prefixLength)
|
||||
} else {
|
||||
unfix = function (prefixLength, key) {
|
||||
// Avoid Buffer#subarray() because it's slow
|
||||
return key.slice(prefixLength)
|
||||
}.bind(null, prefixLength)
|
||||
}
|
||||
|
||||
this.cache.set(keyFormat, unfix)
|
||||
}
|
||||
|
||||
return unfix
|
||||
}
|
||||
}
|
||||
|
||||
const trim = function (str, char) {
|
||||
let start = 0
|
||||
let end = str.length
|
||||
|
||||
while (start < end && str[start] === char) start++
|
||||
while (end > start && str[end - 1] === char) end--
|
||||
|
||||
return str.slice(start, end)
|
||||
}
|
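A brief usage sketch, assuming db is any abstract-level instance; the sublevel name 'people' and its entries are illustrative only:

// Sublevels namespace keys with '!<name>!' by default, as implemented by
// prefixKey() above.
const people = db.sublevel('people')

// Stored in the parent db under the key '!people!alice'
people.put('alice', '{"age":30}', (err) => {
  if (err) throw err

  // Iterators created via the sublevel unfix keys back to 'alice'
  people.keys().all((err, keys) => {
    if (err) throw err
    console.log(keys) // e.g. ['alice']
  })
})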
17
node_modules/abstract-level/lib/common.js
generated
vendored
Normal file
@ -0,0 +1,17 @@
'use strict'

exports.getCallback = function (options, callback) {
  return typeof options === 'function' ? options : callback
}

exports.getOptions = function (options, def) {
  if (typeof options === 'object' && options !== null) {
    return options
  }

  if (def !== undefined) {
    return def
  }

  return {}
}
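A short illustration of how these helpers support an optional options argument; requiring the internal lib path directly is an assumption made for the example:

const { getCallback, getOptions } = require('abstract-level/lib/common')

function example (options, callback) {
  callback = getCallback(options, callback) // picks whichever argument is the function
  options = getOptions(options, { keyEncoding: 'utf8' }) // falls back to a default object

  callback(null, options.keyEncoding)
}

example(console.log) // logs: null 'utf8'
example({ keyEncoding: 'buffer' }, console.log) // logs: null 'buffer'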
41
node_modules/abstract-level/lib/default-chained-batch.js
generated
vendored
Normal file
@ -0,0 +1,41 @@
'use strict'

const { AbstractChainedBatch } = require('../abstract-chained-batch')
const ModuleError = require('module-error')
const kEncoded = Symbol('encoded')

// Functional default for chained batch, with support of deferred open
class DefaultChainedBatch extends AbstractChainedBatch {
  constructor (db) {
    super(db)
    this[kEncoded] = []
  }

  _put (key, value, options) {
    this[kEncoded].push({ ...options, type: 'put', key, value })
  }

  _del (key, options) {
    this[kEncoded].push({ ...options, type: 'del', key })
  }

  _clear () {
    this[kEncoded] = []
  }

  // Assumes this[kEncoded] cannot change after write()
  _write (options, callback) {
    if (this.db.status === 'opening') {
      this.db.defer(() => this._write(options, callback))
    } else if (this.db.status === 'open') {
      if (this[kEncoded].length === 0) this.nextTick(callback)
      else this.db._batch(this[kEncoded], options, callback)
    } else {
      this.nextTick(callback, new ModuleError('Batch is not open: cannot call write() after write() or close()', {
        code: 'LEVEL_BATCH_NOT_OPEN'
      }))
    }
  }
}

exports.DefaultChainedBatch = DefaultChainedBatch
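A usage sketch, assuming db is an abstract-level instance: calling db.batch() with no arguments returns this chained batch, which queues encoded operations and flushes them through db._batch() on write():

db.batch()
  .put('a', '1')
  .put('b', '2')
  .del('c')
  .write((err) => {
    if (err) throw err
    console.log('batch committed')
  })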
72
node_modules/abstract-level/lib/default-kv-iterator.js
generated
vendored
Normal file
@ -0,0 +1,72 @@
|
||||
'use strict'
|
||||
|
||||
const { AbstractKeyIterator, AbstractValueIterator } = require('../abstract-iterator')
|
||||
|
||||
const kIterator = Symbol('iterator')
|
||||
const kCallback = Symbol('callback')
|
||||
const kHandleOne = Symbol('handleOne')
|
||||
const kHandleMany = Symbol('handleMany')
|
||||
|
||||
class DefaultKeyIterator extends AbstractKeyIterator {
|
||||
constructor (db, options) {
|
||||
super(db, options)
|
||||
|
||||
this[kIterator] = db.iterator({ ...options, keys: true, values: false })
|
||||
this[kHandleOne] = this[kHandleOne].bind(this)
|
||||
this[kHandleMany] = this[kHandleMany].bind(this)
|
||||
}
|
||||
}
|
||||
|
||||
class DefaultValueIterator extends AbstractValueIterator {
|
||||
constructor (db, options) {
|
||||
super(db, options)
|
||||
|
||||
this[kIterator] = db.iterator({ ...options, keys: false, values: true })
|
||||
this[kHandleOne] = this[kHandleOne].bind(this)
|
||||
this[kHandleMany] = this[kHandleMany].bind(this)
|
||||
}
|
||||
}
|
||||
|
||||
for (const Iterator of [DefaultKeyIterator, DefaultValueIterator]) {
|
||||
const keys = Iterator === DefaultKeyIterator
|
||||
const mapEntry = keys ? (entry) => entry[0] : (entry) => entry[1]
|
||||
|
||||
Iterator.prototype._next = function (callback) {
|
||||
this[kCallback] = callback
|
||||
this[kIterator].next(this[kHandleOne])
|
||||
}
|
||||
|
||||
Iterator.prototype[kHandleOne] = function (err, key, value) {
|
||||
const callback = this[kCallback]
|
||||
if (err) callback(err)
|
||||
else callback(null, keys ? key : value)
|
||||
}
|
||||
|
||||
Iterator.prototype._nextv = function (size, options, callback) {
|
||||
this[kCallback] = callback
|
||||
this[kIterator].nextv(size, options, this[kHandleMany])
|
||||
}
|
||||
|
||||
Iterator.prototype._all = function (options, callback) {
|
||||
this[kCallback] = callback
|
||||
this[kIterator].all(options, this[kHandleMany])
|
||||
}
|
||||
|
||||
Iterator.prototype[kHandleMany] = function (err, entries) {
|
||||
const callback = this[kCallback]
|
||||
if (err) callback(err)
|
||||
else callback(null, entries.map(mapEntry))
|
||||
}
|
||||
|
||||
Iterator.prototype._seek = function (target, options) {
|
||||
this[kIterator].seek(target, options)
|
||||
}
|
||||
|
||||
Iterator.prototype._close = function (callback) {
|
||||
this[kIterator].close(callback)
|
||||
}
|
||||
}
|
||||
|
||||
// Internal utilities, should be typed as AbstractKeyIterator and AbstractValueIterator
|
||||
exports.DefaultKeyIterator = DefaultKeyIterator
|
||||
exports.DefaultValueIterator = DefaultValueIterator
|
108
node_modules/abstract-level/lib/deferred-iterator.js
generated
vendored
Normal file
@ -0,0 +1,108 @@
|
||||
'use strict'
|
||||
|
||||
const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../abstract-iterator')
|
||||
const ModuleError = require('module-error')
|
||||
|
||||
const kNut = Symbol('nut')
|
||||
const kUndefer = Symbol('undefer')
|
||||
const kFactory = Symbol('factory')
|
||||
|
||||
class DeferredIterator extends AbstractIterator {
|
||||
constructor (db, options) {
|
||||
super(db, options)
|
||||
|
||||
this[kNut] = null
|
||||
this[kFactory] = () => db.iterator(options)
|
||||
|
||||
this.db.defer(() => this[kUndefer]())
|
||||
}
|
||||
}
|
||||
|
||||
class DeferredKeyIterator extends AbstractKeyIterator {
|
||||
constructor (db, options) {
|
||||
super(db, options)
|
||||
|
||||
this[kNut] = null
|
||||
this[kFactory] = () => db.keys(options)
|
||||
|
||||
this.db.defer(() => this[kUndefer]())
|
||||
}
|
||||
}
|
||||
|
||||
class DeferredValueIterator extends AbstractValueIterator {
|
||||
constructor (db, options) {
|
||||
super(db, options)
|
||||
|
||||
this[kNut] = null
|
||||
this[kFactory] = () => db.values(options)
|
||||
|
||||
this.db.defer(() => this[kUndefer]())
|
||||
}
|
||||
}
|
||||
|
||||
for (const Iterator of [DeferredIterator, DeferredKeyIterator, DeferredValueIterator]) {
|
||||
Iterator.prototype[kUndefer] = function () {
|
||||
if (this.db.status === 'open') {
|
||||
this[kNut] = this[kFactory]()
|
||||
}
|
||||
}
|
||||
|
||||
Iterator.prototype._next = function (callback) {
|
||||
if (this[kNut] !== null) {
|
||||
this[kNut].next(callback)
|
||||
} else if (this.db.status === 'opening') {
|
||||
this.db.defer(() => this._next(callback))
|
||||
} else {
|
||||
this.nextTick(callback, new ModuleError('Iterator is not open: cannot call next() after close()', {
|
||||
code: 'LEVEL_ITERATOR_NOT_OPEN'
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
Iterator.prototype._nextv = function (size, options, callback) {
|
||||
if (this[kNut] !== null) {
|
||||
this[kNut].nextv(size, options, callback)
|
||||
} else if (this.db.status === 'opening') {
|
||||
this.db.defer(() => this._nextv(size, options, callback))
|
||||
} else {
|
||||
this.nextTick(callback, new ModuleError('Iterator is not open: cannot call nextv() after close()', {
|
||||
code: 'LEVEL_ITERATOR_NOT_OPEN'
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
Iterator.prototype._all = function (options, callback) {
|
||||
if (this[kNut] !== null) {
|
||||
this[kNut].all(callback)
|
||||
} else if (this.db.status === 'opening') {
|
||||
this.db.defer(() => this._all(options, callback))
|
||||
} else {
|
||||
this.nextTick(callback, new ModuleError('Iterator is not open: cannot call all() after close()', {
|
||||
code: 'LEVEL_ITERATOR_NOT_OPEN'
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
Iterator.prototype._seek = function (target, options) {
|
||||
if (this[kNut] !== null) {
|
||||
// TODO: explain why we need _seek() rather than seek() here
|
||||
this[kNut]._seek(target, options)
|
||||
} else if (this.db.status === 'opening') {
|
||||
this.db.defer(() => this._seek(target, options))
|
||||
}
|
||||
}
|
||||
|
||||
Iterator.prototype._close = function (callback) {
|
||||
if (this[kNut] !== null) {
|
||||
this[kNut].close(callback)
|
||||
} else if (this.db.status === 'opening') {
|
||||
this.db.defer(() => this._close(callback))
|
||||
} else {
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
exports.DeferredIterator = DeferredIterator
|
||||
exports.DeferredKeyIterator = DeferredKeyIterator
|
||||
exports.DeferredValueIterator = DeferredValueIterator
|
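A sketch of deferred reads, assuming db is an abstract-level instance that has not finished opening yet; while db.status is 'opening', iterator(), keys() and values() return the deferred variants above, which replay next(), nextv() and all() once the db has opened:

const it = db.iterator() // safe to create before open() has finished

it.all((err, entries) => {
  if (err) throw err
  console.log(entries) // [[key, value], ...] once the db is open
})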
11
node_modules/abstract-level/lib/next-tick-browser.js
generated
vendored
Normal file
@ -0,0 +1,11 @@
'use strict'

const queueMicrotask = require('queue-microtask')

module.exports = function (fn, ...args) {
  if (args.length === 0) {
    queueMicrotask(fn)
  } else {
    queueMicrotask(() => fn(...args))
  }
}
3
node_modules/abstract-level/lib/next-tick.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
'use strict'

module.exports = process.nextTick
38
node_modules/abstract-level/lib/range-options.js
generated
vendored
Normal file
@ -0,0 +1,38 @@
'use strict'

const ModuleError = require('module-error')
const hasOwnProperty = Object.prototype.hasOwnProperty
const rangeOptions = new Set(['lt', 'lte', 'gt', 'gte'])

module.exports = function (options, keyEncoding) {
  const result = {}

  for (const k in options) {
    if (!hasOwnProperty.call(options, k)) continue
    if (k === 'keyEncoding' || k === 'valueEncoding') continue

    if (k === 'start' || k === 'end') {
      throw new ModuleError(`The legacy range option '${k}' has been removed`, {
        code: 'LEVEL_LEGACY'
      })
    } else if (k === 'encoding') {
      // To help migrating to abstract-level
      throw new ModuleError("The levelup-style 'encoding' alias has been removed, use 'valueEncoding' instead", {
        code: 'LEVEL_LEGACY'
      })
    }

    if (rangeOptions.has(k)) {
      // Note that we don't reject nullish and empty options here. While
      // those types are invalid as keys, they are valid as range options.
      result[k] = keyEncoding.encode(options[k])
    } else {
      result[k] = options[k]
    }
  }

  result.reverse = !!result.reverse
  result.limit = Number.isInteger(result.limit) && result.limit >= 0 ? result.limit : -1

  return result
}
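An illustration of the resulting behavior, assuming db is an open abstract-level instance:

// gt/gte/lt/lte are encoded with the key encoding before reaching _iterator()
db.iterator({ gte: 'a', lt: 'b' })

// Legacy levelup options are rejected up front
try {
  db.iterator({ start: 'a' })
} catch (err) {
  console.log(err.code) // 'LEVEL_LEGACY'
}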
64
node_modules/abstract-level/package.json
generated
vendored
Normal file
@ -0,0 +1,64 @@
|
||||
{
|
||||
"name": "abstract-level",
|
||||
"version": "1.0.4",
|
||||
"description": "Abstract class for a lexicographically sorted key-value database",
|
||||
"license": "MIT",
|
||||
"main": "index.js",
|
||||
"browser": {
|
||||
"./lib/next-tick.js": "./lib/next-tick-browser.js"
|
||||
},
|
||||
"types": "./index.d.ts",
|
||||
"scripts": {
|
||||
"test": "standard && ts-standard *.ts types/*.ts && hallmark && (nyc -s node test/self.js | faucet) && nyc report",
|
||||
"test-browsers": "airtap --coverage test/self.js",
|
||||
"coverage": "nyc report -r lcovonly"
|
||||
},
|
||||
"files": [
|
||||
"abstract-chained-batch.js",
|
||||
"abstract-iterator.js",
|
||||
"abstract-level.js",
|
||||
"index.js",
|
||||
"index.d.ts",
|
||||
"lib",
|
||||
"test",
|
||||
"types",
|
||||
"CHANGELOG.md",
|
||||
"UPGRADING.md"
|
||||
],
|
||||
"dependencies": {
|
||||
"buffer": "^6.0.3",
|
||||
"catering": "^2.1.0",
|
||||
"is-buffer": "^2.0.5",
|
||||
"level-supports": "^4.0.0",
|
||||
"level-transcoder": "^1.0.1",
|
||||
"module-error": "^1.0.1",
|
||||
"queue-microtask": "^1.2.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^18.0.0",
|
||||
"@voxpelli/tsconfig": "^4.0.0",
|
||||
"airtap": "^4.0.4",
|
||||
"airtap-playwright": "^1.0.1",
|
||||
"faucet": "^0.0.3",
|
||||
"hallmark": "^4.0.0",
|
||||
"nyc": "^15.1.0",
|
||||
"sinon": "^14.0.0",
|
||||
"standard": "^16.0.4",
|
||||
"tape": "^5.4.0",
|
||||
"ts-standard": "^11.0.0",
|
||||
"typescript": "^4.5.5"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/Level/abstract-level.git"
|
||||
},
|
||||
"homepage": "https://github.com/Level/abstract-level",
|
||||
"keywords": [
|
||||
"abstract-level",
|
||||
"level",
|
||||
"leveldb"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
}
|
143
node_modules/abstract-level/test/async-iterator-test.js
generated
vendored
Normal file
@ -0,0 +1,143 @@
|
||||
'use strict'
|
||||
|
||||
const input = [{ key: '1', value: '1' }, { key: '2', value: '2' }]
|
||||
|
||||
let db
|
||||
|
||||
exports.setup = function (test, testCommon) {
|
||||
test('setup', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
db = testCommon.factory()
|
||||
db.open(function (err) {
|
||||
t.ifError(err, 'no open() error')
|
||||
|
||||
db.batch(input.map(entry => ({ ...entry, type: 'put' })), function (err) {
|
||||
t.ifError(err, 'no batch() error')
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
exports.asyncIterator = function (test, testCommon) {
|
||||
for (const mode of ['iterator', 'keys', 'values']) {
|
||||
test(`for await...of ${mode}()`, async function (t) {
|
||||
t.plan(1)
|
||||
|
||||
const it = db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
|
||||
const output = []
|
||||
|
||||
for await (const item of it) {
|
||||
output.push(item)
|
||||
}
|
||||
|
||||
t.same(output, input.map(({ key, value }) => {
|
||||
return mode === 'iterator' ? [key, value] : mode === 'keys' ? key : value
|
||||
}))
|
||||
})
|
||||
|
||||
testCommon.supports.permanence && test(`for await...of ${mode}() (deferred)`, async function (t) {
|
||||
t.plan(1)
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.batch(input.map(entry => ({ ...entry, type: 'put' })))
|
||||
await db.close()
|
||||
|
||||
// Don't await
|
||||
db.open()
|
||||
|
||||
const it = db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
|
||||
const output = []
|
||||
|
||||
for await (const item of it) {
|
||||
output.push(item)
|
||||
}
|
||||
|
||||
t.same(output, input.map(({ key, value }) => {
|
||||
return mode === 'iterator' ? [key, value] : mode === 'keys' ? key : value
|
||||
}))
|
||||
|
||||
await db.close()
|
||||
})
|
||||
|
||||
testCommon.supports.snapshots && test(`for await...of ${mode}() (deferred, with snapshot)`, async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const db = testCommon.factory()
|
||||
const it = db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
|
||||
const promise = db.batch(input.map(entry => ({ ...entry, type: 'put' })))
|
||||
const output = []
|
||||
|
||||
for await (const item of it) {
|
||||
output.push(item)
|
||||
}
|
||||
|
||||
t.same(output, [], 'used snapshot')
|
||||
|
||||
// Wait for data to be written
|
||||
await promise
|
||||
|
||||
for await (const item of db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' })) {
|
||||
output.push(item)
|
||||
}
|
||||
|
||||
t.same(output, input.map(({ key, value }) => {
|
||||
return mode === 'iterator' ? [key, value] : mode === 'keys' ? key : value
|
||||
}))
|
||||
|
||||
await db.close()
|
||||
})
|
||||
|
||||
for (const deferred of [false, true]) {
|
||||
test(`for await...of ${mode}() (empty, deferred: ${deferred})`, async function (t) {
|
||||
const db = testCommon.factory()
|
||||
const entries = []
|
||||
|
||||
if (!deferred) await db.open()
|
||||
|
||||
for await (const item of db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' })) {
|
||||
entries.push(item)
|
||||
}
|
||||
|
||||
t.same(entries, [])
|
||||
await db.close()
|
||||
})
|
||||
}
|
||||
|
||||
test(`for await...of ${mode}() does not permit reuse`, async function (t) {
|
||||
t.plan(3)
|
||||
|
||||
const it = db[mode]()
|
||||
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
for await (const item of it) {
|
||||
t.pass('nexted')
|
||||
}
|
||||
|
||||
try {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
for await (const item of it) {
|
||||
t.fail('should not be called')
|
||||
}
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
exports.teardown = function (test, testCommon) {
|
||||
test('teardown', function (t) {
|
||||
t.plan(1)
|
||||
|
||||
db.close(function (err) {
|
||||
t.ifError(err, 'no close() error')
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.setup(test, testCommon)
|
||||
exports.asyncIterator(test, testCommon)
|
||||
exports.teardown(test, testCommon)
|
||||
}
|
320
node_modules/abstract-level/test/batch-test.js
generated
vendored
Normal file
@ -0,0 +1,320 @@
|
||||
'use strict'
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const { verifyNotFoundError, assertAsync } = require('./util')
|
||||
const { illegalKeys, illegalValues } = require('./util')
|
||||
|
||||
let db
|
||||
|
||||
exports.setUp = function (test, testCommon) {
|
||||
test('setUp db', function (t) {
|
||||
db = testCommon.factory()
|
||||
db.open(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
exports.args = function (test, testCommon) {
|
||||
test('test batch() with missing `value`', assertAsync.ctx(function (t) {
|
||||
t.plan(3)
|
||||
|
||||
db.batch([{ type: 'put', key: 'foo1' }], assertAsync(function (err) {
|
||||
t.is(err && err.code, 'LEVEL_INVALID_VALUE', 'correct error code (callback)')
|
||||
}))
|
||||
|
||||
db.batch([{ type: 'put', key: 'foo1' }]).catch((err) => {
|
||||
t.is(err.code, 'LEVEL_INVALID_VALUE', 'correct error code (promise)')
|
||||
})
|
||||
}))
|
||||
|
||||
test('test batch() with illegal values', assertAsync.ctx(function (t) {
|
||||
t.plan(illegalValues.length * 6)
|
||||
|
||||
for (const { name, value } of illegalValues) {
|
||||
db.batch([{ type: 'put', key: 'foo1', value }], assertAsync(function (err) {
|
||||
t.ok(err, name + ' - has error (callback)')
|
||||
t.ok(err instanceof Error, name + ' - is Error (callback)')
|
||||
t.is(err && err.code, 'LEVEL_INVALID_VALUE', 'correct error code (callback)')
|
||||
}))
|
||||
|
||||
db.batch([{ type: 'put', key: 'foo1', value }]).catch(function (err) {
|
||||
t.ok(err instanceof Error, name + ' - is Error (promise)')
|
||||
t.is(err.code, 'LEVEL_INVALID_VALUE', name + ' - correct error code (promise)')
|
||||
})
|
||||
}
|
||||
}))
|
||||
|
||||
test('test batch() with missing `key`', assertAsync.ctx(function (t) {
|
||||
t.plan(3)
|
||||
|
||||
db.batch([{ type: 'put', value: 'foo1' }], assertAsync(function (err) {
|
||||
t.is(err && err.code, 'LEVEL_INVALID_KEY', 'correct error code (callback)')
|
||||
}))
|
||||
|
||||
db.batch([{ type: 'put', value: 'foo1' }]).catch(function (err) {
|
||||
t.is(err.code, 'LEVEL_INVALID_KEY', 'correct error code (promise)')
|
||||
})
|
||||
}))
|
||||
|
||||
test('test batch() with illegal keys', assertAsync.ctx(function (t) {
|
||||
t.plan(illegalKeys.length * 6)
|
||||
|
||||
for (const { name, key } of illegalKeys) {
|
||||
db.batch([{ type: 'put', key, value: 'foo1' }], assertAsync(function (err) {
|
||||
t.ok(err, name + ' - has error (callback)')
|
||||
t.ok(err instanceof Error, name + ' - is Error (callback)')
|
||||
t.is(err && err.code, 'LEVEL_INVALID_KEY', 'correct error code (callback)')
|
||||
}))
|
||||
|
||||
db.batch([{ type: 'put', key, value: 'foo1' }]).catch(function (err) {
|
||||
t.ok(err instanceof Error, name + ' - is Error (promise)')
|
||||
t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code (promise)')
|
||||
})
|
||||
}
|
||||
}))
|
||||
|
||||
test('test batch() with missing or incorrect type', assertAsync.ctx(function (t) {
|
||||
t.plan(10)
|
||||
|
||||
db.batch([{ key: 'key', value: 'value' }], assertAsync(function (err) {
|
||||
t.is(err && err.name, 'TypeError')
|
||||
t.is(err && err.message, "A batch operation must have a type property that is 'put' or 'del'", 'correct error message (callback)')
|
||||
}))
|
||||
|
||||
db.batch([{ key: 'key', value: 'value', type: 'foo' }], assertAsync(function (err) {
|
||||
t.is(err && err.name, 'TypeError')
|
||||
t.is(err && err.message, "A batch operation must have a type property that is 'put' or 'del'", 'correct error message (callback)')
|
||||
}))
|
||||
|
||||
db.batch([{ key: 'key', value: 'value' }]).catch(function (err) {
|
||||
t.is(err.name, 'TypeError')
|
||||
t.is(err.message, "A batch operation must have a type property that is 'put' or 'del'", 'correct error message (promise)')
|
||||
})
|
||||
|
||||
db.batch([{ key: 'key', value: 'value', type: 'foo' }]).catch(function (err) {
|
||||
t.is(err.name, 'TypeError')
|
||||
t.is(err.message, "A batch operation must have a type property that is 'put' or 'del'", 'correct error message (promise)')
|
||||
})
|
||||
}))
|
||||
|
||||
test('test batch() with missing or nullish operations', assertAsync.ctx(function (t) {
|
||||
t.plan(13)
|
||||
|
||||
db.batch(assertAsync(function (err) {
|
||||
t.is(err && err.name, 'TypeError')
|
||||
t.is(err && err.message, "The first argument 'operations' must be an array", 'correct error message (callback)')
|
||||
}))
|
||||
|
||||
for (const array of [null, undefined]) {
|
||||
db.batch(array, assertAsync(function (err) {
|
||||
t.is(err && err.name, 'TypeError')
|
||||
t.is(err && err.message, "The first argument 'operations' must be an array", 'correct error message (callback)')
|
||||
}))
|
||||
|
||||
db.batch(array).catch(function (err) {
|
||||
t.is(err.name, 'TypeError')
|
||||
t.is(err.message, "The first argument 'operations' must be an array", 'correct error message (promise)')
|
||||
})
|
||||
}
|
||||
}))
|
||||
|
||||
test('test batch() with null options', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
db.batch([], null, function (err) {
|
||||
t.error(err)
|
||||
})
|
||||
|
||||
db.batch([], null).then(function () {
|
||||
t.pass('resolved')
|
||||
}).catch(t.fail.bind(t))
|
||||
})
|
||||
|
||||
;[null, undefined, 1, true].forEach(function (operation) {
|
||||
const type = operation === null ? 'null' : typeof operation
|
||||
|
||||
test('test batch() with ' + type + ' operation', assertAsync.ctx(function (t) {
|
||||
t.plan(5)
|
||||
|
||||
db.batch([operation], assertAsync(function (err) {
|
||||
t.is(err && err.name, 'TypeError')
|
||||
t.is(err && err.message, 'A batch operation must be an object', 'correct error message (callback)')
|
||||
}))
|
||||
|
||||
db.batch([operation]).catch(function (err) {
|
||||
t.is(err.name, 'TypeError')
|
||||
t.is(err.message, 'A batch operation must be an object', 'correct error message (promise)')
|
||||
})
|
||||
}))
|
||||
})
|
||||
|
||||
test('test batch() with empty array', assertAsync.ctx(function (t) {
|
||||
t.plan(3)
|
||||
|
||||
db.batch([], assertAsync(function (err) {
|
||||
t.error(err, 'no error from batch()')
|
||||
}))
|
||||
|
||||
db.batch([]).then(function () {
|
||||
t.pass('resolved')
|
||||
}).catch(t.fail.bind(t))
|
||||
}))
|
||||
}
|
||||
|
||||
exports.batch = function (test, testCommon) {
|
||||
test('test simple batch()', function (t) {
|
||||
db.batch([{ type: 'put', key: 'foo', value: 'bar' }], function (err) {
|
||||
t.error(err)
|
||||
|
||||
db.get('foo', function (err, value) {
|
||||
t.error(err)
|
||||
t.is(value, 'bar')
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('test simple batch() with promise', async function (t) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
await db.open()
|
||||
await db.batch([{ type: 'put', key: 'foo', value: 'bar' }])
|
||||
|
||||
t.is(await db.get('foo', { valueEncoding: 'utf8' }), 'bar')
|
||||
return db.close()
|
||||
})
|
||||
|
||||
test('test multiple batch()', function (t) {
|
||||
db.batch([
|
||||
{ type: 'put', key: 'foobatch1', value: 'bar1' },
|
||||
{ type: 'put', key: 'foobatch2', value: 'bar2' },
|
||||
{ type: 'put', key: 'foobatch3', value: 'bar3' },
|
||||
{ type: 'del', key: 'foobatch2' }
|
||||
], function (err) {
|
||||
t.error(err)
|
||||
|
||||
let r = 0
|
||||
const done = function () {
|
||||
if (++r === 3) { t.end() }
|
||||
}
|
||||
|
||||
db.get('foobatch1', function (err, value) {
|
||||
t.error(err)
|
||||
t.is(value, 'bar1')
|
||||
done()
|
||||
})
|
||||
|
||||
db.get('foobatch2', function (err, value) {
|
||||
t.ok(err, 'entry not found')
|
||||
t.ok(typeof value === 'undefined', 'value is undefined')
|
||||
t.ok(verifyNotFoundError(err), 'NotFound error')
|
||||
done()
|
||||
})
|
||||
|
||||
db.get('foobatch3', function (err, value) {
|
||||
t.error(err)
|
||||
t.is(value, 'bar3')
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
for (const encoding of ['utf8', 'buffer', 'view']) {
|
||||
if (!testCommon.supports.encodings[encoding]) continue
|
||||
|
||||
// NOTE: adapted from memdown
|
||||
test(`empty values in batch with ${encoding} valueEncoding`, async function (t) {
|
||||
const db = testCommon.factory({ valueEncoding: encoding })
|
||||
const values = ['', Uint8Array.from([]), Buffer.alloc(0)]
|
||||
const expected = encoding === 'utf8' ? values[0] : encoding === 'view' ? values[1] : values[2]
|
||||
|
||||
await db.open()
|
||||
await db.batch(values.map((value, i) => ({ type: 'put', key: String(i), value })))
|
||||
|
||||
for (let i = 0; i < values.length; i++) {
|
||||
const value = await db.get(String(i))
|
||||
|
||||
// Buffer is a Uint8Array, so this is allowed
|
||||
if (encoding === 'view' && Buffer.isBuffer(value)) {
|
||||
t.same(value, values[2])
|
||||
} else {
|
||||
t.same(value, expected)
|
||||
}
|
||||
}
|
||||
|
||||
return db.close()
|
||||
})
|
||||
|
||||
test(`empty keys in batch with ${encoding} keyEncoding`, async function (t) {
|
||||
const db = testCommon.factory({ keyEncoding: encoding })
|
||||
const keys = ['', Uint8Array.from([]), Buffer.alloc(0)]
|
||||
|
||||
await db.open()
|
||||
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
await db.batch([{ type: 'put', key: keys[i], value: String(i) }])
|
||||
t.same(await db.get(keys[i]), String(i), `got value ${i}`)
|
||||
}
|
||||
|
||||
return db.close()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
exports.atomic = function (test, testCommon) {
|
||||
test('test batch() is atomic', function (t) {
|
||||
t.plan(4)
|
||||
|
||||
let async = false
|
||||
|
||||
db.batch([
|
||||
{ type: 'put', key: 'foobah1', value: 'bar1' },
|
||||
{ type: 'put', value: 'bar2' },
|
||||
{ type: 'put', key: 'foobah3', value: 'bar3' }
|
||||
], function (err) {
|
||||
t.ok(err, 'should error')
|
||||
t.ok(async, 'callback is asynchronous')
|
||||
|
||||
db.get('foobah1', function (err) {
|
||||
t.ok(err, 'should not be found')
|
||||
})
|
||||
db.get('foobah3', function (err) {
|
||||
t.ok(err, 'should not be found')
|
||||
})
|
||||
})
|
||||
|
||||
async = true
|
||||
})
|
||||
}
|
||||
|
||||
exports.events = function (test, testCommon) {
|
||||
test('test batch([]) (array-form) emits batch event', async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
|
||||
t.ok(db.supports.events.batch)
|
||||
|
||||
db.on('batch', function (ops) {
|
||||
t.same(ops, [{ type: 'put', key: 456, value: 99, custom: 123 }])
|
||||
})
|
||||
|
||||
await db.batch([{ type: 'put', key: 456, value: 99, custom: 123 }])
|
||||
await db.close()
|
||||
})
|
||||
}
|
||||
|
||||
exports.tearDown = function (test, testCommon) {
|
||||
test('tearDown', function (t) {
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.setUp(test, testCommon)
|
||||
exports.args(test, testCommon)
|
||||
exports.batch(test, testCommon)
|
||||
exports.atomic(test, testCommon)
|
||||
exports.events(test, testCommon)
|
||||
exports.tearDown(test, testCommon)
|
||||
}
|
295
node_modules/abstract-level/test/chained-batch-test.js
generated
vendored
Normal file
@ -0,0 +1,295 @@
|
||||
'use strict'
|
||||
|
||||
let db
|
||||
|
||||
exports.setUp = function (test, testCommon) {
|
||||
test('setUp db', function (t) {
|
||||
db = testCommon.factory()
|
||||
db.open(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
exports.args = function (test, testCommon) {
|
||||
test('test batch has db reference', function (t) {
|
||||
t.ok(db.batch().db === db)
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('test batch#put() with missing, null or undefined `value`', function (t) {
|
||||
t.plan(3 * 2)
|
||||
|
||||
for (const args of [[null], [undefined], []]) {
|
||||
const batch = db.batch()
|
||||
|
||||
try {
|
||||
batch.put('key', ...args)
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_INVALID_VALUE', 'correct error code')
|
||||
t.is(batch.length, 0, 'length is not incremented on error')
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
test('test batch#put() with missing, null or undefined `key`', function (t) {
|
||||
t.plan(3 * 2)
|
||||
|
||||
for (const args of [[], [null, 'foo'], [undefined, 'foo']]) {
|
||||
const batch = db.batch()
|
||||
|
||||
try {
|
||||
batch.put(...args)
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_INVALID_KEY', 'correct error code')
|
||||
t.is(batch.length, 0, 'length is not incremented on error')
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
test('test batch#del() with missing, null or undefined `key`', function (t) {
|
||||
t.plan(3 * 2)
|
||||
|
||||
for (const args of [[null], [undefined], []]) {
|
||||
const batch = db.batch()
|
||||
|
||||
try {
|
||||
batch.del(...args)
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_INVALID_KEY', 'correct error code')
|
||||
t.is(batch.length, 0, 'length is not incremented on error')
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
test('test batch#clear() doesn\'t throw', function (t) {
|
||||
db.batch().clear()
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('test batch#put() after write()', function (t) {
|
||||
const batch = db.batch().put('foo', 'bar')
|
||||
batch.write(function () {})
|
||||
try {
|
||||
batch.put('boom', 'bang')
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_BATCH_NOT_OPEN', 'correct error code')
|
||||
return t.end()
|
||||
}
|
||||
t.fail('should have thrown')
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('test batch#del() after write()', function (t) {
|
||||
const batch = db.batch().put('foo', 'bar')
|
||||
batch.write(function () {})
|
||||
try {
|
||||
batch.del('foo')
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_BATCH_NOT_OPEN', 'correct error code')
|
||||
return t.end()
|
||||
}
|
||||
t.fail('should have thrown')
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('test batch#clear() after write()', function (t) {
|
||||
const batch = db.batch().put('foo', 'bar')
|
||||
batch.write(function () {})
|
||||
try {
|
||||
batch.clear()
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_BATCH_NOT_OPEN', 'correct error code')
|
||||
return t.end()
|
||||
}
|
||||
t.fail('should have thrown')
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('test batch#write() after write()', function (t) {
|
||||
t.plan(1)
|
||||
const batch = db.batch().put('foo', 'bar')
|
||||
batch.write(function () {})
|
||||
batch.write(function (err) {
|
||||
t.is(err && err.code, 'LEVEL_BATCH_NOT_OPEN', 'correct error code')
|
||||
})
|
||||
})
|
||||
|
||||
test('test batch#write() with no operations', function (t) {
|
||||
let async = false
|
||||
|
||||
db.batch().write(function (err) {
|
||||
t.ifError(err, 'no error from write()')
|
||||
t.ok(async, 'callback is asynchronous')
|
||||
t.end()
|
||||
})
|
||||
|
||||
async = true
|
||||
})
|
||||
|
||||
test('test batch#write() with promise and no operations', function (t) {
|
||||
db.batch().write()
|
||||
.then(t.end.bind(t))
|
||||
.catch(t.end.bind(t))
|
||||
})
|
||||
|
||||
test('test twice batch#close() is idempotent', function (t) {
|
||||
const batch = db.batch()
|
||||
batch.close(function () {
|
||||
let async = false
|
||||
|
||||
batch.close(function () {
|
||||
t.ok(async, 'callback is asynchronous')
|
||||
t.end()
|
||||
})
|
||||
|
||||
async = true
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
exports.batch = function (test, testCommon) {
|
||||
test('test basic batch', function (t) {
|
||||
db.batch([
|
||||
{ type: 'put', key: 'one', value: '1' },
|
||||
{ type: 'put', key: 'two', value: '2' },
|
||||
{ type: 'put', key: 'three', value: '3' }
|
||||
], function (err) {
|
||||
t.error(err)
|
||||
|
||||
const batch = db.batch()
|
||||
.put('1', 'one')
|
||||
.del('2', 'two')
|
||||
.put('3', 'three')
|
||||
|
||||
t.is(batch.length, 3, 'length was incremented')
|
||||
|
||||
batch.clear()
|
||||
t.is(batch.length, 0, 'length is reset')
|
||||
|
||||
batch.put('one', 'I')
|
||||
.put('two', 'II')
|
||||
.del('three')
|
||||
.put('foo', 'bar')
|
||||
|
||||
t.is(batch.length, 4, 'length was incremented')
|
||||
|
||||
batch.write(function (err) {
|
||||
t.error(err, 'no write() error')
|
||||
|
||||
db.iterator({ keyEncoding: 'utf8', valueEncoding: 'utf8' }).all(function (err, entries) {
|
||||
t.error(err)
|
||||
t.same(entries, [
|
||||
['foo', 'bar'],
|
||||
['one', 'I'],
|
||||
['two', 'II']
|
||||
])
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('test basic batch with promise', function (t) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(function (err) {
|
||||
t.error(err)
|
||||
|
||||
db.batch()
|
||||
.put('1', 'one')
|
||||
.put('2', 'two')
|
||||
.put('3', 'three')
|
||||
.write().then(function () {
|
||||
db.iterator({ keyEncoding: 'utf8', valueEncoding: 'utf8' }).all(function (err, entries) {
|
||||
t.error(err)
|
||||
t.same(entries, [
|
||||
['1', 'one'],
|
||||
['2', 'two'],
|
||||
['3', 'three']
|
||||
])
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
}).catch(t.fail.bind(t))
|
||||
})
|
||||
})
|
||||
|
||||
// NOTE: adapted from levelup
|
||||
test('chained batch with per-operation encoding options', async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
|
||||
db.once('batch', function (operations) {
|
||||
t.same(operations, [
|
||||
{ type: 'put', key: 'a', value: 'a', valueEncoding: 'json' },
|
||||
{ type: 'put', key: 'b', value: 'b' },
|
||||
{ type: 'put', key: '"c"', value: 'c' },
|
||||
{ type: 'del', key: 'c', keyEncoding: 'json', arbitraryOption: true }
|
||||
])
|
||||
})
|
||||
|
||||
await db.batch()
|
||||
.put('a', 'a', { valueEncoding: 'json' })
|
||||
.put('b', 'b')
|
||||
.put('"c"', 'c')
|
||||
.del('c', { keyEncoding: 'json', arbitraryOption: true })
|
||||
.write()
|
||||
|
||||
t.same(await db.iterator().all(), [
|
||||
['a', '"a"'],
|
||||
['b', 'b']
|
||||
])
|
||||
|
||||
return db.close()
|
||||
})
|
||||
}
|
||||
|
||||
exports.events = function (test, testCommon) {
|
||||
test('test chained batch() emits batch event', async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
|
||||
t.ok(db.supports.events.batch)
|
||||
|
||||
db.on('batch', function (ops) {
|
||||
t.same(ops, [
|
||||
{ type: 'put', key: 987, value: 'b', custom: 123 },
|
||||
{ type: 'del', key: 216, custom: 999 }
|
||||
])
|
||||
})
|
||||
|
||||
await db.batch().put(987, 'b', { custom: 123 }).del(216, { custom: 999 }).write()
|
||||
await db.close()
|
||||
})
|
||||
|
||||
test('test close() on chained batch event', async function () {
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
|
||||
let promise
|
||||
|
||||
db.on('batch', function () {
|
||||
// Should not interfere with the current write() operation
|
||||
promise = db.close()
|
||||
})
|
||||
|
||||
await db.batch().put('a', 'b').write()
|
||||
await promise
|
||||
})
|
||||
}
|
||||
|
||||
exports.tearDown = function (test, testCommon) {
|
||||
test('tearDown', function (t) {
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.setUp(test, testCommon)
|
||||
exports.args(test, testCommon)
|
||||
exports.batch(test, testCommon)
|
||||
exports.events(test, testCommon)
|
||||
exports.tearDown(test, testCommon)
|
||||
}
|
274
node_modules/abstract-level/test/clear-range-test.js
generated
vendored
Normal file
@ -0,0 +1,274 @@
|
||||
'use strict'
|
||||
|
||||
const data = (function () {
|
||||
const d = []
|
||||
let i = 0
|
||||
let k
|
||||
for (; i < 100; i++) {
|
||||
k = (i < 10 ? '0' : '') + i
|
||||
d.push({
|
||||
key: k,
|
||||
value: String(Math.random())
|
||||
})
|
||||
}
|
||||
return d
|
||||
}())
|
||||
|
||||
exports.range = function (test, testCommon) {
|
||||
function rangeTest (name, opts, expected) {
|
||||
test('db#clear() with ' + name, function (t) {
|
||||
prepare(t, function (db) {
|
||||
db.clear(opts, function (err) {
|
||||
t.ifError(err, 'no clear error')
|
||||
verify(t, db, expected)
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function prepare (t, callback) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err, 'no open error')
|
||||
|
||||
db.batch(data.map(function (d) {
|
||||
return {
|
||||
type: 'put',
|
||||
key: d.key,
|
||||
value: d.value
|
||||
}
|
||||
}), function (err) {
|
||||
t.ifError(err, 'no batch error')
|
||||
callback(db)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function verify (t, db, expected) {
|
||||
const it = db.iterator({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
|
||||
|
||||
it.all(function (err, entries) {
|
||||
t.ifError(err, 'no all() error')
|
||||
t.is(entries.length, expected.length, 'correct number of entries')
|
||||
t.same(entries, expected.map(kv => [kv.key, kv.value]))
|
||||
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
function exclude (data, start, end, expectedLength) {
|
||||
data = data.slice()
|
||||
const removed = data.splice(start, end - start + 1) // Inclusive
|
||||
if (expectedLength != null) checkLength(removed, expectedLength)
|
||||
return data
|
||||
}
|
||||
|
||||
// For sanity checks on test arguments
|
||||
function checkLength (arr, length) {
|
||||
if (arr.length !== length) {
|
||||
throw new RangeError('Expected ' + length + ' elements, got ' + arr.length)
|
||||
}
|
||||
|
||||
return arr
|
||||
}
|
||||
|
||||
rangeTest('full range', {}, [])
|
||||
|
||||
// Reversing has no effect without limit
|
||||
rangeTest('reverse=true', {
|
||||
reverse: true
|
||||
}, [])
|
||||
|
||||
rangeTest('gte=00', {
|
||||
gte: '00'
|
||||
}, [])
|
||||
|
||||
rangeTest('gte=50', {
|
||||
gte: '50'
|
||||
}, data.slice(0, 50))
|
||||
|
||||
rangeTest('lte=50 and reverse=true', {
|
||||
lte: '50',
|
||||
reverse: true
|
||||
}, data.slice(51))
|
||||
|
||||
rangeTest('gte=49.5 (midway)', {
|
||||
gte: '49.5'
|
||||
}, data.slice(0, 50))
|
||||
|
||||
rangeTest('gte=49999 (midway)', {
|
||||
gte: '49999'
|
||||
}, data.slice(0, 50))
|
||||
|
||||
rangeTest('lte=49.5 (midway) and reverse=true', {
|
||||
lte: '49.5',
|
||||
reverse: true
|
||||
}, data.slice(50))
|
||||
|
||||
rangeTest('lt=49.5 (midway) and reverse=true', {
|
||||
lt: '49.5',
|
||||
reverse: true
|
||||
}, data.slice(50))
|
||||
|
||||
rangeTest('lt=50 and reverse=true', {
|
||||
lt: '50',
|
||||
reverse: true
|
||||
}, data.slice(50))
|
||||
|
||||
rangeTest('lte=50', {
|
||||
lte: '50'
|
||||
}, data.slice(51))
|
||||
|
||||
rangeTest('lte=50.5 (midway)', {
|
||||
lte: '50.5'
|
||||
}, data.slice(51))
|
||||
|
||||
rangeTest('lte=50555 (midway)', {
|
||||
lte: '50555'
|
||||
}, data.slice(51))
|
||||
|
||||
rangeTest('lt=50555 (midway)', {
|
||||
lt: '50555'
|
||||
}, data.slice(51))
|
||||
|
||||
rangeTest('gte=50.5 (midway) and reverse=true', {
|
||||
gte: '50.5',
|
||||
reverse: true
|
||||
}, data.slice(0, 51))
|
||||
|
||||
rangeTest('gt=50.5 (midway) and reverse=true', {
|
||||
gt: '50.5',
|
||||
reverse: true
|
||||
}, data.slice(0, 51))
|
||||
|
||||
rangeTest('gt=50 and reverse=true', {
|
||||
gt: '50',
|
||||
reverse: true
|
||||
}, data.slice(0, 51))
|
||||
|
||||
// First key is actually '00' so it should avoid it
|
||||
rangeTest('lte=0', {
|
||||
lte: '0'
|
||||
}, data)
|
||||
|
||||
// First key is actually '00' so it should avoid it
|
||||
rangeTest('lt=0', {
|
||||
lt: '0'
|
||||
}, data)
|
||||
|
||||
rangeTest('gte=30 and lte=70', {
|
||||
gte: '30',
|
||||
lte: '70'
|
||||
}, exclude(data, 30, 70))
|
||||
|
||||
// The gte and lte options should take precedence over gt and lt respectively.
|
||||
rangeTest('test iterator with gte=30 and lte=70 and gt=40 and lt=60', {
|
||||
gte: '30',
|
||||
lte: '70',
|
||||
gt: '40',
|
||||
lt: '60'
|
||||
}, exclude(data, 30, 70))
|
||||
|
||||
// Also test the other way around: if gt and lt were to select a bigger range.
|
||||
rangeTest('test iterator with gte=30 and lte=70 and gt=20 and lt=80', {
|
||||
gte: '30',
|
||||
lte: '70',
|
||||
gt: '20',
|
||||
lt: '80'
|
||||
}, exclude(data, 30, 70))
|
||||
|
||||
rangeTest('gt=29 and lt=71', {
|
||||
gt: '29',
|
||||
lt: '71'
|
||||
}, exclude(data, 30, 70))
|
||||
|
||||
rangeTest('gte=30 and lte=70 and reverse=true', {
|
||||
lte: '70',
|
||||
gte: '30',
|
||||
reverse: true
|
||||
}, exclude(data, 30, 70))
|
||||
|
||||
rangeTest('gt=29 and lt=71 and reverse=true', {
|
||||
lt: '71',
|
||||
gt: '29',
|
||||
reverse: true
|
||||
}, exclude(data, 30, 70))
|
||||
|
||||
rangeTest('limit=20', {
|
||||
limit: 20
|
||||
}, data.slice(20))
|
||||
|
||||
rangeTest('limit=20 and gte=20', {
|
||||
limit: 20,
|
||||
gte: '20'
|
||||
}, exclude(data, 20, 39, 20))
|
||||
|
||||
rangeTest('limit=20 and reverse=true', {
|
||||
limit: 20,
|
||||
reverse: true
|
||||
}, data.slice(0, -20))
|
||||
|
||||
rangeTest('limit=20 and lte=79 and reverse=true', {
|
||||
limit: 20,
|
||||
lte: '79',
|
||||
reverse: true
|
||||
}, exclude(data, 60, 79, 20))
|
||||
|
||||
rangeTest('limit=-1 should clear whole database', {
|
||||
limit: -1
|
||||
}, [])
|
||||
|
||||
rangeTest('limit=0 should not clear anything', {
|
||||
limit: 0
|
||||
}, data)
|
||||
|
||||
rangeTest('lte after limit', {
|
||||
limit: 20,
|
||||
lte: '50'
|
||||
}, data.slice(20))
|
||||
|
||||
rangeTest('lte before limit', {
|
||||
limit: 50,
|
||||
lte: '19'
|
||||
}, data.slice(20))
|
||||
|
||||
rangeTest('gte after database end', {
|
||||
gte: '9a'
|
||||
}, data)
|
||||
|
||||
rangeTest('gt after database end', {
|
||||
gt: '9a'
|
||||
}, data)
|
||||
|
||||
rangeTest('lte after database end and reverse=true', {
|
||||
lte: '9a',
|
||||
reverse: true
|
||||
}, [])
|
||||
|
||||
rangeTest('lte and gte after database and reverse=true', {
|
||||
lte: '9b',
|
||||
gte: '9a',
|
||||
reverse: true
|
||||
}, data)
|
||||
|
||||
rangeTest('lt and gt after database and reverse=true', {
|
||||
lt: '9b',
|
||||
gt: '9a',
|
||||
reverse: true
|
||||
}, data)
|
||||
|
||||
rangeTest('gt greater than lt', {
|
||||
gt: '20',
|
||||
lt: '10'
|
||||
}, data)
|
||||
|
||||
rangeTest('gte greater than lte', {
|
||||
gte: '20',
|
||||
lte: '10'
|
||||
}, data)
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.range(test, testCommon)
|
||||
}
|
183
node_modules/abstract-level/test/clear-test.js
generated
vendored
Normal file
@ -0,0 +1,183 @@
|
||||
'use strict'
|
||||
|
||||
const isBuffer = require('is-buffer')
|
||||
const { Buffer } = require('buffer')
|
||||
|
||||
exports.args = function (test, testCommon) {
|
||||
test('test clear() with legacy range options', function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err)
|
||||
|
||||
try {
|
||||
db.clear({ start: 'foo' }, t.fail.bind(t))
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_LEGACY')
|
||||
}
|
||||
|
||||
try {
|
||||
db.clear({ end: 'foo' }).catch(t.fail.bind(t))
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_LEGACY')
|
||||
}
|
||||
|
||||
db.close(t.ifError.bind(t))
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
exports.clear = function (test, testCommon) {
|
||||
makeTest('string', ['a', 'b'])
|
||||
|
||||
if (testCommon.supports.encodings.buffer) {
|
||||
makeTest('buffer', [Buffer.from('a'), Buffer.from('b')])
|
||||
makeTest('mixed', [Buffer.from('a'), 'b'])
|
||||
|
||||
// These keys would be equal when compared as utf8 strings
|
||||
makeTest('non-utf8 buffer', [Buffer.from('80', 'hex'), Buffer.from('c0', 'hex')])
|
||||
}
|
||||
|
||||
function makeTest (type, keys) {
|
||||
test('test simple clear() on ' + type + ' keys', function (t) {
|
||||
t.plan(8)
|
||||
|
||||
const db = testCommon.factory()
|
||||
const ops = keys.map(function (key) {
|
||||
return { type: 'put', key: key, value: 'foo', keyEncoding: isBuffer(key) ? 'buffer' : 'utf8' }
|
||||
})
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err, 'no open error')
|
||||
|
||||
db.batch(ops, function (err) {
|
||||
t.ifError(err, 'no batch error')
|
||||
|
||||
db.iterator().all(function (err, entries) {
|
||||
t.ifError(err, 'no all() error')
|
||||
t.is(entries.length, keys.length, 'has entries')
|
||||
|
||||
db.clear(function (err) {
|
||||
t.ifError(err, 'no clear error')
|
||||
|
||||
db.iterator().all(function (err, entries) {
|
||||
t.ifError(err, 'no all() error')
|
||||
t.is(entries.length, 0, 'has no entries')
|
||||
|
||||
db.close(function (err) {
|
||||
t.ifError(err, 'no close error')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('test simple clear() on ' + type + ' keys, with promise', function (t) {
|
||||
t.plan(8)
|
||||
|
||||
const db = testCommon.factory()
|
||||
const ops = keys.map(function (key) {
|
||||
return { type: 'put', key: key, value: 'foo', keyEncoding: isBuffer(key) ? 'buffer' : 'utf8' }
|
||||
})
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err, 'no open error')
|
||||
|
||||
db.batch(ops, function (err) {
|
||||
t.ifError(err, 'no batch error')
|
||||
|
||||
db.iterator().all(function (err, entries) {
|
||||
t.ifError(err, 'no all() error')
|
||||
t.is(entries.length, keys.length, 'has entries')
|
||||
|
||||
db.clear().then(function () {
|
||||
t.ifError(err, 'no clear error')
|
||||
|
||||
db.iterator().all(function (err, entries) {
|
||||
t.ifError(err, 'no all() error')
|
||||
t.is(entries.length, 0, 'has no entries')
|
||||
|
||||
db.close(function (err) {
|
||||
t.ifError(err, 'no close error')
|
||||
})
|
||||
})
|
||||
}).catch(t.fail.bind(t))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// NOTE: adapted from levelup
|
||||
for (const deferred of [false, true]) {
|
||||
for (const [gte, keyEncoding] of [['"b"', 'utf8'], ['b', 'json']]) {
|
||||
test(`clear() with ${keyEncoding} encoding (deferred: ${deferred})`, async function (t) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
await db.open()
|
||||
await db.batch([
|
||||
{ type: 'put', key: '"a"', value: 'a' },
|
||||
{ type: 'put', key: '"b"', value: 'b' }
|
||||
])
|
||||
|
||||
if (deferred) {
|
||||
await db.close()
|
||||
t.is(db.status, 'closed')
|
||||
db.open(t.ifError.bind(t))
|
||||
t.is(db.status, 'opening')
|
||||
}
|
||||
|
||||
await db.clear({ gte, keyEncoding })
|
||||
|
||||
const keys = await db.keys().all()
|
||||
t.same(keys, ['"a"'], 'got expected keys')
|
||||
|
||||
return db.close()
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
exports.events = function (test, testCommon) {
|
||||
test('test clear() with options emits clear event', async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
|
||||
t.ok(db.supports.events.clear)
|
||||
|
||||
db.on('clear', function (options) {
|
||||
t.same(options, { gt: 567, custom: 123 })
|
||||
})
|
||||
|
||||
await db.clear({ gt: 567, custom: 123 })
|
||||
await db.close()
|
||||
})
|
||||
|
||||
test('test clear() without options emits clear event', async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
|
||||
t.ok(db.supports.events.clear)
|
||||
|
||||
db.on('clear', function (options) {
|
||||
t.same(options, {})
|
||||
})
|
||||
|
||||
await db.clear()
|
||||
await db.close()
|
||||
})
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.args(test, testCommon)
|
||||
exports.events(test, testCommon)
|
||||
exports.clear(test, testCommon)
|
||||
}
|
32
node_modules/abstract-level/test/close-test.js
generated
vendored
Normal file
@ -0,0 +1,32 @@
'use strict'

exports.close = function (test, testCommon) {
  test('test close()', function (t) {
    const db = testCommon.factory()

    db.open(function (err) {
      t.ifError(err, 'no open() error')

      db.close(function (err) {
        t.error(err)
        t.end()
      })
    })
  })

  test('test close() with promise', function (t) {
    const db = testCommon.factory()

    db.open(function (err) {
      t.ifError(err, 'no open() error')

      db.close()
        .then(t.end.bind(t))
        .catch(t.end.bind(t))
    })
  })
}

exports.all = function (test, testCommon) {
  exports.close(test, testCommon)
}
90
node_modules/abstract-level/test/common.js
generated
vendored
Normal file
@ -0,0 +1,90 @@
|
||||
'use strict'
|
||||
|
||||
const kNone = Symbol('none')
|
||||
const kProtected = Symbol('protected')
|
||||
|
||||
function testCommon (options) {
|
||||
const factory = options.factory
|
||||
const test = options.test
|
||||
|
||||
if (typeof factory !== 'function') {
|
||||
throw new TypeError('factory must be a function')
|
||||
}
|
||||
|
||||
if (typeof test !== 'function') {
|
||||
throw new TypeError('test must be a function')
|
||||
}
|
||||
|
||||
if (options.legacyRange != null) {
|
||||
throw new Error('The legacyRange option has been removed')
|
||||
}
|
||||
|
||||
let supports = kNone
|
||||
|
||||
return protect(options, {
|
||||
test: test,
|
||||
factory: factory,
|
||||
internals: options.internals || {},
|
||||
|
||||
// Expose manifest through testCommon to more easily skip tests based on
|
||||
// supported features. Use a getter to only create a db once. Implicitly
|
||||
// we also test that the manifest doesn't change after the db constructor.
|
||||
get supports () {
|
||||
if (supports === kNone) this.supports = this.factory().supports
|
||||
return supports
|
||||
},
|
||||
|
||||
// Prefer assigning early via manifest-test unless test.only() is used
|
||||
// in which case we create the manifest on-demand. Copy it to be safe.
|
||||
set supports (value) {
|
||||
if (supports === kNone) supports = JSON.parse(JSON.stringify(value))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = testCommon
|
||||
|
||||
// To help migrating from abstract-leveldown.
|
||||
// Throw if test suite options are used instead of db.supports
|
||||
function protect (options, testCommon) {
|
||||
const legacyOptions = [
|
||||
['createIfMissing', true],
|
||||
['errorIfExists', true],
|
||||
['snapshots', true],
|
||||
['seek', true],
|
||||
['encodings', true],
|
||||
['deferredOpen', true],
|
||||
['streams', true],
|
||||
['clear', true],
|
||||
['getMany', true],
|
||||
['bufferKeys', false],
|
||||
['serialize', false],
|
||||
['idempotentOpen', false],
|
||||
['passiveOpen', false],
|
||||
['openCallback', false]
|
||||
]
|
||||
|
||||
Object.defineProperty(testCommon, kProtected, {
|
||||
value: true
|
||||
})
|
||||
|
||||
for (const [k, exists] of legacyOptions) {
|
||||
const msg = exists ? 'has moved to db.supports' : 'has been removed'
|
||||
|
||||
// Options may be a testCommon instance
|
||||
if (!options[kProtected] && k in options) {
|
||||
throw new Error(`The test suite option '${k}' ${msg}`)
|
||||
}
|
||||
|
||||
Object.defineProperty(testCommon, k, {
|
||||
get () {
|
||||
throw new Error(`The test suite option '${k}' ${msg}`)
|
||||
},
|
||||
set () {
|
||||
throw new Error(`The test suite option '${k}' ${msg}`)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
return testCommon
|
||||
}
|
329
node_modules/abstract-level/test/deferred-open-test.js
generated
vendored
Normal file
@ -0,0 +1,329 @@
|
||||
'use strict'
|
||||
|
||||
const { DeferredIterator } = require('../lib/deferred-iterator')
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
function verifyValues (t, db, entries) {
|
||||
let pendingGets = 3
|
||||
|
||||
for (let k = 1; k <= entries; k++) {
|
||||
db.get('k' + k, { valueEncoding: 'utf8' }, function (err, v) {
|
||||
t.ifError(err, 'no get() error')
|
||||
t.is(v, 'v' + k, 'value is ok')
|
||||
t.is(db.status, 'open', 'status is ok')
|
||||
|
||||
if (--pendingGets <= 0) {
|
||||
db.get('k4', { valueEncoding: 'utf8' }, function (err) {
|
||||
t.ok(err)
|
||||
db.close(t.ifError.bind(t))
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE: copied from levelup
|
||||
test('deferred open(): put() and get() on new database', function (t) {
|
||||
t.plan(15)
|
||||
|
||||
// Open database without callback, opens in next tick
|
||||
const db = testCommon.factory()
|
||||
|
||||
let pendingPuts = 3
|
||||
|
||||
// Insert 3 values with put(), these should be deferred until the database is actually open
|
||||
for (let k = 1; k <= 3; k++) {
|
||||
db.put('k' + k, 'v' + k, function (err) {
|
||||
t.ifError(err, 'no put() error')
|
||||
|
||||
if (--pendingPuts <= 0) {
|
||||
verifyValues(t, db, 3)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
t.is(db.status, 'opening')
|
||||
})
|
||||
|
||||
// NOTE: copied from levelup
|
||||
test('deferred open(): batch() on new database', function (t) {
|
||||
t.plan(13)
|
||||
|
||||
// Open database without callback, opens in next tick
|
||||
const db = testCommon.factory()
|
||||
|
||||
// Insert 3 values with batch(), these should be deferred until the database is actually open
|
||||
db.batch([
|
||||
{ type: 'put', key: 'k1', value: 'v1' },
|
||||
{ type: 'put', key: 'k2', value: 'v2' },
|
||||
{ type: 'put', key: 'k3', value: 'v3' }
|
||||
], function (err) {
|
||||
t.ifError(err, 'no batch() error')
|
||||
verifyValues(t, db, 3)
|
||||
})
|
||||
|
||||
t.is(db.status, 'opening')
|
||||
})
|
||||
|
||||
// NOTE: copied from levelup
|
||||
test('deferred open(): chained batch() on new database', function (t) {
|
||||
t.plan(13)
|
||||
|
||||
// Open database without callback, opens in next tick
|
||||
const db = testCommon.factory()
|
||||
|
||||
// Insert 3 values with batch(), these should be deferred until the database is actually open
|
||||
db.batch()
|
||||
.put('k1', 'v1')
|
||||
.put('k2', 'v2')
|
||||
.put('k3', 'v3')
|
||||
.write(function (err) {
|
||||
t.ifError(err, 'no write() error')
|
||||
verifyValues(t, db, 3)
|
||||
})
|
||||
|
||||
t.is(db.status, 'opening')
|
||||
})
|
||||
|
||||
// NOTE: copied from levelup
|
||||
test('deferred open(): put() and get() on reopened database', async function (t) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
await db.close()
|
||||
t.is(db.status, 'closed')
|
||||
|
||||
db.open(() => {})
|
||||
t.is(db.status, 'opening')
|
||||
|
||||
await db.put('beep', 'boop')
|
||||
|
||||
t.is(db.status, 'open')
|
||||
t.is(await db.get('beep', { valueEncoding: 'utf8' }), 'boop')
|
||||
|
||||
await db.close()
|
||||
})
|
||||
|
||||
// NOTE: copied from levelup
|
||||
test('deferred open(): value of queued operation is not stringified', function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = testCommon.factory({ valueEncoding: 'json' })
|
||||
|
||||
db.put('key', { thing: 2 }, function (err) {
|
||||
t.ifError(err)
|
||||
|
||||
db.get('key', function (err, value) {
|
||||
t.ifError(err)
|
||||
t.same(value, { thing: 2 })
|
||||
db.close(t.ifError.bind(t))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// NOTE: copied from levelup
|
||||
test('deferred open(): key of queued operation is not stringified', function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = testCommon.factory({ keyEncoding: 'json' })
|
||||
|
||||
db.put({ thing: 2 }, 'value', function (err) {
|
||||
t.ifError(err)
|
||||
|
||||
db.iterator().next(function (err, key, value) {
|
||||
t.ifError(err, 'no next() error')
|
||||
t.same(key, { thing: 2 })
|
||||
db.close(t.ifError.bind(t))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// NOTE: copied from deferred-leveldown
|
||||
test('cannot operate on closed db', function (t) {
|
||||
t.plan(6)
|
||||
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err)
|
||||
|
||||
db.close(function (err) {
|
||||
t.ifError(err)
|
||||
|
||||
db.put('foo', 'bar', function (err) {
|
||||
t.is(err && err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
})
|
||||
|
||||
try {
|
||||
db.iterator()
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
}
|
||||
|
||||
try {
|
||||
db.keys()
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
}
|
||||
|
||||
try {
|
||||
db.values()
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// NOTE: copied from deferred-leveldown
|
||||
test('cannot operate on closing db', function (t) {
|
||||
t.plan(6)
|
||||
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err)
|
||||
|
||||
db.close(function (err) {
|
||||
t.ifError(err)
|
||||
})
|
||||
|
||||
db.put('foo', 'bar', function (err) {
|
||||
t.is(err && err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
})
|
||||
|
||||
try {
|
||||
db.iterator()
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
}
|
||||
|
||||
try {
|
||||
db.keys()
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
}
|
||||
|
||||
try {
|
||||
db.values()
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
// NOTE: copied from deferred-leveldown
|
||||
test('deferred iterator - cannot operate on closed db', function (t) {
|
||||
t.plan(10)
|
||||
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(function (err) {
|
||||
t.error(err, 'no error')
|
||||
|
||||
db.close(function (err) {
|
||||
t.ifError(err)
|
||||
|
||||
it.next(function (err, key, value) {
|
||||
t.is(err && err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
})
|
||||
|
||||
it.next().catch(function (err) {
|
||||
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
})
|
||||
|
||||
it.nextv(10, function (err, items) {
|
||||
t.is(err && err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
})
|
||||
|
||||
it.nextv(10).catch(function (err) {
|
||||
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
})
|
||||
|
||||
it.all(function (err, items) {
|
||||
t.is(err && err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
})
|
||||
|
||||
it.all().catch(function (err) {
|
||||
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
})
|
||||
|
||||
// Was already closed
|
||||
it.close(function () {
|
||||
t.ifError(err, 'no close() error')
|
||||
})
|
||||
|
||||
it.close().catch(function () {
|
||||
t.fail('no close() error')
|
||||
})
|
||||
|
||||
try {
|
||||
it.seek('foo')
|
||||
} catch (err) {
|
||||
// Should *not* throw
|
||||
t.fail(err)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
const it = db.iterator({ gt: 'foo' })
|
||||
t.ok(it instanceof DeferredIterator)
|
||||
})
|
||||
|
||||
// NOTE: copied from deferred-leveldown
|
||||
test('deferred iterator - cannot operate on closing db', function (t) {
|
||||
t.plan(10)
|
||||
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(function (err) {
|
||||
t.error(err, 'no error')
|
||||
|
||||
db.close(function (err) {
|
||||
t.ifError(err)
|
||||
})
|
||||
|
||||
it.next(function (err, key, value) {
|
||||
t.is(err && err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
})
|
||||
|
||||
it.next().catch(function (err) {
|
||||
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
})
|
||||
|
||||
it.nextv(10, function (err) {
|
||||
t.is(err && err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
})
|
||||
|
||||
it.nextv(10).catch(function (err) {
|
||||
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
})
|
||||
|
||||
it.all(function (err) {
|
||||
t.is(err && err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
})
|
||||
|
||||
it.all().catch(function (err) {
|
||||
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
})
|
||||
|
||||
// Is already closing
|
||||
it.close(function (err) {
|
||||
t.ifError(err, 'no close() error')
|
||||
})
|
||||
|
||||
it.close().catch(function () {
|
||||
t.fail('no close() error')
|
||||
})
|
||||
|
||||
try {
|
||||
it.seek('foo')
|
||||
} catch (err) {
|
||||
// Should *not* throw
|
||||
t.fail(err)
|
||||
}
|
||||
})
|
||||
|
||||
const it = db.iterator({ gt: 'foo' })
|
||||
t.ok(it instanceof DeferredIterator)
|
||||
})
|
||||
}
|
105
node_modules/abstract-level/test/del-test.js
generated
vendored
Normal file
@ -0,0 +1,105 @@
|
||||
'use strict'
|
||||
|
||||
const { verifyNotFoundError, illegalKeys, assertAsync } = require('./util')
|
||||
|
||||
let db
|
||||
|
||||
exports.setUp = function (test, testCommon) {
|
||||
test('setUp db', function (t) {
|
||||
db = testCommon.factory()
|
||||
db.open(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
exports.args = function (test, testCommon) {
|
||||
test('test del() with illegal keys', assertAsync.ctx(function (t) {
|
||||
t.plan(illegalKeys.length * 5)
|
||||
|
||||
for (const { name, key } of illegalKeys) {
|
||||
db.del(key, assertAsync(function (err) {
|
||||
t.ok(err instanceof Error, name + ' - is Error (callback)')
|
||||
t.is(err && err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code (callback)')
|
||||
}))
|
||||
|
||||
db.del(key).catch(function (err) {
|
||||
t.ok(err instanceof Error, name + ' - is Error (promise)')
|
||||
t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code (callback)')
|
||||
})
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
exports.del = function (test, testCommon) {
|
||||
test('test simple del()', function (t) {
|
||||
db.put('foo', 'bar', function (err) {
|
||||
t.error(err)
|
||||
db.del('foo', function (err) {
|
||||
t.error(err)
|
||||
db.get('foo', function (err, value) {
|
||||
t.ok(err, 'entry properly deleted')
|
||||
t.ok(typeof value === 'undefined', 'value is undefined')
|
||||
t.ok(verifyNotFoundError(err), 'NotFound error')
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('test simple del() with promise', function (t) {
|
||||
db.put('foo', 'bar', function (err) {
|
||||
t.error(err)
|
||||
db.del('foo').then(function (err) {
|
||||
t.error(err)
|
||||
db.get('foo', function (err, value) {
|
||||
t.ok(err, 'entry properly deleted')
|
||||
t.ok(typeof value === 'undefined', 'value is undefined')
|
||||
t.ok(verifyNotFoundError(err), 'NotFound error')
|
||||
t.end()
|
||||
})
|
||||
}).catch(t.fail.bind(t))
|
||||
})
|
||||
})
|
||||
|
||||
test('test del on non-existent key', function (t) {
|
||||
db.del('blargh', function (err) {
|
||||
t.error(err)
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
|
||||
test('test del on non-existent key, with promise', async function (t) {
|
||||
return db.del('blargh')
|
||||
})
|
||||
}
|
||||
|
||||
exports.events = function (test, testCommon) {
|
||||
test('test del() emits del event', async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
|
||||
t.ok(db.supports.events.del)
|
||||
|
||||
db.on('del', function (key) {
|
||||
t.is(key, 456)
|
||||
})
|
||||
|
||||
await db.del(456)
|
||||
await db.close()
|
||||
})
|
||||
}
|
||||
|
||||
exports.tearDown = function (test, testCommon) {
|
||||
test('tearDown', function (t) {
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.setUp(test, testCommon)
|
||||
exports.args(test, testCommon)
|
||||
exports.del(test, testCommon)
|
||||
exports.events(test, testCommon)
|
||||
exports.tearDown(test, testCommon)
|
||||
}
|
255
node_modules/abstract-level/test/encoding-buffer-test.js
generated
vendored
Normal file
@ -0,0 +1,255 @@
|
||||
'use strict'
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const textEncoder = new TextEncoder()
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
if (!testCommon.supports.encodings.buffer) return
|
||||
|
||||
// NOTE: adapted from levelup
|
||||
test('test put() and get() with buffer value and buffer valueEncoding', async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
await db.put('test', testBuffer(), { valueEncoding: 'buffer' })
|
||||
t.same(await db.get('test', { valueEncoding: 'buffer' }), testBuffer())
|
||||
return db.close()
|
||||
})
|
||||
|
||||
// NOTE: adapted from levelup
|
||||
test('test put() and get() with buffer value and buffer valueEncoding in factory', async function (t) {
|
||||
const db = testCommon.factory({ valueEncoding: 'buffer' })
|
||||
await db.open()
|
||||
await db.put('test', testBuffer())
|
||||
t.same(await db.get('test'), testBuffer())
|
||||
return db.close()
|
||||
})
|
||||
|
||||
// NOTE: adapted from levelup
|
||||
test('test put() and get() with buffer key and buffer keyEncoding', async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
await db.put(testBuffer(), 'test', { keyEncoding: 'buffer' })
|
||||
t.same(await db.get(testBuffer(), { keyEncoding: 'buffer' }), 'test')
|
||||
return db.close()
|
||||
})
|
||||
|
||||
// NOTE: adapted from levelup
|
||||
test('test put() and get() with buffer key and utf8 keyEncoding', async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
await db.put(Buffer.from('foo🐄'), 'test', { keyEncoding: 'utf8' })
|
||||
t.same(await db.get(Buffer.from('foo🐄'), { keyEncoding: 'utf8' }), 'test')
|
||||
return db.close()
|
||||
})
|
||||
|
||||
// NOTE: adapted from levelup
|
||||
test('test put() and get() with string value and buffer valueEncoding', async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
await db.put('test', 'foo🐄', { valueEncoding: 'buffer' })
|
||||
t.same(await db.get('test', { valueEncoding: 'buffer' }), Buffer.from('foo🐄'))
|
||||
t.same(await db.get('test', { valueEncoding: 'utf8' }), 'foo🐄')
|
||||
return db.close()
|
||||
})
|
||||
|
||||
// NOTE: adapted from memdown
|
||||
test('put() as string, get() as buffer and vice versa', async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
const enc = { keyEncoding: 'buffer', valueEncoding: 'buffer' }
|
||||
const [a, b] = ['🐄', '🐄 says moo']
|
||||
|
||||
const promise1 = db.put(a, a).then(async () => {
|
||||
const value = await db.get(Buffer.from(a), enc)
|
||||
t.same(value, Buffer.from(a), 'got buffer value')
|
||||
})
|
||||
|
||||
const promise2 = db.put(Buffer.from(b), Buffer.from(b), enc).then(async () => {
|
||||
const value = await db.get(b)
|
||||
t.same(value, b, 'got string value')
|
||||
})
|
||||
|
||||
await Promise.all([promise1, promise2])
|
||||
return db.close()
|
||||
})
|
||||
|
||||
// NOTE: adapted from memdown
|
||||
test('put() stringifies input to buffer', async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
await db.put(1, 2)
|
||||
|
||||
const it = db.iterator({ keyEncoding: 'buffer', valueEncoding: 'buffer' })
|
||||
const entries = await it.all()
|
||||
|
||||
t.same(entries[0][0], Buffer.from('1'), 'key was stringified')
|
||||
t.same(entries[0][1], Buffer.from('2'), 'value was stringified')
|
||||
|
||||
return db.close()
|
||||
})
|
||||
|
||||
// NOTE: adapted from memdown
|
||||
test('put() as string, iterate as buffer', async function (t) {
|
||||
const db = testCommon.factory({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
|
||||
await db.open()
|
||||
await db.put('🐄', '🐄')
|
||||
|
||||
const it = db.iterator({ keyEncoding: 'buffer', valueEncoding: 'buffer' })
|
||||
const entries = await it.all()
|
||||
|
||||
t.same(entries, [[Buffer.from('🐄'), Buffer.from('🐄')]])
|
||||
return db.close()
|
||||
})
|
||||
|
||||
// NOTE: adapted from memdown
|
||||
test('put() as buffer, iterate as string', async function (t) {
|
||||
const db = testCommon.factory({ keyEncoding: 'buffer', valueEncoding: 'buffer' })
|
||||
await db.open()
|
||||
await db.put(Buffer.from('🐄'), Buffer.from('🐄'))
|
||||
|
||||
const it = db.iterator({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
|
||||
const entries = await it.all()
|
||||
|
||||
t.same(entries, [['🐄', '🐄']])
|
||||
return db.close()
|
||||
})
|
||||
|
||||
test('put() as view, iterate as view', async function (t) {
|
||||
const db = testCommon.factory({ keyEncoding: 'view', valueEncoding: 'view' })
|
||||
const cow = textEncoder.encode('🐄')
|
||||
await db.open()
|
||||
await db.put(cow, cow)
|
||||
|
||||
const it = db.iterator()
|
||||
const entries = await it.all()
|
||||
const key = Buffer.isBuffer(entries[0][0]) ? Buffer.from(cow) : cow // Valid, Buffer is a Uint8Array
|
||||
const value = Buffer.isBuffer(entries[0][1]) ? Buffer.from(cow) : cow
|
||||
|
||||
t.same(entries, [[key, value]])
|
||||
return db.close()
|
||||
})
|
||||
|
||||
test('put() as string, iterate as view', async function (t) {
|
||||
const db = testCommon.factory({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
|
||||
const cow = textEncoder.encode('🐄')
|
||||
await db.open()
|
||||
await db.put('🐄', '🐄')
|
||||
|
||||
const it = db.iterator({ keyEncoding: 'view', valueEncoding: 'view' })
|
||||
const entries = await it.all()
|
||||
const key = Buffer.isBuffer(entries[0][0]) ? Buffer.from(cow) : cow // Valid, Buffer is a Uint8Array
|
||||
const value = Buffer.isBuffer(entries[0][1]) ? Buffer.from(cow) : cow
|
||||
|
||||
t.same(entries, [[key, value]])
|
||||
return db.close()
|
||||
})
|
||||
|
||||
test('put() as view, iterate as string', async function (t) {
|
||||
const db = testCommon.factory({ keyEncoding: 'view', valueEncoding: 'view' })
|
||||
const cow = textEncoder.encode('🐄')
|
||||
await db.open()
|
||||
await db.put(cow, cow)
|
||||
|
||||
const it = db.iterator({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
|
||||
const entries = await it.all()
|
||||
|
||||
t.same(entries, [['🐄', '🐄']])
|
||||
return db.close()
|
||||
})
|
||||
|
||||
// NOTE: adapted from levelup
|
||||
test('batch() with multiple puts with buffer valueEncoding per batch', async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
await db.batch([
|
||||
{ type: 'put', key: 'foo', value: testBuffer() },
|
||||
{ type: 'put', key: 'bar', value: testBuffer() },
|
||||
{ type: 'put', key: 'baz', value: 'abazvalue' }
|
||||
], { valueEncoding: 'buffer' })
|
||||
|
||||
t.same(await db.get('foo', { valueEncoding: 'buffer' }), testBuffer())
|
||||
t.same(await db.get('bar', { valueEncoding: 'buffer' }), testBuffer())
|
||||
t.same(await db.get('baz', { valueEncoding: 'buffer' }), Buffer.from('abazvalue'))
|
||||
|
||||
return db.close()
|
||||
})
|
||||
|
||||
test('batch() with multiple puts with buffer valueEncoding per operation', async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
await db.batch([
|
||||
{ type: 'put', key: 'foo', value: testBuffer(), valueEncoding: 'buffer' },
|
||||
{ type: 'put', key: 'bar', value: testBuffer(), valueEncoding: 'buffer' },
|
||||
{ type: 'put', key: 'baz', value: 'abazvalue', valueEncoding: 'buffer' }
|
||||
])
|
||||
|
||||
t.same(await db.get('foo', { valueEncoding: 'buffer' }), testBuffer())
|
||||
t.same(await db.get('bar', { valueEncoding: 'buffer' }), testBuffer())
|
||||
t.same(await db.get('baz', { valueEncoding: 'buffer' }), Buffer.from('abazvalue'))
|
||||
|
||||
return db.close()
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test('batch() with buffer encoding in factory', async function (t) {
|
||||
const operations = [{
|
||||
type: 'put',
|
||||
key: Buffer.from([1, 2, 3]),
|
||||
value: Buffer.from([4, 5, 6])
|
||||
}, {
|
||||
type: 'put',
|
||||
key: Buffer.from([7, 8, 9]),
|
||||
value: Buffer.from([10, 11, 12])
|
||||
}]
|
||||
|
||||
const db = testCommon.factory({ keyEncoding: 'buffer', valueEncoding: 'buffer' })
|
||||
await db.open()
|
||||
await db.batch(operations)
|
||||
|
||||
t.same(await db.get(operations[0].key), operations[0].value)
|
||||
t.same(await db.get(operations[1].key), operations[1].value)
|
||||
|
||||
return db.close()
|
||||
})
|
||||
|
||||
for (const keyEncoding of ['buffer', 'view']) {
|
||||
// NOTE: adapted from memdown
|
||||
test(`storage is byte-aware (${keyEncoding} encoding)`, function (t) {
|
||||
const db = testCommon.factory({ keyEncoding })
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err, 'no open error')
|
||||
|
||||
const one = Buffer.from('80', 'hex')
|
||||
const two = Buffer.from('c0', 'hex')
|
||||
|
||||
t.ok(two.toString() === one.toString(), 'would be equal when not byte-aware')
|
||||
t.ok(two.compare(one) > 0, 'but greater when byte-aware')
|
||||
|
||||
db.put(one, 'one', function (err) {
|
||||
t.ifError(err, 'no put() error')
|
||||
|
||||
db.get(one, function (err, value) {
|
||||
t.ifError(err, 'no get() error')
|
||||
t.is(value, 'one', 'value one ok')
|
||||
|
||||
db.put(two, 'two', function (err) {
|
||||
t.ifError(err, 'no put() error')
|
||||
|
||||
db.get(one, function (err, value) {
|
||||
t.ifError(err, 'no get() error')
|
||||
t.is(value, 'one', 'value one did not change')
|
||||
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function testBuffer () {
|
||||
return Buffer.from('0080c0ff', 'hex')
|
||||
}
|
99
node_modules/abstract-level/test/encoding-custom-test.js
generated
vendored
Normal file
@ -0,0 +1,99 @@
|
||||
'use strict'
|
||||
|
||||
// NOTE: copied from levelup
|
||||
exports.all = function (test, testCommon) {
|
||||
for (const deferred of [false, true]) {
|
||||
test(`custom encoding: simple-object values (deferred: ${deferred})`, function (t) {
|
||||
run(t, deferred, [
|
||||
{ key: '0', value: 0 },
|
||||
{ key: '1', value: 1 },
|
||||
{ key: 'string', value: 'a string' },
|
||||
{ key: 'true', value: true },
|
||||
{ key: 'false', value: false }
|
||||
])
|
||||
})
|
||||
|
||||
test(`custom encoding: simple-object keys (deferred: ${deferred})`, function (t) {
|
||||
// Test keys that would be considered the same with default utf8 encoding.
|
||||
// Because String([1]) === String(1).
|
||||
run(t, deferred, [
|
||||
{ value: '0', key: [1] },
|
||||
{ value: '1', key: 1 },
|
||||
{ value: 'string', key: 'a string' },
|
||||
{ value: 'true', key: true },
|
||||
{ value: 'false', key: false }
|
||||
])
|
||||
})
|
||||
|
||||
test(`custom encoding: complex-object values (deferred: ${deferred})`, function (t) {
|
||||
run(t, deferred, [{
|
||||
key: '0',
|
||||
value: {
|
||||
foo: 'bar',
|
||||
bar: [1, 2, 3],
|
||||
bang: { yes: true, no: false }
|
||||
}
|
||||
}])
|
||||
})
|
||||
|
||||
test(`custom encoding: complex-object keys (deferred: ${deferred})`, function (t) {
|
||||
// Test keys that would be considered the same with default utf8 encoding.
|
||||
// Because String({}) === String({}) === '[object Object]'.
|
||||
run(t, deferred, [{
|
||||
value: '0',
|
||||
key: {
|
||||
foo: 'bar',
|
||||
bar: [1, 2, 3],
|
||||
bang: { yes: true, no: false }
|
||||
}
|
||||
}, {
|
||||
value: '1',
|
||||
key: {
|
||||
foo: 'different',
|
||||
bar: [1, 2, 3],
|
||||
bang: { yes: true, no: false }
|
||||
}
|
||||
}])
|
||||
})
|
||||
}
|
||||
|
||||
function run (t, deferred, entries) {
|
||||
const customEncoding = {
|
||||
encode: JSON.stringify,
|
||||
decode: JSON.parse,
|
||||
format: 'utf8',
|
||||
type: 'custom'
|
||||
}
|
||||
|
||||
const db = testCommon.factory({
|
||||
keyEncoding: customEncoding,
|
||||
valueEncoding: customEncoding
|
||||
})
|
||||
|
||||
const operations = entries.map(entry => ({ type: 'put', ...entry }))
|
||||
const init = deferred ? (fn) => fn() : db.open.bind(db)
|
||||
|
||||
init(function (err) {
|
||||
t.ifError(err, 'no init() error')
|
||||
|
||||
db.batch(operations, function (err) {
|
||||
t.ifError(err, 'no batch() error')
|
||||
|
||||
let pending = entries.length
|
||||
const next = () => {
|
||||
if (--pending === 0) {
|
||||
db.close(t.end.bind(t))
|
||||
}
|
||||
}
|
||||
|
||||
for (const entry of entries) {
|
||||
db.get(entry.key, function (err, value) {
|
||||
t.ifError(err, 'no get() error')
|
||||
t.same(value, entry.value)
|
||||
next()
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
}
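// --- Added illustrative sketch, not part of the upstream test file ---
// The run() helper above passes a { encode, decode, format, type } object as
// both keyEncoding and valueEncoding. A minimal sketch of direct usage,
// assuming `db` is any abstract-level instance and that put()/get() accept
// the same per-operation encoding options the surrounding tests use:
async function customEncodingSketch (db) {
  const custom = {
    encode: JSON.stringify, // applied before data is written in 'utf8' format
    decode: JSON.parse, // applied after data is read back
    format: 'utf8',
    type: 'example-json' // hypothetical name; any unique string
  }

  await db.put({ id: 1 }, { hello: 'world' }, { keyEncoding: custom, valueEncoding: custom })
  return db.get({ id: 1 }, { keyEncoding: custom, valueEncoding: custom }) // -> { hello: 'world' }
}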
|
64
node_modules/abstract-level/test/encoding-decode-error-test.js
generated
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
'use strict'
|
||||
|
||||
let db
|
||||
let keySequence = 0
|
||||
|
||||
const testKey = () => 'test' + (++keySequence)
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
test('setup', async function (t) {
|
||||
db = testCommon.factory()
|
||||
return db.open()
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test('get() and getMany() forward decode error', function (t) {
|
||||
const key = testKey()
|
||||
const valueEncoding = {
|
||||
encode: (v) => v,
|
||||
decode: (v) => { throw new Error('decode error xyz') },
|
||||
format: 'utf8'
|
||||
}
|
||||
|
||||
db.put(key, 'bar', { valueEncoding }, function (err) {
|
||||
t.ifError(err, 'no put() error')
|
||||
|
||||
db.get(key, { valueEncoding }, function (err, value) {
|
||||
t.is(err && err.code, 'LEVEL_DECODE_ERROR')
|
||||
t.is(err && err.cause && err.cause.message, 'decode error xyz')
|
||||
t.is(value, undefined)
|
||||
|
||||
db.getMany(['other-key', key], { valueEncoding }, function (err, values) {
|
||||
t.is(err && err.code, 'LEVEL_DECODE_ERROR')
|
||||
t.is(err && err.cause && err.cause.message, 'decode error xyz')
|
||||
t.is(values, undefined)
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test('get() and getMany() yield encoding error if stored value is invalid', function (t) {
|
||||
const key = testKey()
|
||||
|
||||
db.put(key, 'this {} is [] not : json', { valueEncoding: 'utf8' }, function (err) {
|
||||
t.ifError(err, 'no put() error')
|
||||
|
||||
db.get(key, { valueEncoding: 'json' }, function (err) {
|
||||
t.is(err && err.code, 'LEVEL_DECODE_ERROR')
|
||||
t.is(err && err.cause.name, 'SyntaxError') // From JSON.parse()
|
||||
|
||||
db.getMany(['other-key', key], { valueEncoding: 'json' }, function (err) {
|
||||
t.is(err && err.code, 'LEVEL_DECODE_ERROR')
|
||||
t.is(err && err.cause.name, 'SyntaxError') // From JSON.parse()
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('teardown', async function (t) {
|
||||
return db.close()
|
||||
})
|
||||
}
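// --- Added illustrative sketch, not part of the upstream test file ---
// The tests above assert that a failing decode surfaces as an error with code
// 'LEVEL_DECODE_ERROR' and the underlying error attached as `cause`. A caller
// could branch on that code roughly like this (assuming `db` is an
// abstract-level instance and 'some-key' may hold data written with a
// different value encoding):
async function readOrFallback (db) {
  try {
    return await db.get('some-key', { valueEncoding: 'json' })
  } catch (err) {
    if (err.code === 'LEVEL_DECODE_ERROR') {
      // Stored bytes were not valid for this encoding; err.cause holds the
      // original error (e.g. a SyntaxError from JSON.parse).
      return db.get('some-key', { valueEncoding: 'utf8' })
    }
    throw err
  }
}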
|
88
node_modules/abstract-level/test/encoding-json-test.js
generated
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
'use strict'
|
||||
|
||||
// NOTE: copied from levelup
|
||||
exports.all = function (test, testCommon) {
|
||||
for (const deferred of [false, true]) {
|
||||
test(`json encoding: simple-object values (deferred: ${deferred})`, function (t) {
|
||||
run(t, deferred, [
|
||||
{ key: '0', value: 0 },
|
||||
{ key: '1', value: 1 },
|
||||
{ key: '2', value: 'a string' },
|
||||
{ key: '3', value: true },
|
||||
{ key: '4', value: false }
|
||||
])
|
||||
})
|
||||
|
||||
test(`json encoding: simple-object keys (deferred: ${deferred})`, function (t) {
|
||||
run(t, deferred, [
|
||||
{ value: 'string', key: 'a string' },
|
||||
{ value: '0', key: 0 },
|
||||
{ value: '1', key: 1 },
|
||||
{ value: 'false', key: false },
|
||||
{ value: 'true', key: true }
|
||||
])
|
||||
})
|
||||
|
||||
test(`json encoding: complex-object values (deferred: ${deferred})`, function (t) {
|
||||
run(t, deferred, [{
|
||||
key: '0',
|
||||
value: {
|
||||
foo: 'bar',
|
||||
bar: [1, 2, 3],
|
||||
bang: { yes: true, no: false }
|
||||
}
|
||||
}])
|
||||
})
|
||||
|
||||
test(`json encoding: complex-object keys (deferred: ${deferred})`, function (t) {
|
||||
run(t, deferred, [{
|
||||
value: '0',
|
||||
key: {
|
||||
foo: 'bar',
|
||||
bar: [1, 2, 3],
|
||||
bang: { yes: true, no: false }
|
||||
}
|
||||
}])
|
||||
})
|
||||
}
|
||||
|
||||
function run (t, deferred, entries) {
|
||||
const db = testCommon.factory({ keyEncoding: 'json', valueEncoding: 'json' })
|
||||
const operations = entries.map(entry => ({ type: 'put', ...entry }))
|
||||
const init = deferred ? (fn) => fn() : db.open.bind(db)
|
||||
|
||||
init(function (err) {
|
||||
t.ifError(err, 'no init() error')
|
||||
|
||||
db.batch(operations, function (err) {
|
||||
t.ifError(err, 'no batch() error')
|
||||
|
||||
let pending = entries.length + 1
|
||||
const next = () => {
|
||||
if (--pending === 0) db.close(t.end.bind(t))
|
||||
}
|
||||
|
||||
testGet(next)
|
||||
testIterator(next)
|
||||
})
|
||||
})
|
||||
|
||||
function testGet (next) {
|
||||
for (const entry of entries) {
|
||||
db.get(entry.key, function (err, value) {
|
||||
t.ifError(err, 'no get() error')
|
||||
t.same(value, entry.value)
|
||||
next()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function testIterator (next) {
|
||||
db.iterator().all(function (err, result) {
|
||||
t.ifError(err, 'no all() error')
|
||||
t.same(result, entries.map(kv => [kv.key, kv.value]))
|
||||
next()
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
141
node_modules/abstract-level/test/encoding-test.js
generated
vendored
Normal file
@@ -0,0 +1,141 @@
|
||||
'use strict'
|
||||
|
||||
let db
|
||||
let keySequence = 0
|
||||
|
||||
const testKey = () => 'test' + (++keySequence)
|
||||
|
||||
// TODO: test encoding options on every method. This is largely
|
||||
// covered (indirectly) by other tests, but a dedicated property-
|
||||
// based test for each would be good to have.
|
||||
exports.all = function (test, testCommon) {
|
||||
test('setup', async function (t) {
|
||||
db = testCommon.factory()
|
||||
return db.open()
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test('encodings default to utf8', function (t) {
|
||||
t.is(db.keyEncoding().commonName, 'utf8')
|
||||
t.is(db.valueEncoding().commonName, 'utf8')
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('can set encoding options in factory', async function (t) {
|
||||
const dbs = []
|
||||
|
||||
for (const name of ['buffer', 'view', 'json']) {
|
||||
if (!testCommon.supports.encodings[name]) continue
|
||||
|
||||
const db1 = testCommon.factory({ keyEncoding: name })
|
||||
const db2 = testCommon.factory({ valueEncoding: name })
|
||||
const db3 = testCommon.factory({ keyEncoding: name, valueEncoding: name })
|
||||
|
||||
t.is(db1.keyEncoding().commonName, name)
|
||||
t.is(db1.keyEncoding(), db1.keyEncoding(name))
|
||||
t.is(db1.valueEncoding().commonName, 'utf8')
|
||||
t.is(db1.valueEncoding(), db1.valueEncoding('utf8'))
|
||||
|
||||
t.is(db2.keyEncoding().commonName, 'utf8')
|
||||
t.is(db2.keyEncoding(), db2.keyEncoding('utf8'))
|
||||
t.is(db2.valueEncoding().commonName, name)
|
||||
t.is(db2.valueEncoding(), db2.valueEncoding(name))
|
||||
|
||||
t.is(db3.keyEncoding().commonName, name)
|
||||
t.is(db3.keyEncoding(), db3.keyEncoding(name))
|
||||
t.is(db3.valueEncoding().commonName, name)
|
||||
t.is(db3.valueEncoding(), db3.valueEncoding(name))
|
||||
|
||||
dbs.push(db1, db2, db3)
|
||||
}
|
||||
|
||||
await Promise.all(dbs.map(db => db.close()))
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
for (const deferred of [false, true]) {
|
||||
test(`default utf8 encoding stringifies numbers (deferred: ${deferred})`, async function (t) {
|
||||
const db = testCommon.factory()
|
||||
if (!deferred) await db.open()
|
||||
await db.put(1, 2)
|
||||
t.is(await db.get(1), '2')
|
||||
return db.close()
|
||||
})
|
||||
}
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test('can decode from string to json', function (t) {
|
||||
const key = testKey()
|
||||
const data = { thisis: 'json' }
|
||||
|
||||
db.put(key, JSON.stringify(data), { valueEncoding: 'utf8' }, function (err) {
|
||||
t.ifError(err, 'no put() error')
|
||||
|
||||
db.get(key, { valueEncoding: 'json' }, function (err, value) {
|
||||
t.ifError(err, 'no get() error')
|
||||
t.same(value, data, 'got parsed object')
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test('can decode from json to string', function (t) {
|
||||
const data = { thisis: 'json' }
|
||||
const key = testKey()
|
||||
|
||||
db.put(key, data, { valueEncoding: 'json' }, function (err) {
|
||||
t.ifError(err, 'no put() error')
|
||||
|
||||
db.get(key, { valueEncoding: 'utf8' }, function (err, value) {
|
||||
t.ifError(err, 'no get() error')
|
||||
t.is(value, JSON.stringify(data), 'got unparsed JSON string')
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test('getMany() skips decoding not-found values', function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const valueEncoding = {
|
||||
encode: JSON.stringify,
|
||||
decode (value) {
|
||||
t.is(value, JSON.stringify(data))
|
||||
return JSON.parse(value)
|
||||
},
|
||||
format: 'utf8'
|
||||
}
|
||||
|
||||
const data = { beep: 'boop' }
|
||||
const key = testKey()
|
||||
|
||||
db.put(key, data, { valueEncoding }, function (err) {
|
||||
t.ifError(err, 'no put() error')
|
||||
|
||||
db.getMany([key, testKey()], { valueEncoding }, function (err, values) {
|
||||
t.ifError(err, 'no getMany() error')
|
||||
t.same(values, [data, undefined])
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// NOTE: adapted from memdown
|
||||
test('number keys with utf8 encoding', async function (t) {
|
||||
const db = testCommon.factory()
|
||||
const numbers = [-Infinity, 0, 12, 2, +Infinity]
|
||||
|
||||
await db.open()
|
||||
await db.batch(numbers.map(key => ({ type: 'put', key, value: 'value' })))
|
||||
|
||||
const keys = await db.keys({ keyEncoding: 'utf8' }).all()
|
||||
t.same(keys, numbers.map(String), 'sorts lexicographically')
|
||||
|
||||
return db.close()
|
||||
})
|
||||
|
||||
test('teardown', async function (t) {
|
||||
return db.close()
|
||||
})
|
||||
}
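// --- Added illustrative sketch, not part of the upstream test file ---
// The string/json round-trip tests above show that encodings may differ per
// operation: a value written with one encoding can be read back with another
// as long as the stored representation is compatible. Condensed (assuming
// `db` is an open abstract-level instance):
async function mixedEncodingsSketch (db) {
  const data = { thisis: 'json' }

  // Store a pre-serialized string, read it back parsed.
  await db.put('a', JSON.stringify(data), { valueEncoding: 'utf8' })
  const parsed = await db.get('a', { valueEncoding: 'json' }) // -> { thisis: 'json' }

  // Store via the json encoding, read back the raw string.
  await db.put('b', data, { valueEncoding: 'json' })
  const raw = await db.get('b', { valueEncoding: 'utf8' }) // -> '{"thisis":"json"}'

  return { parsed, raw }
}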
|
52
node_modules/abstract-level/test/factory-test.js
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
'use strict'
|
||||
|
||||
module.exports = function (test, testCommon) {
|
||||
test('testCommon.factory() returns valid database', function (t) {
|
||||
t.plan(6)
|
||||
|
||||
const db = testCommon.factory()
|
||||
const kEvent = Symbol('event')
|
||||
|
||||
// Avoid instanceof, for levelup compatibility tests
|
||||
t.is(typeof db, 'object', 'is an object')
|
||||
t.isNot(db, null, 'is not null')
|
||||
t.is(typeof db.open, 'function', 'has open() method')
|
||||
t.is(typeof db.on, 'function', 'has on() method')
|
||||
t.is(typeof db.emit, 'function', 'has emit() method')
|
||||
|
||||
db.once(kEvent, (v) => t.is(v, 'foo', 'got event'))
|
||||
db.emit(kEvent, 'foo')
|
||||
})
|
||||
|
||||
test('testCommon.factory() returns a unique database', function (t) {
|
||||
const db1 = testCommon.factory()
|
||||
const db2 = testCommon.factory()
|
||||
|
||||
t.isNot(db1, db2, 'unique instances')
|
||||
|
||||
function close () {
|
||||
db1.close(function (err) {
|
||||
t.error(err, 'no error while closing db1')
|
||||
db2.close(function (err) {
|
||||
t.error(err, 'no error while closing db2')
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
db1.open(function (err) {
|
||||
t.error(err, 'no error while opening db1')
|
||||
db2.open(function (err) {
|
||||
t.error(err, 'no error while opening db2')
|
||||
db1.put('key', 'value', function (err) {
|
||||
t.error(err, 'put key in db1')
|
||||
db2.get('key', function (err, value) {
|
||||
t.ok(err, 'db2 should be empty')
|
||||
t.is(value, undefined, 'db2 should be empty')
|
||||
close()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
276
node_modules/abstract-level/test/get-many-test.js
generated
vendored
Normal file
@@ -0,0 +1,276 @@
|
||||
'use strict'
|
||||
|
||||
const { assertAsync, illegalKeys } = require('./util')
|
||||
|
||||
let db
|
||||
|
||||
/**
|
||||
* @param {import('tape')} test
|
||||
*/
|
||||
exports.setUp = function (test, testCommon) {
|
||||
test('setUp db', function (t) {
|
||||
db = testCommon.factory()
|
||||
db.open(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {import('tape')} test
|
||||
*/
|
||||
exports.args = function (test, testCommon) {
|
||||
test('test getMany() requires an array argument (callback)', assertAsync.ctx(function (t) {
|
||||
// Add 1 assertion for every assertAsync()
|
||||
t.plan(6)
|
||||
|
||||
db.getMany('foo', assertAsync(function (err) {
|
||||
t.is(err.name, 'TypeError')
|
||||
t.is(err && err.message, "The first argument 'keys' must be an array")
|
||||
}))
|
||||
db.getMany('foo', {}, assertAsync(function (err) {
|
||||
t.is(err.name, 'TypeError')
|
||||
t.is(err && err.message, "The first argument 'keys' must be an array")
|
||||
}))
|
||||
}))
|
||||
|
||||
test('test getMany() requires an array argument (promise)', function (t) {
|
||||
t.plan(6)
|
||||
|
||||
db.getMany().catch(function (err) {
|
||||
t.is(err.name, 'TypeError')
|
||||
t.is(err && err.message, "The first argument 'keys' must be an array")
|
||||
})
|
||||
db.getMany('foo').catch(function (err) {
|
||||
t.is(err.name, 'TypeError')
|
||||
t.is(err && err.message, "The first argument 'keys' must be an array")
|
||||
})
|
||||
db.getMany('foo', {}).catch(function (err) {
|
||||
t.is(err.name, 'TypeError')
|
||||
t.is(err && err.message, "The first argument 'keys' must be an array")
|
||||
})
|
||||
})
|
||||
|
||||
test('test getMany() with illegal keys', assertAsync.ctx(function (t) {
|
||||
// Add 1 assertion for every assertAsync()
|
||||
t.plan(illegalKeys.length * 10)
|
||||
|
||||
for (const { name, key } of illegalKeys) {
|
||||
db.getMany([key], assertAsync(function (err) {
|
||||
t.ok(err instanceof Error, name + ' - is Error (callback)')
|
||||
t.is(err && err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code (callback)')
|
||||
}))
|
||||
|
||||
db.getMany(['valid', key], assertAsync(function (err) {
|
||||
t.ok(err instanceof Error, name + ' - is Error (callback, second key)')
|
||||
t.is(err && err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code (callback, second key)')
|
||||
}))
|
||||
|
||||
db.getMany([key]).catch(function (err) {
|
||||
t.ok(err instanceof Error, name + ' - is Error (promise)')
|
||||
t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code (promise)')
|
||||
})
|
||||
|
||||
db.getMany(['valid', key]).catch(function (err) {
|
||||
t.ok(err instanceof Error, name + ' - is Error (promise, second key)')
|
||||
t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code (promise, second key)')
|
||||
})
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {import('tape')} test
|
||||
*/
|
||||
exports.getMany = function (test, testCommon) {
|
||||
test('test simple getMany()', function (t) {
|
||||
db.put('foo', 'bar', function (err) {
|
||||
t.error(err)
|
||||
|
||||
function verify (err, values) {
|
||||
t.error(err)
|
||||
t.ok(Array.isArray(values), 'got an array')
|
||||
t.is(values.length, 1, 'array has 1 element')
|
||||
t.is(values[0], 'bar')
|
||||
}
|
||||
|
||||
db.getMany(['foo'], function (err, values) {
|
||||
verify(err, values)
|
||||
|
||||
db.getMany(['foo'], {}, function (err, values) {
|
||||
verify(err, values)
|
||||
|
||||
db.getMany(['foo'], { valueEncoding: 'utf8' }, function (err, values) {
|
||||
t.error(err)
|
||||
t.is(values && typeof values[0], 'string', 'should be string if not buffer')
|
||||
t.same(values, ['bar'])
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('test getMany() with multiple keys', function (t) {
|
||||
t.plan(5)
|
||||
|
||||
db.put('beep', 'boop', function (err) {
|
||||
t.ifError(err)
|
||||
|
||||
db.getMany(['foo', 'beep'], { valueEncoding: 'utf8' }, function (err, values) {
|
||||
t.ifError(err)
|
||||
t.same(values, ['bar', 'boop'])
|
||||
})
|
||||
|
||||
db.getMany(['beep', 'foo'], { valueEncoding: 'utf8' }, function (err, values) {
|
||||
t.ifError(err)
|
||||
t.same(values, ['boop', 'bar'], 'maintains order of input keys')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('test empty getMany()', assertAsync.ctx(function (t) {
|
||||
const encodings = Object.keys(db.supports.encodings).filter(k => db.supports.encodings[k])
|
||||
t.plan(encodings.length * 3)
|
||||
|
||||
for (const valueEncoding of encodings) {
|
||||
db.getMany([], { valueEncoding }, assertAsync(function (err, values) {
|
||||
t.ifError(err)
|
||||
t.same(values, [])
|
||||
}))
|
||||
}
|
||||
}))
|
||||
|
||||
test('test not-found getMany()', assertAsync.ctx(function (t) {
|
||||
const encodings = Object.keys(db.supports.encodings).filter(k => db.supports.encodings[k])
|
||||
t.plan(encodings.length * 3)
|
||||
|
||||
for (const valueEncoding of encodings) {
|
||||
db.getMany(['nope', 'another'], { valueEncoding }, assertAsync(function (err, values) {
|
||||
t.ifError(err)
|
||||
t.same(values, [undefined, undefined])
|
||||
}))
|
||||
}
|
||||
}))
|
||||
|
||||
test('test getMany() with promise', async function (t) {
|
||||
t.same(await db.getMany(['foo'], { valueEncoding: 'utf8' }), ['bar'])
|
||||
t.same(await db.getMany(['beep'], { valueEncoding: 'utf8' }), ['boop'])
|
||||
t.same(await db.getMany(['foo', 'beep'], { valueEncoding: 'utf8' }), ['bar', 'boop'])
|
||||
t.same(await db.getMany(['beep', 'foo'], { valueEncoding: 'utf8' }), ['boop', 'bar'])
|
||||
t.same(await db.getMany(['beep', 'foo', 'nope'], { valueEncoding: 'utf8' }), ['boop', 'bar', undefined])
|
||||
t.same(await db.getMany([], { valueEncoding: 'utf8' }), [])
|
||||
})
|
||||
|
||||
test('test simultaneous getMany()', function (t) {
|
||||
db.put('hello', 'world', function (err) {
|
||||
t.error(err)
|
||||
|
||||
let completed = 0
|
||||
const done = function () {
|
||||
if (++completed === 20) t.end()
|
||||
}
|
||||
|
||||
for (let i = 0; i < 10; ++i) {
|
||||
db.getMany(['hello'], function (err, values) {
|
||||
t.error(err)
|
||||
t.is(values.length, 1)
|
||||
t.is(values[0] && values[0].toString(), 'world')
|
||||
done()
|
||||
})
|
||||
}
|
||||
|
||||
for (let i = 0; i < 10; ++i) {
|
||||
db.getMany(['not found'], function (err, values) {
|
||||
t.error(err)
|
||||
t.same(values, [undefined])
|
||||
done()
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
test('test getMany() on opening db', assertAsync.ctx(function (t) {
|
||||
t.plan(2 * 2 * 5)
|
||||
|
||||
// Also test empty array because it has a fast-path
|
||||
for (const keys of [['foo'], []]) {
|
||||
// Opening should make no difference, because we call it after getMany()
|
||||
for (const open of [true, false]) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
t.is(db.status, 'opening')
|
||||
|
||||
db.getMany(keys, assertAsync(function (err, values) {
|
||||
t.ifError(err, 'no error')
|
||||
t.same(values, keys.map(_ => undefined))
|
||||
}))
|
||||
|
||||
if (open) {
|
||||
db.open(t.error.bind(t))
|
||||
} else {
|
||||
t.pass()
|
||||
}
|
||||
}
|
||||
}
|
||||
}))
|
||||
|
||||
test('test getMany() on closed db', function (t) {
|
||||
t.plan(2 * 4)
|
||||
|
||||
// Also test empty array because it has a fast-path
|
||||
for (const keys of [['foo'], []]) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err)
|
||||
|
||||
db.close(assertAsync.with(t, function (err) {
|
||||
t.ifError(err)
|
||||
|
||||
db.getMany(keys, assertAsync(function (err) {
|
||||
t.is(err && err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
}))
|
||||
}))
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
test('test getMany() on closing db', function (t) {
|
||||
t.plan(2 * 4)
|
||||
|
||||
// Also test empty array because it has a fast-path
|
||||
for (const keys of [['foo'], []]) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(assertAsync.with(t, function (err) {
|
||||
t.ifError(err)
|
||||
|
||||
db.close(function (err) {
|
||||
t.ifError(err)
|
||||
})
|
||||
|
||||
db.getMany(keys, assertAsync(function (err) {
|
||||
t.is(err && err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
}))
|
||||
}))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {import('tape')} test
|
||||
*/
|
||||
exports.tearDown = function (test, testCommon) {
|
||||
test('tearDown', function (t) {
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {import('tape')} test
|
||||
*/
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.setUp(test, testCommon)
|
||||
exports.args(test, testCommon)
|
||||
exports.getMany(test, testCommon)
|
||||
exports.tearDown(test, testCommon)
|
||||
}
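// --- Added illustrative sketch, not part of the upstream test file ---
// Summarizing the behaviour exercised above: getMany() takes an array of keys
// and resolves to an array of the same length and order, with undefined in
// place of keys that were not found. Condensed (assuming `db` is an open
// abstract-level instance):
async function getManySketch (db) {
  await db.put('foo', 'bar')
  const values = await db.getMany(['foo', 'missing'])
  return values // -> ['bar', undefined]
}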
|
144
node_modules/abstract-level/test/get-test.js
generated
vendored
Normal file
@@ -0,0 +1,144 @@
|
||||
'use strict'
|
||||
|
||||
const isBuffer = require('is-buffer')
|
||||
const { verifyNotFoundError, illegalKeys, assertAsync } = require('./util')
|
||||
|
||||
let db
|
||||
|
||||
exports.setUp = function (test, testCommon) {
|
||||
test('setUp db', function (t) {
|
||||
db = testCommon.factory()
|
||||
db.open(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
exports.args = function (test, testCommon) {
|
||||
test('test get() with illegal keys', assertAsync.ctx(function (t) {
|
||||
t.plan(illegalKeys.length * 5)
|
||||
|
||||
for (const { name, key } of illegalKeys) {
|
||||
db.get(key, assertAsync(function (err) {
|
||||
t.ok(err instanceof Error, name + ' - is Error (callback)')
|
||||
t.is(err && err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code (callback)')
|
||||
}))
|
||||
|
||||
db.get(key).catch(function (err) {
|
||||
t.ok(err instanceof Error, name + ' - is Error (promise)')
|
||||
t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code (promise)')
|
||||
})
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
exports.get = function (test, testCommon) {
|
||||
test('test simple get()', function (t) {
|
||||
db.put('foo', 'bar', function (err) {
|
||||
t.error(err)
|
||||
db.get('foo', function (err, value) {
|
||||
t.error(err)
|
||||
t.is(value, 'bar')
|
||||
|
||||
db.get('foo', {}, function (err, value) { // same but with {}
|
||||
t.error(err)
|
||||
t.is(value, 'bar')
|
||||
|
||||
db.get('foo', { valueEncoding: 'utf8' }, function (err, value) {
|
||||
t.error(err)
|
||||
t.is(value, 'bar')
|
||||
|
||||
if (!db.supports.encodings.buffer) {
|
||||
return t.end()
|
||||
}
|
||||
|
||||
db.get('foo', { valueEncoding: 'buffer' }, function (err, value) {
|
||||
t.error(err)
|
||||
t.ok(isBuffer(value), 'should be buffer')
|
||||
t.is(value.toString(), 'bar')
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('test get() with promise', function (t) {
|
||||
db.put('promises', 'yes', function (err) {
|
||||
t.error(err)
|
||||
|
||||
db.get('promises').then(function (value) {
|
||||
t.is(value, 'yes', 'got value without options')
|
||||
|
||||
db.get('not found').catch(function (err) {
|
||||
t.ok(err, 'should error')
|
||||
t.ok(verifyNotFoundError(err), 'correct error')
|
||||
|
||||
if (!db.supports.encodings.buffer) {
|
||||
return t.end()
|
||||
}
|
||||
|
||||
db.get('promises', { valueEncoding: 'buffer' }).then(function (value) {
|
||||
t.ok(isBuffer(value), 'is buffer')
|
||||
t.is(value.toString(), 'yes', 'correct value')
|
||||
t.end()
|
||||
}).catch(t.fail.bind(t))
|
||||
})
|
||||
}).catch(t.fail.bind(t))
|
||||
})
|
||||
})
|
||||
|
||||
test('test simultaneous get()', function (t) {
|
||||
db.put('hello', 'world', function (err) {
|
||||
t.error(err)
|
||||
let completed = 0
|
||||
const done = function () {
|
||||
if (++completed === 20) t.end()
|
||||
}
|
||||
|
||||
for (let i = 0; i < 10; ++i) {
|
||||
db.get('hello', function (err, value) {
|
||||
t.error(err)
|
||||
t.is(value.toString(), 'world')
|
||||
done()
|
||||
})
|
||||
}
|
||||
|
||||
for (let i = 0; i < 10; ++i) {
|
||||
db.get('not found', function (err, value) {
|
||||
t.ok(err, 'should error')
|
||||
t.ok(verifyNotFoundError(err), 'correct error')
|
||||
t.ok(typeof value === 'undefined', 'value is undefined')
|
||||
done()
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
test('test get() not found error is asynchronous', function (t) {
|
||||
t.plan(4)
|
||||
|
||||
let async = false
|
||||
|
||||
db.get('not found', function (err, value) {
|
||||
t.ok(err, 'should error')
|
||||
t.ok(verifyNotFoundError(err), 'correct error')
|
||||
t.ok(typeof value === 'undefined', 'value is undefined')
|
||||
t.ok(async, 'callback is asynchronous')
|
||||
})
|
||||
|
||||
async = true
|
||||
})
|
||||
}
|
||||
|
||||
exports.tearDown = function (test, testCommon) {
|
||||
test('tearDown', function (t) {
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.setUp(test, testCommon)
|
||||
exports.args(test, testCommon)
|
||||
exports.get(test, testCommon)
|
||||
exports.tearDown(test, testCommon)
|
||||
}
|
72
node_modules/abstract-level/test/index.js
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
'use strict'
|
||||
|
||||
const common = require('./common')
|
||||
const kSublevels = Symbol('sublevels')
|
||||
|
||||
function suite (options) {
|
||||
const testCommon = common(options)
|
||||
const test = testCommon.test
|
||||
|
||||
require('./factory-test')(test, testCommon)
|
||||
require('./manifest-test')(test, testCommon)
|
||||
require('./open-test').all(test, testCommon)
|
||||
require('./close-test').all(test, testCommon)
|
||||
|
||||
if (testCommon.supports.createIfMissing) {
|
||||
require('./open-create-if-missing-test').all(test, testCommon)
|
||||
}
|
||||
|
||||
if (testCommon.supports.errorIfExists) {
|
||||
require('./open-error-if-exists-test').all(test, testCommon)
|
||||
}
|
||||
|
||||
require('./put-test').all(test, testCommon)
|
||||
require('./get-test').all(test, testCommon)
|
||||
require('./del-test').all(test, testCommon)
|
||||
require('./put-get-del-test').all(test, testCommon)
|
||||
require('./get-many-test').all(test, testCommon)
|
||||
|
||||
require('./batch-test').all(test, testCommon)
|
||||
require('./chained-batch-test').all(test, testCommon)
|
||||
|
||||
require('./iterator-test').all(test, testCommon)
|
||||
require('./iterator-range-test').all(test, testCommon)
|
||||
require('./async-iterator-test').all(test, testCommon)
|
||||
|
||||
require('./deferred-open-test').all(test, testCommon)
|
||||
require('./encoding-test').all(test, testCommon)
|
||||
require('./encoding-json-test').all(test, testCommon)
|
||||
require('./encoding-custom-test').all(test, testCommon)
|
||||
require('./encoding-buffer-test').all(test, testCommon)
|
||||
require('./encoding-decode-error-test').all(test, testCommon)
|
||||
|
||||
if (testCommon.supports.seek) {
|
||||
require('./iterator-seek-test').all(test, testCommon)
|
||||
}
|
||||
|
||||
if (testCommon.supports.snapshots) {
|
||||
require('./iterator-snapshot-test').all(test, testCommon)
|
||||
} else {
|
||||
require('./iterator-no-snapshot-test').all(test, testCommon)
|
||||
}
|
||||
|
||||
require('./clear-test').all(test, testCommon)
|
||||
require('./clear-range-test').all(test, testCommon)
|
||||
require('./sublevel-test').all(test, testCommon)
|
||||
|
||||
// Run the same suite on a sublevel
|
||||
if (!testCommon.internals[kSublevels]) {
|
||||
const factory = testCommon.factory
|
||||
|
||||
suite({
|
||||
...testCommon,
|
||||
internals: { [kSublevels]: true },
|
||||
factory (opts) {
|
||||
return factory().sublevel('test', opts)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
suite.common = common
|
||||
module.exports = suite
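// --- Added illustrative sketch, not part of the upstream file ---
// suite() expects, via common(options), at least a tape-compatible `test`
// function and a factory() returning a fresh database per call; that is an
// inference from the testCommon.test and testCommon.factory usage above.
// './my-level' and MyLevel are hypothetical placeholders for a concrete
// implementation, and the require path for this file is assumed:
const tape = require('tape')
const abstractSuite = require('abstract-level/test') // i.e. this file
const { MyLevel } = require('./my-level') // hypothetical implementation module

abstractSuite({
  test: tape,
  factory: (options) => new MyLevel(options)
})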
|
63
node_modules/abstract-level/test/iterator-no-snapshot-test.js
generated
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
'use strict'
|
||||
|
||||
exports.noSnapshot = function (test, testCommon) {
|
||||
function make (run) {
|
||||
return function (t) {
|
||||
const db = testCommon.factory()
|
||||
const operations = [
|
||||
{ type: 'put', key: 'a', value: 'a' },
|
||||
{ type: 'put', key: 'b', value: 'b' },
|
||||
{ type: 'put', key: 'c', value: 'c' }
|
||||
]
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err, 'no open error')
|
||||
|
||||
db.batch(operations, function (err) {
|
||||
t.ifError(err, 'no batch error')
|
||||
|
||||
// For this test it is important that we don't read eagerly.
|
||||
// NOTE: highWaterMarkBytes is not an abstract option, but
|
||||
// it is supported by classic-level and others. Also set the
|
||||
// old & equivalent leveldown highWaterMark option for compat.
|
||||
const it = db.iterator({ highWaterMarkBytes: 0, highWaterMark: 0 })
|
||||
|
||||
run(db, function (err) {
|
||||
t.ifError(err, 'no run error')
|
||||
verify(t, it, db)
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function verify (t, it, db) {
|
||||
it.all(function (err, entries) {
|
||||
t.ifError(err, 'no iterator error')
|
||||
|
||||
const kv = entries.map(function ([key, value]) {
|
||||
return key.toString() + value.toString()
|
||||
})
|
||||
|
||||
if (kv.length === 3) {
|
||||
t.same(kv, ['aa', 'bb', 'cc'], 'maybe supports snapshots')
|
||||
} else {
|
||||
t.same(kv, ['aa', 'cc'], 'ignores keys that have been deleted in the meantime')
|
||||
}
|
||||
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
test('delete key after creating iterator', make(function (db, done) {
|
||||
db.del('b', done)
|
||||
}))
|
||||
|
||||
test('batch delete key after creating iterator', make(function (db, done) {
|
||||
db.batch([{ type: 'del', key: 'b' }], done)
|
||||
}))
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.noSnapshot(test, testCommon)
|
||||
}
|
287
node_modules/abstract-level/test/iterator-range-test.js
generated
vendored
Normal file
@@ -0,0 +1,287 @@
|
||||
'use strict'
|
||||
|
||||
let db
|
||||
|
||||
const data = (function () {
|
||||
const d = []
|
||||
let i = 0
|
||||
let k
|
||||
for (; i < 100; i++) {
|
||||
k = (i < 10 ? '0' : '') + i
|
||||
d.push({
|
||||
key: k,
|
||||
value: String(Math.random())
|
||||
})
|
||||
}
|
||||
return d
|
||||
}())
|
||||
|
||||
exports.setUp = function (test, testCommon) {
|
||||
test('setUp db', function (t) {
|
||||
db = testCommon.factory()
|
||||
db.open(function () {
|
||||
db.batch(data.map(function (d) {
|
||||
return {
|
||||
type: 'put',
|
||||
key: d.key,
|
||||
value: d.value
|
||||
}
|
||||
}), t.end.bind(t))
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
exports.range = function (test, testCommon) {
|
||||
function rangeTest (name, opts, expected) {
|
||||
opts.keyEncoding = 'utf8'
|
||||
opts.valueEncoding = 'utf8'
|
||||
|
||||
test(name, function (t) {
|
||||
db.iterator(opts).all(function (err, entries) {
|
||||
t.error(err)
|
||||
t.is(entries.length, expected.length, 'correct number of entries')
|
||||
t.same(entries, expected.map(o => [o.key, o.value]))
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
|
||||
// Test the documented promise that in reverse mode,
|
||||
// "the returned entries are the same, but in reverse".
|
||||
if (!opts.reverse && !('limit' in opts)) {
|
||||
const reverseOpts = Object.assign({}, opts, { reverse: true })
|
||||
|
||||
rangeTest(
|
||||
name + ' (flipped)',
|
||||
reverseOpts,
|
||||
expected.slice().reverse()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
rangeTest('test full data collection', {}, data)
|
||||
|
||||
rangeTest('test iterator with reverse=true', {
|
||||
reverse: true
|
||||
}, data.slice().reverse())
|
||||
|
||||
rangeTest('test iterator with gte=00', {
|
||||
gte: '00'
|
||||
}, data)
|
||||
|
||||
rangeTest('test iterator with gte=50', {
|
||||
gte: '50'
|
||||
}, data.slice(50))
|
||||
|
||||
rangeTest('test iterator with lte=50 and reverse=true', {
|
||||
lte: '50',
|
||||
reverse: true
|
||||
}, data.slice().reverse().slice(49))
|
||||
|
||||
rangeTest('test iterator with gte=49.5 (midway)', {
|
||||
gte: '49.5'
|
||||
}, data.slice(50))
|
||||
|
||||
rangeTest('test iterator with gte=49999 (midway)', {
|
||||
gte: '49999'
|
||||
}, data.slice(50))
|
||||
|
||||
rangeTest('test iterator with lte=49.5 (midway) and reverse=true', {
|
||||
lte: '49.5',
|
||||
reverse: true
|
||||
}, data.slice().reverse().slice(50))
|
||||
|
||||
rangeTest('test iterator with lt=49.5 (midway) and reverse=true', {
|
||||
lt: '49.5',
|
||||
reverse: true
|
||||
}, data.slice().reverse().slice(50))
|
||||
|
||||
rangeTest('test iterator with lt=50 and reverse=true', {
|
||||
lt: '50',
|
||||
reverse: true
|
||||
}, data.slice().reverse().slice(50))
|
||||
|
||||
rangeTest('test iterator with lte=50', {
|
||||
lte: '50'
|
||||
}, data.slice(0, 51))
|
||||
|
||||
rangeTest('test iterator with lte=50.5 (midway)', {
|
||||
lte: '50.5'
|
||||
}, data.slice(0, 51))
|
||||
|
||||
rangeTest('test iterator with lte=50555 (midway)', {
|
||||
lte: '50555'
|
||||
}, data.slice(0, 51))
|
||||
|
||||
rangeTest('test iterator with lt=50555 (midway)', {
|
||||
lt: '50555'
|
||||
}, data.slice(0, 51))
|
||||
|
||||
rangeTest('test iterator with gte=50.5 (midway) and reverse=true', {
|
||||
gte: '50.5',
|
||||
reverse: true
|
||||
}, data.slice().reverse().slice(0, 49))
|
||||
|
||||
rangeTest('test iterator with gt=50.5 (midway) and reverse=true', {
|
||||
gt: '50.5',
|
||||
reverse: true
|
||||
}, data.slice().reverse().slice(0, 49))
|
||||
|
||||
rangeTest('test iterator with gt=50 and reverse=true', {
|
||||
gt: '50',
|
||||
reverse: true
|
||||
}, data.slice().reverse().slice(0, 49))
|
||||
|
||||
// first key is actually '00' so it should avoid it
|
||||
rangeTest('test iterator with lte=0', {
|
||||
lte: '0'
|
||||
}, [])
|
||||
|
||||
// first key is actually '00' so it should avoid it
|
||||
rangeTest('test iterator with lt=0', {
|
||||
lt: '0'
|
||||
}, [])
|
||||
|
||||
rangeTest('test iterator with gte=30 and lte=70', {
|
||||
gte: '30',
|
||||
lte: '70'
|
||||
}, data.slice(30, 71))
|
||||
|
||||
// The gte and lte options should take precedence over gt and lt respectively.
|
||||
rangeTest('test iterator with gte=30 and lte=70 and gt=40 and lt=60', {
|
||||
gte: '30',
|
||||
lte: '70',
|
||||
gt: '40',
|
||||
lt: '60'
|
||||
}, data.slice(30, 71))
|
||||
|
||||
// Also test the other way around: if gt and lt were to select a bigger range.
|
||||
rangeTest('test iterator with gte=30 and lte=70 and gt=20 and lt=80', {
|
||||
gte: '30',
|
||||
lte: '70',
|
||||
gt: '20',
|
||||
lt: '80'
|
||||
}, data.slice(30, 71))
|
||||
|
||||
rangeTest('test iterator with gt=29 and lt=71', {
|
||||
gt: '29',
|
||||
lt: '71'
|
||||
}, data.slice(30, 71))
|
||||
|
||||
rangeTest('test iterator with gte=30 and lte=70 and reverse=true', {
|
||||
lte: '70',
|
||||
gte: '30',
|
||||
reverse: true
|
||||
}, data.slice().reverse().slice(29, 70))
|
||||
|
||||
rangeTest('test iterator with gt=29 and lt=71 and reverse=true', {
|
||||
lt: '71',
|
||||
gt: '29',
|
||||
reverse: true
|
||||
}, data.slice().reverse().slice(29, 70))
|
||||
|
||||
rangeTest('test iterator with limit=20', {
|
||||
limit: 20
|
||||
}, data.slice(0, 20))
|
||||
|
||||
rangeTest('test iterator with limit=20 and gte=20', {
|
||||
limit: 20,
|
||||
gte: '20'
|
||||
}, data.slice(20, 40))
|
||||
|
||||
rangeTest('test iterator with limit=20 and reverse=true', {
|
||||
limit: 20,
|
||||
reverse: true
|
||||
}, data.slice().reverse().slice(0, 20))
|
||||
|
||||
rangeTest('test iterator with limit=20 and lte=79 and reverse=true', {
|
||||
limit: 20,
|
||||
lte: '79',
|
||||
reverse: true
|
||||
}, data.slice().reverse().slice(20, 40))
|
||||
|
||||
// the default limit value from levelup is -1
|
||||
rangeTest('test iterator with limit=-1 should iterate over whole database', {
|
||||
limit: -1
|
||||
}, data)
|
||||
|
||||
rangeTest('test iterator with limit=0 should not iterate over anything', {
|
||||
limit: 0
|
||||
}, [])
|
||||
|
||||
rangeTest('test iterator with lte after limit', {
|
||||
limit: 20,
|
||||
lte: '50'
|
||||
}, data.slice(0, 20))
|
||||
|
||||
rangeTest('test iterator with lte before limit', {
|
||||
limit: 50,
|
||||
lte: '19'
|
||||
}, data.slice(0, 20))
|
||||
|
||||
rangeTest('test iterator with gte after database end', {
|
||||
gte: '9a'
|
||||
}, [])
|
||||
|
||||
rangeTest('test iterator with gt after database end', {
|
||||
gt: '9a'
|
||||
}, [])
|
||||
|
||||
rangeTest('test iterator with lte after database end and reverse=true', {
|
||||
lte: '9a',
|
||||
reverse: true
|
||||
}, data.slice().reverse())
|
||||
|
||||
rangeTest('test iterator with lt after database end', {
|
||||
lt: 'a'
|
||||
}, data.slice())
|
||||
|
||||
rangeTest('test iterator with lt at database end', {
|
||||
lt: data[data.length - 1].key
|
||||
}, data.slice(0, -1))
|
||||
|
||||
rangeTest('test iterator with lte at database end', {
|
||||
lte: data[data.length - 1].key
|
||||
}, data.slice())
|
||||
|
||||
rangeTest('test iterator with lt before database end', {
|
||||
lt: data[data.length - 2].key
|
||||
}, data.slice(0, -2))
|
||||
|
||||
rangeTest('test iterator with lte before database end', {
|
||||
lte: data[data.length - 2].key
|
||||
}, data.slice(0, -1))
|
||||
|
||||
rangeTest('test iterator with lte and gte after database and reverse=true', {
|
||||
lte: '9b',
|
||||
gte: '9a',
|
||||
reverse: true
|
||||
}, [])
|
||||
|
||||
rangeTest('test iterator with lt and gt after database and reverse=true', {
|
||||
lt: '9b',
|
||||
gt: '9a',
|
||||
reverse: true
|
||||
}, [])
|
||||
|
||||
rangeTest('gt greater than lt', {
|
||||
gt: '20',
|
||||
lt: '10'
|
||||
}, [])
|
||||
|
||||
rangeTest('gte greater than lte', {
|
||||
gte: '20',
|
||||
lte: '10'
|
||||
}, [])
|
||||
}
|
||||
|
||||
exports.tearDown = function (test, testCommon) {
|
||||
test('tearDown', function (t) {
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.setUp(test, testCommon)
|
||||
exports.range(test, testCommon)
|
||||
exports.tearDown(test, testCommon)
|
||||
}
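// --- Added illustrative sketch, not part of the upstream test file ---
// Condensing the range rules asserted above: gte/lte take precedence over
// gt/lt when both are given, limit: -1 means no limit, and a range that
// selects nothing yields an empty result. For example (assuming `db` is an
// open abstract-level instance holding keys '00' through '99'):
async function rangeSketch (db) {
  // Covers '30'..'70' because gte/lte win over gt/lt.
  const both = await db.iterator({ gte: '30', lte: '70', gt: '40', lt: '60' }).all()

  const empty = await db.iterator({ gt: '20', lt: '10' }).all() // -> []
  const everything = await db.iterator({ limit: -1 }).all() // whole database

  return { both, empty, everything }
}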
|
252
node_modules/abstract-level/test/iterator-seek-test.js
generated
vendored
Normal file
@@ -0,0 +1,252 @@
|
||||
'use strict'
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const identity = (v) => v
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.sequence(test, testCommon)
|
||||
exports.seek(test, testCommon)
|
||||
}
|
||||
|
||||
exports.sequence = function (test, testCommon) {
|
||||
for (const deferred of [false, true]) {
|
||||
for (const mode of ['iterator', 'keys', 'values']) {
|
||||
test(`${mode}().seek() throws if next() has not completed (deferred: ${deferred})`, async function (t) {
|
||||
const db = testCommon.factory()
|
||||
if (!deferred) await db.open()
|
||||
|
||||
const it = db[mode]()
|
||||
const promise = it.next()
|
||||
|
||||
t.throws(() => it.seek('two'), (err) => err.code === 'LEVEL_ITERATOR_BUSY')
|
||||
|
||||
await promise
|
||||
await db.close()
|
||||
})
|
||||
|
||||
test(`${mode}().seek() does not throw after close() (deferred: ${deferred})`, async function (t) {
|
||||
const db = testCommon.factory()
|
||||
if (!deferred) await db.open()
|
||||
|
||||
const it = db[mode]()
|
||||
await it.close()
|
||||
|
||||
t.doesNotThrow(() => it.seek('two'))
|
||||
|
||||
await db.close()
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
exports.seek = function (test, testCommon) {
|
||||
const testData = () => [
|
||||
{ type: 'put', key: 'one', value: '1' },
|
||||
{ type: 'put', key: 'two', value: '2' },
|
||||
{ type: 'put', key: 'three', value: '3' }
|
||||
]
|
||||
|
||||
for (const mode of ['iterator', 'keys', 'values']) {
|
||||
const mapEntry = mode === 'iterator' ? e => e : mode === 'keys' ? e => e[0] : e => e[1]
|
||||
|
||||
test(`${mode}().seek() to string target`, async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.batch(testData())
|
||||
const it = db[mode]()
|
||||
|
||||
it.seek('two')
|
||||
|
||||
t.same(await it.next(), mapEntry(['two', '2']), 'match')
|
||||
t.same(await it.next(), undefined, 'end of iterator')
|
||||
|
||||
return db.close()
|
||||
})
|
||||
|
||||
if (testCommon.supports.encodings.buffer) {
|
||||
// TODO: make this test meaningful, with bytes outside the utf8 range
|
||||
test(`${mode}().seek() to buffer target`, async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.batch(testData())
|
||||
const it = db[mode]({ keyEncoding: 'buffer' })
|
||||
|
||||
it.seek(Buffer.from('two'))
|
||||
|
||||
t.same(await it.next(), mapEntry([Buffer.from('two'), '2']), 'match')
|
||||
t.same(await it.next(), undefined, 'end of iterator')
|
||||
|
||||
return db.close()
|
||||
})
|
||||
}
|
||||
|
||||
test(`${mode}().seek() to target with custom encoding`, async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.batch(testData())
|
||||
const it = db[mode]()
|
||||
const keyEncoding = { encode: () => 'two', decode: identity, format: 'utf8' }
|
||||
|
||||
it.seek('xyz', { keyEncoding })
|
||||
|
||||
t.same(await it.next(), mapEntry(['two', '2']), 'match')
|
||||
t.same(await it.next(), undefined, 'end of iterator')
|
||||
|
||||
return db.close()
|
||||
})
|
||||
|
||||
test(`${mode}().seek() on reverse iterator`, async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.batch(testData())
|
||||
const it = db[mode]({ reverse: true, limit: 1 })
|
||||
|
||||
// Should land on key equal to or smaller than 'three!' which is 'three'
|
||||
it.seek('three!')
|
||||
|
||||
t.same(await it.next(), mapEntry(['three', '3']), 'match')
|
||||
t.same(await it.next(), undefined, 'end of iterator')
|
||||
|
||||
return db.close()
|
||||
})
|
||||
|
||||
test(`${mode}().seek() to out of range target`, async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.batch(testData())
|
||||
const it = db[mode]()
|
||||
|
||||
it.seek('zzz')
|
||||
t.same(await it.next(), undefined, 'end of iterator')
|
||||
|
||||
return db.close()
|
||||
})
|
||||
|
||||
test(`${mode}().seek() on reverse iterator to out of range target`, async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.batch(testData())
|
||||
const it = db[mode]({ reverse: true })
|
||||
|
||||
it.seek('zzz')
|
||||
|
||||
t.same(await it.next(), mapEntry(['two', '2']), 'match')
|
||||
t.same(await it.next(), mapEntry(['three', '3']), 'match')
|
||||
t.same(await it.next(), mapEntry(['one', '1']), 'match')
|
||||
t.same(await it.next(), undefined, 'end of iterator')
|
||||
|
||||
return db.close()
|
||||
})
|
||||
|
||||
if (testCommon.supports.snapshots) {
|
||||
for (const reverse of [false, true]) {
|
||||
for (const deferred of [false, true]) {
|
||||
test(`${mode}().seek() respects snapshot (reverse: ${reverse}, deferred: ${deferred})`, async function (t) {
|
||||
const db = testCommon.factory()
|
||||
if (!deferred) await db.open()
|
||||
|
||||
const it = db[mode]({ reverse })
|
||||
|
||||
// Add entry after having created the iterator (and its snapshot)
|
||||
await db.put('a', 'a')
|
||||
|
||||
// Seeking should not create a new snapshot, which'd include the new entry
|
||||
it.seek('a')
|
||||
t.same(await it.next(), undefined)
|
||||
|
||||
return db.close()
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
test(`${mode}().seek() respects range`, function (t) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(function (err) {
|
||||
t.error(err, 'no error from open()')
|
||||
|
||||
// Can't use Array.fill() because IE
|
||||
const ops = []
|
||||
|
||||
for (let i = 0; i < 10; i++) {
|
||||
ops.push({ type: 'put', key: String(i), value: String(i) })
|
||||
}
|
||||
|
||||
db.batch(ops, function (err) {
|
||||
t.error(err, 'no error from batch()')
|
||||
|
||||
let pending = 0
|
||||
|
||||
expect({ gt: '5' }, '4', undefined)
|
||||
expect({ gt: '5' }, '5', undefined)
|
||||
expect({ gt: '5' }, '6', '6')
|
||||
|
||||
expect({ gte: '5' }, '4', undefined)
|
||||
expect({ gte: '5' }, '5', '5')
|
||||
expect({ gte: '5' }, '6', '6')
|
||||
|
||||
// The gte option should take precedence over gt.
|
||||
expect({ gte: '5', gt: '7' }, '4', undefined)
|
||||
expect({ gte: '5', gt: '7' }, '5', '5')
|
||||
expect({ gte: '5', gt: '7' }, '6', '6')
|
||||
expect({ gte: '5', gt: '3' }, '4', undefined)
|
||||
expect({ gte: '5', gt: '3' }, '5', '5')
|
||||
expect({ gte: '5', gt: '3' }, '6', '6')
|
||||
|
||||
expect({ lt: '5' }, '4', '4')
|
||||
expect({ lt: '5' }, '5', undefined)
|
||||
expect({ lt: '5' }, '6', undefined)
|
||||
|
||||
expect({ lte: '5' }, '4', '4')
|
||||
expect({ lte: '5' }, '5', '5')
|
||||
expect({ lte: '5' }, '6', undefined)
|
||||
|
||||
// The lte option should take precedence over lt.
|
||||
expect({ lte: '5', lt: '3' }, '4', '4')
|
||||
expect({ lte: '5', lt: '3' }, '5', '5')
|
||||
expect({ lte: '5', lt: '3' }, '6', undefined)
|
||||
expect({ lte: '5', lt: '7' }, '4', '4')
|
||||
expect({ lte: '5', lt: '7' }, '5', '5')
|
||||
expect({ lte: '5', lt: '7' }, '6', undefined)
|
||||
|
||||
expect({ lt: '5', reverse: true }, '4', '4')
|
||||
expect({ lt: '5', reverse: true }, '5', undefined)
|
||||
expect({ lt: '5', reverse: true }, '6', undefined)
|
||||
|
||||
expect({ lte: '5', reverse: true }, '4', '4')
|
||||
expect({ lte: '5', reverse: true }, '5', '5')
|
||||
expect({ lte: '5', reverse: true }, '6', undefined)
|
||||
|
||||
expect({ gt: '5', reverse: true }, '4', undefined)
|
||||
expect({ gt: '5', reverse: true }, '5', undefined)
|
||||
expect({ gt: '5', reverse: true }, '6', '6')
|
||||
|
||||
expect({ gte: '5', reverse: true }, '4', undefined)
|
||||
expect({ gte: '5', reverse: true }, '5', '5')
|
||||
expect({ gte: '5', reverse: true }, '6', '6')
|
||||
|
||||
expect({ gt: '7', lt: '8' }, '7', undefined)
|
||||
expect({ gte: '7', lt: '8' }, '7', '7')
|
||||
expect({ gte: '7', lt: '8' }, '8', undefined)
|
||||
expect({ gt: '7', lte: '8' }, '8', '8')
|
||||
|
||||
function expect (range, target, expected) {
|
||||
pending++
|
||||
const ite = db[mode](range)
|
||||
|
||||
ite.seek(target)
|
||||
ite.next(function (err, item) {
|
||||
t.error(err, 'no error from next()')
|
||||
|
||||
const json = JSON.stringify(range)
|
||||
const msg = 'seek(' + target + ') on ' + json + ' yields ' + expected
|
||||
|
||||
// Either a key or value depending on mode
|
||||
t.is(item, expected, msg)
|
||||
|
||||
ite.close(function (err) {
|
||||
t.error(err, 'no error from close()')
|
||||
if (!--pending) db.close(t.end.bind(t))
|
||||
})
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
}
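// --- Added illustrative sketch, not part of the upstream test file ---
// The expect() helper above pins down the seek() contract exercised here:
// seek() repositions an open iterator to the first key >= target (<= target
// when reverse), and a target outside the iterator's gt/gte/lt/lte range
// makes the following next() yield undefined rather than a clamped key.
// Condensed (assuming `db` is an open abstract-level instance with keys
// '0' through '9'):
async function seekSketch (db) {
  const it = db.keys({ gte: '5' })

  it.seek('7') // jump forward within the range
  const inRange = await it.next() // -> '7'

  it.seek('3') // below gte: '5', so out of range
  const outOfRange = await it.next() // -> undefined

  await it.close()
  return { inRange, outOfRange }
}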
|
99
node_modules/abstract-level/test/iterator-snapshot-test.js
generated
vendored
Normal file
@@ -0,0 +1,99 @@
|
||||
'use strict'
|
||||
|
||||
exports.snapshot = function (test, testCommon) {
|
||||
function make (run) {
|
||||
return function (t) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err, 'no open error')
|
||||
|
||||
db.put('z', 'from snapshot', function (err) {
|
||||
t.ifError(err, 'no put error')
|
||||
|
||||
// For this test it is important that we don't read eagerly.
|
||||
// NOTE: highWaterMarkBytes is not an abstract option, but
|
||||
// it is supported by classic-level and others. Also set the
|
||||
// old & equivalent leveldown highWaterMark option for compat.
|
||||
const it = db.iterator({ highWaterMarkBytes: 0, highWaterMark: 0 })
|
||||
|
||||
run(t, db, it, function end (err) {
|
||||
t.ifError(err, 'no run error')
|
||||
|
||||
it.close(function (err) {
|
||||
t.ifError(err, 'no iterator close error')
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
test('delete key after snapshotting', make(function (t, db, it, end) {
|
||||
db.del('z', function (err) {
|
||||
t.ifError(err, 'no del error')
|
||||
|
||||
it.next(function (err, key, value) {
|
||||
t.ifError(err, 'no next error')
|
||||
t.ok(key, 'got a key')
|
||||
t.is(key.toString(), 'z', 'correct key')
|
||||
t.is(value.toString(), 'from snapshot', 'correct value')
|
||||
|
||||
end()
|
||||
})
|
||||
})
|
||||
}))
|
||||
|
||||
test('overwrite key after snapshotting', make(function (t, db, it, end) {
|
||||
db.put('z', 'not from snapshot', function (err) {
|
||||
t.ifError(err, 'no put error')
|
||||
|
||||
it.next(function (err, key, value) {
|
||||
t.ifError(err, 'no next error')
|
||||
t.ok(key, 'got a key')
|
||||
t.is(key.toString(), 'z', 'correct key')
|
||||
t.is(value.toString(), 'from snapshot', 'correct value')
|
||||
|
||||
end()
|
||||
})
|
||||
})
|
||||
}))
|
||||
|
||||
test('add key after snapshotting that sorts first', make(function (t, db, it, end) {
|
||||
db.put('a', 'not from snapshot', function (err) {
|
||||
t.ifError(err, 'no put error')
|
||||
|
||||
it.next(function (err, key, value) {
|
||||
t.ifError(err, 'no next error')
|
||||
|
||||
t.ok(key, 'got a key')
|
||||
t.is(key.toString(), 'z', 'correct key')
|
||||
t.is(value.toString(), 'from snapshot', 'correct value')
|
||||
|
||||
end()
|
||||
})
|
||||
})
|
||||
}))
|
||||
|
||||
// NOTE: adapted from memdown
|
||||
test('delete key after snapshotting, with more entries available', async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
await Promise.all([db.put('a', 'A'), db.put('b', 'B'), db.put('c', 'C')])
|
||||
|
||||
const iterator = db.iterator({ gte: 'a' })
|
||||
t.same(await iterator.next(), ['a', 'A'])
|
||||
|
||||
await db.del('b')
|
||||
t.same(await iterator.next(), ['b', 'B'])
|
||||
t.same(await iterator.next(), ['c', 'C'])
|
||||
|
||||
await iterator.close()
|
||||
return db.close()
|
||||
})
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.snapshot(test, testCommon)
|
||||
}
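// --- Added illustrative sketch, not part of the upstream test file ---
// The tests above pin down snapshot semantics: an iterator reads the database
// as it was when the iterator was created, so later writes, overwrites and
// deletes are not visible to it. Condensed (assuming `db` is an open
// abstract-level instance that supports snapshots):
async function snapshotSketch (db) {
  await db.put('z', 'from snapshot')
  const it = db.iterator()

  await db.put('z', 'not from snapshot') // happens after the snapshot was taken
  const entry = await it.next() // -> ['z', 'from snapshot']

  await it.close()
  return entry
}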
|
548
node_modules/abstract-level/test/iterator-test.js
generated
vendored
Normal file
@@ -0,0 +1,548 @@
|
||||
'use strict'
|
||||
|
||||
const { Buffer } = require('buffer')
|
||||
const identity = (v) => v
|
||||
|
||||
let db
|
||||
|
||||
exports.setUp = function (test, testCommon) {
|
||||
test('setUp db', function (t) {
|
||||
db = testCommon.factory()
|
||||
db.open(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
exports.args = function (test, testCommon) {
|
||||
for (const mode of ['iterator', 'keys', 'values']) {
|
||||
test(`${mode}() has db reference`, async function (t) {
|
||||
const it = db[mode]()
|
||||
|
||||
// May return iterator of an underlying db, that's okay.
|
||||
t.ok(it.db === db || it.db === (db.db || db._db || db))
|
||||
|
||||
await it.close()
|
||||
})
|
||||
|
||||
test(`${mode}() has limit and count properties`, async function (t) {
|
||||
const iterators = [db[mode]()]
|
||||
t.is(iterators[0].limit, Infinity, 'defaults to infinite')
|
||||
|
||||
for (const limit of [-1, 0, 1, Infinity]) {
|
||||
const it = db[mode]({ limit })
|
||||
iterators.push(it)
|
||||
t.is(it.limit, limit === -1 ? Infinity : limit, 'has limit property')
|
||||
}
|
||||
|
||||
t.ok(iterators.every(it => it.count === 0), 'has count property')
|
||||
await Promise.all(iterators.map(it => it.close()))
|
||||
})
|
||||
|
||||
test(`${mode}().nextv() yields error if size is invalid`, async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const it = db[mode]()
|
||||
|
||||
for (const args of [[], [NaN], ['1'], [2.5]]) {
|
||||
try {
|
||||
await it.nextv(...args)
|
||||
} catch (err) {
|
||||
t.is(err.message, "The first argument 'size' must be an integer")
|
||||
}
|
||||
}
|
||||
|
||||
await it.close()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
exports.sequence = function (test, testCommon) {
|
||||
for (const mode of ['iterator', 'keys', 'values']) {
|
||||
test(`${mode}().close() is idempotent`, function (t) {
|
||||
const iterator = db[mode]()
|
||||
|
||||
iterator.close(function () {
|
||||
let async = false
|
||||
|
||||
iterator.close(function () {
|
||||
t.ok(async, 'callback is asynchronous')
|
||||
t.end()
|
||||
})
|
||||
|
||||
async = true
|
||||
})
|
||||
})
|
||||
|
||||
for (const method of ['next', 'nextv', 'all']) {
|
||||
const requiredArgs = method === 'nextv' ? [1] : []
|
||||
|
||||
test(`${mode}().${method}() after close() yields error`, function (t) {
|
||||
const iterator = db[mode]()
|
||||
iterator.close(function (err) {
|
||||
t.error(err)
|
||||
|
||||
let async = false
|
||||
|
||||
iterator[method](...requiredArgs, function (err2) {
|
||||
t.ok(err2, 'returned error')
|
||||
t.is(err2.code, 'LEVEL_ITERATOR_NOT_OPEN', 'correct message')
|
||||
t.ok(async, 'callback is asynchronous')
|
||||
t.end()
|
||||
})
|
||||
|
||||
async = true
|
||||
})
|
||||
})
|
||||
|
||||
for (const otherMethod of ['next', 'nextv', 'all']) {
|
||||
const otherRequiredArgs = otherMethod === 'nextv' ? [1] : []
|
||||
|
||||
test(`${mode}().${method}() while busy with ${otherMethod}() yields error`, function (t) {
|
||||
const iterator = db[mode]()
|
||||
iterator[otherMethod](...otherRequiredArgs, function (err) {
|
||||
t.error(err)
|
||||
iterator.close(function (err) {
|
||||
t.error(err)
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
|
||||
let async = false
|
||||
|
||||
iterator[method](...requiredArgs, function (err) {
|
||||
t.ok(err, 'returned error')
|
||||
t.is(err.code, 'LEVEL_ITERATOR_BUSY')
|
||||
t.ok(async, 'callback is asynchronous')
|
||||
})
|
||||
|
||||
async = true
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const deferred of [false, true]) {
|
||||
for (const mode of ['iterator', 'keys', 'values']) {
|
||||
for (const method of ['next', 'nextv', 'all']) {
|
||||
const requiredArgs = method === 'nextv' ? [10] : []
|
||||
|
||||
// NOTE: adapted from leveldown
|
||||
test(`${mode}().${method}() after db.close() yields error (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const db = testCommon.factory()
|
||||
if (!deferred) await db.open()
|
||||
|
||||
await db.put('a', 'a')
|
||||
await db.put('b', 'b')
|
||||
|
||||
const it = db[mode]()
|
||||
|
||||
// The first call *should* succeed, because it was scheduled before close(). However, success
|
||||
// is not a must. Because nextv() and all() fallback to next*(), they're allowed to fail. An
|
||||
// implementation can also choose to abort any pending call on close.
|
||||
let promise = it[method](...requiredArgs).then(() => {
|
||||
t.pass('Optionally succeeded')
|
||||
}).catch((err) => {
|
||||
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
})
|
||||
|
||||
// The second call *must* fail, because it was scheduled after close()
|
||||
promise = promise.then(() => {
|
||||
return it[method](...requiredArgs).then(() => {
|
||||
t.fail('Expected an error')
|
||||
}).catch((err) => {
|
||||
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
})
|
||||
})
|
||||
|
||||
return Promise.all([db.close(), promise])
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
exports.iterator = function (test, testCommon) {
|
||||
test('test simple iterator()', function (t) {
|
||||
const data = [
|
||||
{ type: 'put', key: 'foobatch1', value: 'bar1' },
|
||||
{ type: 'put', key: 'foobatch2', value: 'bar2' },
|
||||
{ type: 'put', key: 'foobatch3', value: 'bar3' }
|
||||
]
|
||||
let idx = 0
|
||||
|
||||
db.batch(data, function (err) {
|
||||
t.error(err)
|
||||
const iterator = db.iterator()
|
||||
const fn = function (err, key, value) {
|
||||
t.error(err)
|
||||
if (key && value) {
|
||||
t.is(key, data[idx].key, 'correct key')
|
||||
t.is(value, data[idx].value, 'correct value')
|
||||
db.nextTick(next)
|
||||
idx++
|
||||
} else { // end
|
||||
t.ok(err == null, 'err argument is nullish')
|
||||
t.ok(typeof key === 'undefined', 'key argument is undefined')
|
||||
t.ok(typeof value === 'undefined', 'value argument is undefined')
|
||||
t.is(idx, data.length, 'correct number of entries')
|
||||
iterator.close(function () {
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
}
|
||||
const next = function () {
|
||||
iterator.next(fn)
|
||||
}
|
||||
|
||||
next()
|
||||
})
|
||||
})
|
||||
|
||||
// NOTE: adapted from leveldown
|
||||
test('key-only iterator', function (t) {
|
||||
const it = db.iterator({ values: false })
|
||||
|
||||
it.next(function (err, key, value) {
|
||||
t.ifError(err, 'no next() error')
|
||||
t.is(key, 'foobatch1')
|
||||
t.is(value, undefined)
|
||||
it.close(t.end.bind(t))
|
||||
})
|
||||
})
|
||||
|
||||
// NOTE: adapted from leveldown
|
||||
test('value-only iterator', function (t) {
|
||||
const it = db.iterator({ keys: false })
|
||||
|
||||
it.next(function (err, key, value) {
|
||||
t.ifError(err, 'no next() error')
|
||||
t.is(key, undefined)
|
||||
t.is(value, 'bar1')
|
||||
it.close(t.end.bind(t))
|
||||
})
|
||||
})
|
||||
|
||||
test('db.keys().next()', function (t) {
|
||||
const it = db.keys()
|
||||
|
||||
it.next(function (err, key) {
|
||||
t.ifError(err, 'no next() error')
|
||||
t.is(key, 'foobatch1')
|
||||
it.close(t.end.bind(t))
|
||||
})
|
||||
})
|
||||
|
||||
test('db.values().next()', function (t) {
|
||||
const it = db.values()
|
||||
|
||||
it.next(function (err, value) {
|
||||
t.ifError(err, 'no next() error')
|
||||
t.is(value, 'bar1')
|
||||
it.close(t.end.bind(t))
|
||||
})
|
||||
})
|
||||
|
||||
for (const mode of ['iterator', 'keys', 'values']) {
|
||||
const mapEntry = e => mode === 'iterator' ? e : mode === 'keys' ? e[0] : e[1]
|
||||
|
||||
test(`${mode}().nextv()`, async function (t) {
|
||||
const it = db[mode]()
|
||||
|
||||
t.same(await it.nextv(1), [['foobatch1', 'bar1']].map(mapEntry))
|
||||
t.same(await it.nextv(2, {}), [['foobatch2', 'bar2'], ['foobatch3', 'bar3']].map(mapEntry))
|
||||
t.same(await it.nextv(2), [])
|
||||
|
||||
await it.close()
|
||||
})
|
||||
|
||||
test(`${mode}().nextv() in reverse`, async function (t) {
|
||||
const it = db[mode]({ reverse: true })
|
||||
|
||||
t.same(await it.nextv(1), [['foobatch3', 'bar3']].map(mapEntry))
|
||||
t.same(await it.nextv(2, {}), [['foobatch2', 'bar2'], ['foobatch1', 'bar1']].map(mapEntry))
|
||||
t.same(await it.nextv(2), [])
|
||||
|
||||
await it.close()
|
||||
})
|
||||
|
||||
test(`${mode}().nextv() has soft minimum of 1`, async function (t) {
|
||||
const it = db[mode]()
|
||||
|
||||
t.same(await it.nextv(0), [['foobatch1', 'bar1']].map(mapEntry))
|
||||
t.same(await it.nextv(0), [['foobatch2', 'bar2']].map(mapEntry))
|
||||
t.same(await it.nextv(0, {}), [['foobatch3', 'bar3']].map(mapEntry))
|
||||
t.same(await it.nextv(0), [])
|
||||
|
||||
await it.close()
|
||||
})
|
||||
|
||||
test(`${mode}().nextv() requesting more than available`, async function (t) {
|
||||
const it = db[mode]()
|
||||
|
||||
t.same(await it.nextv(10), [
|
||||
['foobatch1', 'bar1'],
|
||||
['foobatch2', 'bar2'],
|
||||
['foobatch3', 'bar3']
|
||||
].map(mapEntry))
|
||||
t.same(await it.nextv(10), [])
|
||||
|
||||
await it.close()
|
||||
})
|
||||
|
||||
test(`${mode}().nextv() honors limit`, async function (t) {
|
||||
const it = db[mode]({ limit: 2 })
|
||||
|
||||
t.same(await it.nextv(10), [['foobatch1', 'bar1'], ['foobatch2', 'bar2']].map(mapEntry))
|
||||
t.same(await it.nextv(10), [])
|
||||
|
||||
await it.close()
|
||||
})
|
||||
|
||||
test(`${mode}().nextv() honors limit in reverse`, async function (t) {
|
||||
const it = db[mode]({ limit: 2, reverse: true })
|
||||
|
||||
t.same(await it.nextv(10), [['foobatch3', 'bar3'], ['foobatch2', 'bar2']].map(mapEntry))
|
||||
t.same(await it.nextv(10), [])
|
||||
|
||||
await it.close()
|
||||
})
|
||||
|
||||
test(`${mode}().all()`, async function (t) {
|
||||
t.same(await db[mode]().all(), [
|
||||
['foobatch1', 'bar1'],
|
||||
['foobatch2', 'bar2'],
|
||||
['foobatch3', 'bar3']
|
||||
].map(mapEntry))
|
||||
|
||||
t.same(await db[mode]().all({}), [
|
||||
['foobatch1', 'bar1'],
|
||||
['foobatch2', 'bar2'],
|
||||
['foobatch3', 'bar3']
|
||||
].map(mapEntry))
|
||||
})
|
||||
|
||||
test(`${mode}().all() in reverse`, async function (t) {
|
||||
t.same(await db[mode]({ reverse: true }).all(), [
|
||||
['foobatch3', 'bar3'],
|
||||
['foobatch2', 'bar2'],
|
||||
['foobatch1', 'bar1']
|
||||
].map(mapEntry))
|
||||
})
|
||||
|
||||
test(`${mode}().all() honors limit`, async function (t) {
|
||||
t.same(await db[mode]({ limit: 2 }).all(), [
|
||||
['foobatch1', 'bar1'],
|
||||
['foobatch2', 'bar2']
|
||||
].map(mapEntry))
|
||||
|
||||
const it = db[mode]({ limit: 2 })
|
||||
|
||||
t.same(await it.next(), mapEntry(['foobatch1', 'bar1']))
|
||||
t.same(await it.all(), [['foobatch2', 'bar2']].map(mapEntry))
|
||||
})
|
||||
|
||||
test(`${mode}().all() honors limit in reverse`, async function (t) {
|
||||
t.same(await db[mode]({ limit: 2, reverse: true }).all(), [
|
||||
['foobatch3', 'bar3'],
|
||||
['foobatch2', 'bar2']
|
||||
].map(mapEntry))
|
||||
|
||||
const it = db[mode]({ limit: 2, reverse: true })
|
||||
|
||||
t.same(await it.next(), mapEntry(['foobatch3', 'bar3']))
|
||||
t.same(await it.all(), [['foobatch2', 'bar2']].map(mapEntry))
|
||||
})
|
||||
}
|
||||
|
||||
// NOTE: adapted from memdown
|
||||
test('iterator() sorts lexicographically', async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
|
||||
// Write in unsorted order with multiple operations
|
||||
await db.put('f', 'F')
|
||||
await db.put('a', 'A')
|
||||
await db.put('~', '~')
|
||||
await db.put('e', 'E')
|
||||
await db.put('🐄', '🐄')
|
||||
await db.batch([
|
||||
{ type: 'put', key: 'd', value: 'D' },
|
||||
{ type: 'put', key: 'b', value: 'B' },
|
||||
{ type: 'put', key: 'ff', value: 'FF' },
|
||||
{ type: 'put', key: 'a🐄', value: 'A🐄' }
|
||||
])
|
||||
await db.batch([
|
||||
{ type: 'put', key: '', value: 'empty' },
|
||||
{ type: 'put', key: '2', value: '2' },
|
||||
{ type: 'put', key: '12', value: '12' },
|
||||
{ type: 'put', key: '\t', value: '\t' }
|
||||
])
|
||||
|
||||
t.same(await db.iterator().all(), [
|
||||
['', 'empty'],
|
||||
['\t', '\t'],
|
||||
['12', '12'],
|
||||
['2', '2'],
|
||||
['a', 'A'],
|
||||
['a🐄', 'A🐄'],
|
||||
['b', 'B'],
|
||||
['d', 'D'],
|
||||
['e', 'E'],
|
||||
['f', 'F'],
|
||||
['ff', 'FF'],
|
||||
['~', '~'],
|
||||
['🐄', '🐄']
|
||||
])
|
||||
|
||||
t.same(await db.iterator({ lte: '' }).all(), [
|
||||
['', 'empty']
|
||||
])
|
||||
|
||||
return db.close()
|
||||
})
|
||||
|
||||
for (const keyEncoding of ['buffer', 'view']) {
|
||||
if (!testCommon.supports.encodings[keyEncoding]) continue
|
||||
|
||||
test(`test iterator() has byte order (${keyEncoding} encoding)`, function (t) {
|
||||
const db = testCommon.factory({ keyEncoding })
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err, 'no open() error')
|
||||
|
||||
const ctor = keyEncoding === 'buffer' ? Buffer : Uint8Array
|
||||
const keys = [2, 11, 1].map(b => ctor.from([b]))
|
||||
|
||||
db.batch(keys.map((key) => ({ type: 'put', key, value: 'x' })), function (err) {
|
||||
t.ifError(err, 'no batch() error')
|
||||
|
||||
db.keys().all(function (err, keys) {
|
||||
t.ifError(err, 'no all() error')
|
||||
t.same(keys.map(k => k[0]), [1, 2, 11], 'order is ok')
|
||||
|
||||
db.iterator().all(function (err, entries) {
|
||||
t.ifError(err, 'no all() error')
|
||||
t.same(entries.map(e => e[0][0]), [1, 2, 11], 'order is ok')
|
||||
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// NOTE: adapted from memdown and level-js
|
||||
test(`test iterator() with byte range (${keyEncoding} encoding)`, async function (t) {
|
||||
const db = testCommon.factory({ keyEncoding })
|
||||
await db.open()
|
||||
|
||||
await db.put(Uint8Array.from([0x0]), '0')
|
||||
await db.put(Uint8Array.from([128]), '128')
|
||||
await db.put(Uint8Array.from([160]), '160')
|
||||
await db.put(Uint8Array.from([192]), '192')
|
||||
|
||||
const collect = async (range) => {
|
||||
const entries = await db.iterator(range).all()
|
||||
t.ok(entries.every(e => e[0] instanceof Uint8Array)) // True for both encodings
|
||||
t.ok(entries.every(e => e[1] === String(e[0][0])))
|
||||
return entries.map(e => e[0][0])
|
||||
}
|
||||
|
||||
t.same(await collect({ gt: Uint8Array.from([255]) }), [])
|
||||
t.same(await collect({ gt: Uint8Array.from([192]) }), [])
|
||||
t.same(await collect({ gt: Uint8Array.from([160]) }), [192])
|
||||
t.same(await collect({ gt: Uint8Array.from([128]) }), [160, 192])
|
||||
t.same(await collect({ gt: Uint8Array.from([0x0]) }), [128, 160, 192])
|
||||
t.same(await collect({ gt: Uint8Array.from([]) }), [0x0, 128, 160, 192])
|
||||
|
||||
t.same(await collect({ lt: Uint8Array.from([255]) }), [0x0, 128, 160, 192])
|
||||
t.same(await collect({ lt: Uint8Array.from([192]) }), [0x0, 128, 160])
|
||||
t.same(await collect({ lt: Uint8Array.from([160]) }), [0x0, 128])
|
||||
t.same(await collect({ lt: Uint8Array.from([128]) }), [0x0])
|
||||
t.same(await collect({ lt: Uint8Array.from([0x0]) }), [])
|
||||
t.same(await collect({ lt: Uint8Array.from([]) }), [])
|
||||
|
||||
t.same(await collect({ gte: Uint8Array.from([255]) }), [])
|
||||
t.same(await collect({ gte: Uint8Array.from([192]) }), [192])
|
||||
t.same(await collect({ gte: Uint8Array.from([160]) }), [160, 192])
|
||||
t.same(await collect({ gte: Uint8Array.from([128]) }), [128, 160, 192])
|
||||
t.same(await collect({ gte: Uint8Array.from([0x0]) }), [0x0, 128, 160, 192])
|
||||
t.same(await collect({ gte: Uint8Array.from([]) }), [0x0, 128, 160, 192])
|
||||
|
||||
t.same(await collect({ lte: Uint8Array.from([255]) }), [0x0, 128, 160, 192])
|
||||
t.same(await collect({ lte: Uint8Array.from([192]) }), [0x0, 128, 160, 192])
|
||||
t.same(await collect({ lte: Uint8Array.from([160]) }), [0x0, 128, 160])
|
||||
t.same(await collect({ lte: Uint8Array.from([128]) }), [0x0, 128])
|
||||
t.same(await collect({ lte: Uint8Array.from([0x0]) }), [0x0])
|
||||
t.same(await collect({ lte: Uint8Array.from([]) }), [])
|
||||
|
||||
return db.close()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
exports.decode = function (test, testCommon) {
|
||||
for (const deferred of [false, true]) {
|
||||
for (const mode of ['iterator', 'keys', 'values']) {
|
||||
for (const method of ['next', 'nextv', 'all']) {
|
||||
const requiredArgs = method === 'nextv' ? [1] : []
|
||||
|
||||
for (const encodingOption of ['keyEncoding', 'valueEncoding']) {
|
||||
if (mode === 'keys' && encodingOption === 'valueEncoding') continue
|
||||
if (mode === 'values' && encodingOption === 'keyEncoding') continue
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`${mode}().${method}() catches decoding error from ${encodingOption} (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const encoding = {
|
||||
format: 'utf8',
|
||||
decode: function (x) {
|
||||
t.is(x, encodingOption === 'keyEncoding' ? 'testKey' : 'testValue')
|
||||
throw new Error('from encoding')
|
||||
},
|
||||
encode: identity
|
||||
}
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.put('testKey', 'testValue')
|
||||
|
||||
if (deferred) {
|
||||
await db.close()
|
||||
db.open(t.ifError.bind(t))
|
||||
} else {
|
||||
t.pass('non-deferred')
|
||||
}
|
||||
|
||||
const it = db[mode]({ [encodingOption]: encoding })
|
||||
|
||||
try {
|
||||
await it[method](...requiredArgs)
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_DECODE_ERROR')
|
||||
t.is(err.cause && err.cause.message, 'from encoding')
|
||||
}
|
||||
|
||||
return db.close()
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
exports.tearDown = function (test, testCommon) {
|
||||
test('tearDown', function (t) {
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.setUp(test, testCommon)
|
||||
exports.args(test, testCommon)
|
||||
exports.sequence(test, testCommon)
|
||||
exports.iterator(test, testCommon)
|
||||
exports.decode(test, testCommon)
|
||||
exports.tearDown(test, testCommon)
|
||||
}
|
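The iterator tests above pin down the public read contract: next() yields one entry, nextv(size) yields up to size entries (with a soft minimum of 1), all() drains and closes, and limit/reverse apply to all three. A minimal consumer-side sketch of that contract; memory-level is an assumption here (it is not part of this package), any concrete abstract-level implementation behaves the same:

// Sketch only. memory-level is an assumption, not part of this package.
const { MemoryLevel } = require('memory-level')

async function main () {
  const db = new MemoryLevel()
  await db.batch([
    { type: 'put', key: 'a', value: '1' },
    { type: 'put', key: 'b', value: '2' },
    { type: 'put', key: 'c', value: '3' }
  ])

  const it = db.iterator({ reverse: true, limit: 2 })
  console.log(await it.nextv(10)) // [ [ 'c', '3' ], [ 'b', '2' ] ], capped by limit
  console.log(await it.nextv(10)) // [], exhausted
  await it.close()

  console.log(await db.keys().all()) // [ 'a', 'b', 'c' ]
  return db.close()
}

main().catch(console.error)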
24 node_modules/abstract-level/test/manifest-test.js generated vendored Normal file
@@ -0,0 +1,24 @@
|
||||
'use strict'
|
||||
|
||||
const suite = require('level-supports/test')
|
||||
|
||||
module.exports = function (test, testCommon) {
|
||||
suite(test, testCommon)
|
||||
|
||||
test('manifest has expected properties', function (t) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
t.is(db.supports.status, true)
|
||||
t.is(db.supports.promises, true)
|
||||
t.is(db.supports.clear, true)
|
||||
t.is(db.supports.getMany, true)
|
||||
|
||||
testCommon.supports = db.supports
|
||||
t.ok(testCommon.supports, 'can be accessed via testCommon')
|
||||
|
||||
t.ok(db.supports.encodings.utf8, 'supports utf8')
|
||||
t.ok(db.supports.encodings.json, 'supports json')
|
||||
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
}
|
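The manifest test above only asserts that db.supports has the expected shape. In application code the same manifest (provided by level-supports) is useful for feature detection; a small hedged sketch:

// Works with any abstract-level database passed in by the caller.
function assertFeatures (db) {
  const manifest = db.supports

  if (!manifest.getMany) {
    throw new Error('this application requires getMany()')
  }

  // Encodings are exposed as a map of encoding names to truthy values.
  console.log('utf8 keys supported:', Boolean(manifest.encodings.utf8))
  console.log('buffer keys supported:', Boolean(manifest.encodings.buffer))
}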
35 node_modules/abstract-level/test/open-create-if-missing-test.js generated vendored Normal file
@@ -0,0 +1,35 @@
|
||||
'use strict'
|
||||
|
||||
exports.createIfMissing = function (test, testCommon) {
|
||||
test('test database open createIfMissing:false', function (t) {
|
||||
const db = testCommon.factory()
|
||||
let async = false
|
||||
|
||||
db.open({ createIfMissing: false }, function (err) {
|
||||
t.is(err && err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
t.ok(err && /does not exist/.test(err.cause && err.cause.message), 'error is about dir not existing')
|
||||
t.ok(async, 'callback is asynchronous')
|
||||
t.end()
|
||||
})
|
||||
|
||||
async = true
|
||||
})
|
||||
|
||||
test('test database open createIfMissing:false via constructor', function (t) {
|
||||
const db = testCommon.factory({ createIfMissing: false })
|
||||
let async = false
|
||||
|
||||
db.open(function (err) {
|
||||
t.is(err && err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
t.ok(err && /does not exist/.test(err.cause && err.cause.message), 'error is about dir not existing')
|
||||
t.ok(async, 'callback is asynchronous')
|
||||
t.end()
|
||||
})
|
||||
|
||||
async = true
|
||||
})
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.createIfMissing(test, testCommon)
|
||||
}
|
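For persistent stores, createIfMissing: false turns a missing database directory into an open error, which is what the tests above assert via err.code and err.cause. A consumer-side sketch; classic-level is an assumption here, chosen because the option is a no-op for purely in-memory stores:

const { ClassicLevel } = require('classic-level') // assumption, not part of this package

async function openExisting (location) {
  const db = new ClassicLevel(location, { createIfMissing: false })

  try {
    await db.open()
  } catch (err) {
    if (err.code === 'LEVEL_DATABASE_NOT_OPEN') {
      // The underlying reason (e.g. a missing directory) is exposed on err.cause
      console.error('could not open %s: %s', location, err.cause && err.cause.message)
    }
    throw err
  }

  return db
}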
29 node_modules/abstract-level/test/open-error-if-exists-test.js generated vendored Normal file
@@ -0,0 +1,29 @@
|
||||
'use strict'
|
||||
|
||||
exports.errorIfExists = function (test, testCommon) {
|
||||
test('test database open errorIfExists:true', function (t) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(function (err) {
|
||||
t.error(err)
|
||||
db.close(function (err) {
|
||||
t.error(err)
|
||||
|
||||
let async = false
|
||||
|
||||
db.open({ createIfMissing: false, errorIfExists: true }, function (err) {
|
||||
t.is(err && err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
t.ok(err && /exists/.test(err.cause && err.cause.message), 'error is about already existing')
|
||||
t.ok(async, 'callback is asynchronous')
|
||||
t.end()
|
||||
})
|
||||
|
||||
async = true
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.errorIfExists(test, testCommon)
|
||||
}
|
297 node_modules/abstract-level/test/open-test.js generated vendored Normal file
@@ -0,0 +1,297 @@
|
||||
'use strict'
|
||||
|
||||
const { assertAsync } = require('./util')
|
||||
|
||||
exports.open = function (test, testCommon) {
|
||||
test('test database open, no options', function (t) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
t.is(db.status, 'opening')
|
||||
|
||||
// default createIfMissing=true, errorIfExists=false
|
||||
db.open(function (err) {
|
||||
t.error(err)
|
||||
t.is(db.status, 'open')
|
||||
|
||||
db.close(function () {
|
||||
t.is(db.status, 'closed')
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
|
||||
t.is(db.status, 'opening')
|
||||
})
|
||||
|
||||
test('test database open, no options, with promise', function (t) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
t.is(db.status, 'opening')
|
||||
|
||||
// default createIfMissing=true, errorIfExists=false
|
||||
db.open().then(function () {
|
||||
t.is(db.status, 'open')
|
||||
db.close(t.end.bind(t))
|
||||
}).catch(t.fail.bind(t))
|
||||
|
||||
t.is(db.status, 'opening')
|
||||
})
|
||||
|
||||
test('test database open, options and callback', function (t) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
// default createIfMissing=true, errorIfExists=false
|
||||
db.open({}, function (err) {
|
||||
t.error(err)
|
||||
db.close(function () {
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('test database open, options with promise', function (t) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
// default createIfMissing=true, errorIfExists=false
|
||||
db.open({}).then(function () {
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
})
|
||||
|
||||
test('test database open, close and open', function (t) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(function (err) {
|
||||
t.error(err)
|
||||
|
||||
db.close(function (err) {
|
||||
t.error(err)
|
||||
t.is(db.status, 'closed')
|
||||
|
||||
db.open(function (err) {
|
||||
t.error(err)
|
||||
t.is(db.status, 'open')
|
||||
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('test database open, close and open with promise', function (t) {
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open().then(function () {
|
||||
db.close(function (err) {
|
||||
t.error(err)
|
||||
db.open().then(function () {
|
||||
db.close(function () {
|
||||
t.end()
|
||||
})
|
||||
}).catch(t.fail.bind(t))
|
||||
})
|
||||
}).catch(t.fail.bind(t))
|
||||
})
|
||||
|
||||
test('test database open and close in same tick', assertAsync.ctx(function (t) {
|
||||
t.plan(10)
|
||||
|
||||
const db = testCommon.factory()
|
||||
const order = []
|
||||
|
||||
db.open(assertAsync(function (err) {
|
||||
order.push('A')
|
||||
t.is(err && err.code, 'LEVEL_DATABASE_NOT_OPEN', 'got open() error')
|
||||
t.is(db.status, 'closed', 'is closed')
|
||||
}))
|
||||
|
||||
t.is(db.status, 'opening', 'is opening')
|
||||
|
||||
// This wins from the open() call
|
||||
db.close(assertAsync(function (err) {
|
||||
order.push('B')
|
||||
t.same(order, ['A', 'closed event', 'B'], 'order is correct')
|
||||
t.ifError(err, 'no close() error')
|
||||
t.is(db.status, 'closed', 'is closed')
|
||||
}))
|
||||
|
||||
// But open() is still in control
|
||||
t.is(db.status, 'opening', 'is still opening')
|
||||
|
||||
// Should not emit 'open', because close() wins
|
||||
db.on('open', t.fail.bind(t))
|
||||
db.on('closed', assertAsync(() => { order.push('closed event') }))
|
||||
}))
|
||||
|
||||
test('test database open, close and open in same tick', assertAsync.ctx(function (t) {
|
||||
t.plan(14)
|
||||
|
||||
const db = testCommon.factory()
|
||||
const order = []
|
||||
|
||||
db.open(assertAsync(function (err) {
|
||||
order.push('A')
|
||||
t.ifError(err, 'no open() error (1)')
|
||||
t.is(db.status, 'open', 'is open')
|
||||
}))
|
||||
|
||||
t.is(db.status, 'opening', 'is opening')
|
||||
|
||||
// This wins from the open() call
|
||||
db.close(assertAsync(function (err) {
|
||||
order.push('B')
|
||||
t.is(err && err.code, 'LEVEL_DATABASE_NOT_CLOSED')
|
||||
t.is(db.status, 'open', 'is open')
|
||||
}))
|
||||
|
||||
t.is(db.status, 'opening', 'is still opening')
|
||||
|
||||
// This wins from the close() call
|
||||
db.open(assertAsync(function (err) {
|
||||
order.push('C')
|
||||
t.same(order, ['A', 'B', 'open event', 'C'], 'callback order is the same as call order')
|
||||
t.ifError(err, 'no open() error (2)')
|
||||
t.is(db.status, 'open', 'is open')
|
||||
}))
|
||||
|
||||
// Should not emit 'closed', because open() wins
|
||||
db.on('closed', t.fail.bind(t))
|
||||
db.on('open', assertAsync(() => { order.push('open event') }))
|
||||
|
||||
t.is(db.status, 'opening', 'is still opening')
|
||||
}))
|
||||
|
||||
test('test database open if already open (sequential)', function (t) {
|
||||
t.plan(7)
|
||||
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(assertAsync(function (err) {
|
||||
t.ifError(err, 'no open() error (1)')
|
||||
t.is(db.status, 'open', 'is open')
|
||||
|
||||
db.open(assertAsync(function (err) {
|
||||
t.ifError(err, 'no open() error (2)')
|
||||
t.is(db.status, 'open', 'is open')
|
||||
}))
|
||||
|
||||
t.is(db.status, 'open', 'not reopening')
|
||||
db.on('open', t.fail.bind(t))
|
||||
assertAsync.end(t)
|
||||
}))
|
||||
|
||||
assertAsync.end(t)
|
||||
})
|
||||
|
||||
test('test database open if already opening (parallel)', assertAsync.ctx(function (t) {
|
||||
t.plan(7)
|
||||
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(assertAsync(function (err) {
|
||||
t.ifError(err, 'no open() error (1)')
|
||||
t.is(db.status, 'open')
|
||||
}))
|
||||
|
||||
db.open(assertAsync(function (err) {
|
||||
t.ifError(err, 'no open() error (2)')
|
||||
t.is(db.status, 'open')
|
||||
db.close(t.end.bind(t))
|
||||
}))
|
||||
|
||||
t.is(db.status, 'opening')
|
||||
}))
|
||||
|
||||
test('test database close if already closed', function (t) {
|
||||
t.plan(8)
|
||||
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err, 'no open() error')
|
||||
|
||||
db.close(assertAsync(function (err) {
|
||||
t.ifError(err, 'no close() error (1)')
|
||||
t.is(db.status, 'closed', 'is closed')
|
||||
|
||||
db.close(assertAsync(function (err) {
|
||||
t.ifError(err, 'no close() error (2)')
|
||||
t.is(db.status, 'closed', 'is closed')
|
||||
}))
|
||||
|
||||
t.is(db.status, 'closed', 'not reclosing')
|
||||
db.on('closed', t.fail.bind(t))
|
||||
assertAsync.end(t)
|
||||
}))
|
||||
|
||||
assertAsync.end(t)
|
||||
})
|
||||
})
|
||||
|
||||
test('test database close if new', assertAsync.ctx(function (t) {
|
||||
t.plan(5)
|
||||
|
||||
const db = testCommon.factory()
|
||||
const expectedStatus = db.supports.deferredOpen ? 'opening' : 'closed'
|
||||
|
||||
t.is(db.status, expectedStatus, 'status ok')
|
||||
|
||||
db.close(assertAsync(function (err) {
|
||||
t.ifError(err, 'no close() error')
|
||||
t.is(db.status, 'closed', 'status ok')
|
||||
}))
|
||||
|
||||
t.is(db.status, expectedStatus, 'status unchanged')
|
||||
|
||||
if (!db.supports.deferredOpen) {
|
||||
db.on('closed', t.fail.bind(t, 'should not emit closed'))
|
||||
}
|
||||
}))
|
||||
|
||||
test('test database close on open event', function (t) {
|
||||
t.plan(5)
|
||||
|
||||
const db = testCommon.factory()
|
||||
const order = []
|
||||
|
||||
db.open(function (err) {
|
||||
order.push('A')
|
||||
t.is(err && err.code, 'LEVEL_DATABASE_NOT_OPEN', 'got open() error')
|
||||
t.is(db.status, 'closed', 'is closed')
|
||||
})
|
||||
|
||||
db.on('open', function () {
|
||||
// This wins from the (still in progress) open() call
|
||||
db.close(function (err) {
|
||||
order.push('B')
|
||||
t.same(order, ['A', 'closed event', 'B'], 'order is correct')
|
||||
t.ifError(err, 'no close() error')
|
||||
t.is(db.status, 'closed', 'is closed')
|
||||
})
|
||||
})
|
||||
|
||||
db.on('closed', () => { order.push('closed event') })
|
||||
})
|
||||
|
||||
test('test passive open()', async function (t) {
|
||||
t.plan(1)
|
||||
const db = testCommon.factory()
|
||||
await db.open({ passive: true }) // OK, already opening
|
||||
await db.close()
|
||||
await db.open({ passive: true }).catch(err => {
|
||||
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
})
|
||||
await db.open()
|
||||
await db.open({ passive: true }) // OK, already open
|
||||
return db.close()
|
||||
})
|
||||
|
||||
test('test passive open(): ignored if set in constructor options', async function (t) {
|
||||
const db = testCommon.factory({ passive: true })
|
||||
await new Promise((resolve) => db.once('open', resolve))
|
||||
return db.close()
|
||||
})
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.open(test, testCommon)
|
||||
}
|
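The open tests above fix the status transitions (opening, open, closing, closed) and the rule that the last open()/close() call wins when they race in the same tick. From userland the happy path looks like this, assuming memory-level:

const { MemoryLevel } = require('memory-level') // assumption

async function lifecycle () {
  const db = new MemoryLevel()
  console.log(db.status) // 'opening' (opened lazily / deferred)

  await db.open()        // explicit open is optional and idempotent
  console.log(db.status) // 'open'

  await db.close()
  console.log(db.status) // 'closed'

  await db.open()        // reopening a closed db is allowed
  return db.close()
}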
113 node_modules/abstract-level/test/put-get-del-test.js generated vendored Normal file
@@ -0,0 +1,113 @@
|
||||
'use strict'
|
||||
|
||||
const { verifyNotFoundError } = require('./util')
|
||||
const { Buffer } = require('buffer')
|
||||
|
||||
let db
|
||||
|
||||
function makeTest (test, type, key, value, expectedResult) {
|
||||
const hasExpectedResult = arguments.length === 5
|
||||
test('test put()/get()/del() with ' + type, function (t) {
|
||||
db.put(key, value, function (err) {
|
||||
t.error(err)
|
||||
db.get(key, function (err, _value) {
|
||||
t.error(err, 'no error, has key/value for `' + type + '`')
|
||||
|
||||
let result = _value
|
||||
|
||||
if (hasExpectedResult) {
|
||||
t.equal(result.toString(), expectedResult)
|
||||
} else {
|
||||
if (result != null) { result = _value.toString() }
|
||||
if (value != null) { value = value.toString() }
|
||||
t.equals(result, value)
|
||||
}
|
||||
db.del(key, function (err) {
|
||||
t.error(err, 'no error, deleted key/value for `' + type + '`')
|
||||
|
||||
let async = false
|
||||
|
||||
db.get(key, function (err, value) {
|
||||
t.ok(err, 'entry properly deleted')
|
||||
t.ok(verifyNotFoundError(err), 'correct error')
|
||||
t.is(value, undefined, 'value is undefined')
|
||||
t.ok(async, 'callback is asynchronous')
|
||||
t.end()
|
||||
})
|
||||
|
||||
async = true
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
exports.setUp = function (test, testCommon) {
|
||||
test('setUp db', function (t) {
|
||||
db = testCommon.factory()
|
||||
db.open(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
exports.nonErrorKeys = function (test, testCommon) {
|
||||
// valid falsey keys
|
||||
makeTest(test, '`0` key', 0, 'foo 0')
|
||||
makeTest(test, 'empty string key', '', 'foo')
|
||||
|
||||
// standard String key
|
||||
makeTest(
|
||||
test
|
||||
, 'long String key'
|
||||
, 'some long string that I\'m using as a key for this unit test, cross your fingers human, we\'re going in!'
|
||||
, 'foo'
|
||||
)
|
||||
|
||||
if (testCommon.supports.encodings.buffer) {
|
||||
makeTest(test, 'Buffer key', Buffer.from('0080c0ff', 'hex'), 'foo')
|
||||
makeTest(test, 'empty Buffer key', Buffer.alloc(0), 'foo')
|
||||
}
|
||||
|
||||
// non-empty Array as a key
|
||||
makeTest(test, 'Array key', [1, 2, 3, 4], 'foo')
|
||||
}
|
||||
|
||||
exports.nonErrorValues = function (test, testCommon) {
|
||||
// valid falsey values
|
||||
makeTest(test, '`false` value', 'foo false', false)
|
||||
makeTest(test, '`0` value', 'foo 0', 0)
|
||||
makeTest(test, '`NaN` value', 'foo NaN', NaN)
|
||||
|
||||
// all of the following result in an empty-string value:
|
||||
makeTest(test, 'empty String value', 'foo', '', '')
|
||||
makeTest(test, 'empty Buffer value', 'foo', Buffer.alloc(0), '')
|
||||
makeTest(test, 'empty Array value', 'foo', [], '')
|
||||
|
||||
// String value
|
||||
makeTest(
|
||||
test
|
||||
, 'long String value'
|
||||
, 'foo'
|
||||
, 'some long string that I\'m using as a key for this unit test, cross your fingers human, we\'re going in!'
|
||||
)
|
||||
|
||||
// Buffer value
|
||||
if (testCommon.supports.encodings.buffer) {
|
||||
makeTest(test, 'Buffer value', 'foo', Buffer.from('0080c0ff', 'hex'))
|
||||
}
|
||||
|
||||
// non-empty Array as a value
|
||||
makeTest(test, 'Array value', 'foo', [1, 2, 3, 4])
|
||||
}
|
||||
|
||||
exports.tearDown = function (test, testCommon) {
|
||||
test('tearDown', function (t) {
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.setUp(test, testCommon)
|
||||
exports.nonErrorKeys(test, testCommon)
|
||||
exports.nonErrorValues(test, testCommon)
|
||||
exports.tearDown(test, testCommon)
|
||||
}
|
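put-get-del-test.js round-trips unusual but valid keys and values. The same flow in promise style, assuming memory-level with the default utf8 encodings:

const { MemoryLevel } = require('memory-level') // assumption

async function roundTrip () {
  const db = new MemoryLevel()

  await db.put('greeting', 'hello')
  console.log(await db.get('greeting')) // 'hello'

  await db.del('greeting')
  try {
    await db.get('greeting')
  } catch (err) {
    console.log(err.code) // 'LEVEL_NOT_FOUND'
  }

  return db.close()
}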
131 node_modules/abstract-level/test/put-test.js generated vendored Normal file
@@ -0,0 +1,131 @@
|
||||
'use strict'
|
||||
|
||||
const { assertAsync, illegalKeys, illegalValues } = require('./util')
|
||||
|
||||
let db
|
||||
|
||||
exports.setUp = function (test, testCommon) {
|
||||
test('setUp db', function (t) {
|
||||
db = testCommon.factory()
|
||||
db.open(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
exports.args = function (test, testCommon) {
|
||||
test('test put() with illegal keys', assertAsync.ctx(function (t) {
|
||||
t.plan(illegalKeys.length * 5)
|
||||
|
||||
for (const { name, key } of illegalKeys) {
|
||||
db.put(key, 'value', assertAsync(function (err) {
|
||||
t.ok(err instanceof Error, name + ' - is Error (callback)')
|
||||
t.is(err && err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code (callback)')
|
||||
}))
|
||||
|
||||
db.put(key, 'value').catch(function (err) {
|
||||
t.ok(err instanceof Error, name + ' - is Error (promise)')
|
||||
t.is(err.code, 'LEVEL_INVALID_KEY', name + ' - correct error code (promise)')
|
||||
})
|
||||
}
|
||||
}))
|
||||
|
||||
test('test put() with illegal values', assertAsync.ctx(function (t) {
|
||||
t.plan(illegalValues.length * 5)
|
||||
|
||||
for (const { name, value } of illegalValues) {
|
||||
db.put('key', value, assertAsync(function (err) {
|
||||
t.ok(err instanceof Error, name + ' - is Error (callback)')
|
||||
t.is(err && err.code, 'LEVEL_INVALID_VALUE', name + ' - correct error code (callback)')
|
||||
}))
|
||||
|
||||
db.put('key', value).catch(function (err) {
|
||||
t.ok(err instanceof Error, name + ' - is Error (promise)')
|
||||
t.is(err.code, 'LEVEL_INVALID_VALUE', name + ' - correct error code (promise)')
|
||||
})
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
exports.put = function (test, testCommon) {
|
||||
test('test simple put()', assertAsync.ctx(function (t) {
|
||||
t.plan(7)
|
||||
|
||||
db.put('foo', 'bar', assertAsync(function (err) {
|
||||
t.ifError(err, 'no put() error')
|
||||
|
||||
db.get('foo', function (err, value) {
|
||||
t.ifError(err, 'no get() error')
|
||||
t.is(value, 'bar')
|
||||
|
||||
db.put('foo', 'new', function (err) {
|
||||
t.ifError(err, 'no put() error')
|
||||
|
||||
db.get('foo', function (err, value) {
|
||||
t.ifError(err, 'no get() error')
|
||||
t.is(value, 'new', 'value was overwritten')
|
||||
})
|
||||
})
|
||||
})
|
||||
}))
|
||||
}))
|
||||
|
||||
test('test simple put() with promise', async function (t) {
|
||||
await db.put('foo2', 'bar')
|
||||
t.is(await db.get('foo2'), 'bar')
|
||||
})
|
||||
|
||||
test('test deferred put()', assertAsync.ctx(function (t) {
|
||||
t.plan(5)
|
||||
|
||||
const db = testCommon.factory()
|
||||
|
||||
db.put('foo', 'bar', assertAsync(function (err) {
|
||||
t.ifError(err, 'no put() error')
|
||||
|
||||
db.get('foo', { valueEncoding: 'utf8' }, function (err, value) {
|
||||
t.ifError(err, 'no get() error')
|
||||
t.is(value, 'bar', 'value is ok')
|
||||
db.close(t.ifError.bind(t))
|
||||
})
|
||||
}))
|
||||
}))
|
||||
|
||||
test('test deferred put() with promise', async function (t) {
|
||||
const db = testCommon.factory()
|
||||
await db.put('foo', 'bar')
|
||||
t.is(await db.get('foo', { valueEncoding: 'utf8' }), 'bar', 'value is ok')
|
||||
return db.close()
|
||||
})
|
||||
}
|
||||
|
||||
exports.events = function (test, testCommon) {
|
||||
test('test put() emits put event', async function (t) {
|
||||
t.plan(3)
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
|
||||
t.ok(db.supports.events.put)
|
||||
|
||||
db.on('put', function (key, value) {
|
||||
t.is(key, 123)
|
||||
t.is(value, 'b')
|
||||
})
|
||||
|
||||
await db.put(123, 'b')
|
||||
await db.close()
|
||||
})
|
||||
}
|
||||
|
||||
exports.tearDown = function (test, testCommon) {
|
||||
test('tearDown', function (t) {
|
||||
db.close(t.end.bind(t))
|
||||
})
|
||||
}
|
||||
|
||||
exports.all = function (test, testCommon) {
|
||||
exports.setUp(test, testCommon)
|
||||
exports.args(test, testCommon)
|
||||
exports.put(test, testCommon)
|
||||
exports.events(test, testCommon)
|
||||
exports.tearDown(test, testCommon)
|
||||
}
|
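put-test.js also covers deferred writes and the 'put' event. A sketch of both from the consumer side, assuming memory-level; calls made while the db is still 'opening' are queued and replayed once it opens:

const { MemoryLevel } = require('memory-level') // assumption

async function deferredPut () {
  const db = new MemoryLevel()

  db.on('put', (key, value) => {
    console.log('wrote', key, value) // receives the key and value as passed in
  })

  // No explicit open(): this put() is deferred until the database has opened.
  await db.put('foo', 'bar')
  console.log(await db.get('foo')) // 'bar'

  return db.close()
}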
1080 node_modules/abstract-level/test/self.js generated vendored Normal file
File diff suppressed because it is too large
263 node_modules/abstract-level/test/self/abstract-iterator-test.js generated vendored Normal file
@@ -0,0 +1,263 @@
|
||||
'use strict'
|
||||
|
||||
const test = require('tape')
|
||||
const { AbstractLevel, AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../..')
|
||||
|
||||
const testCommon = require('../common')({
|
||||
test,
|
||||
factory: function () {
|
||||
return new AbstractLevel({ encodings: { utf8: true } })
|
||||
}
|
||||
})
|
||||
|
||||
for (const Ctor of [AbstractIterator, AbstractKeyIterator, AbstractValueIterator]) {
|
||||
test(`test ${Ctor.name} extensibility`, function (t) {
|
||||
const Test = class TestIterator extends Ctor {}
|
||||
const db = testCommon.factory()
|
||||
const test = new Test(db, {})
|
||||
t.ok(test.db === db, 'instance has db reference')
|
||||
t.end()
|
||||
})
|
||||
|
||||
test(`${Ctor.name} throws on invalid db argument`, function (t) {
|
||||
t.plan(4 * 2)
|
||||
|
||||
for (const args of [[], [null], [undefined], 'foo']) {
|
||||
const hint = args[0] === null ? 'null' : typeof args[0]
|
||||
|
||||
try {
|
||||
// eslint-disable-next-line no-new
|
||||
new Ctor(...args)
|
||||
} catch (err) {
|
||||
t.is(err.name, 'TypeError')
|
||||
t.is(err.message, 'The first argument must be an abstract-level database, received ' + hint)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
test(`${Ctor.name} throws on invalid options argument`, function (t) {
|
||||
t.plan(4 * 2)
|
||||
|
||||
for (const args of [[], [null], [undefined], 'foo']) {
|
||||
try {
|
||||
// eslint-disable-next-line no-new
|
||||
new Ctor({}, ...args)
|
||||
} catch (err) {
|
||||
t.is(err.name, 'TypeError')
|
||||
t.is(err.message, 'The second argument must be an options object')
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
test(`${Ctor.name}.next() extensibility`, async function (t) {
|
||||
t.plan(3)
|
||||
|
||||
class TestIterator extends Ctor {
|
||||
_next (callback) {
|
||||
t.is(this, it, 'thisArg on _next() was correct')
|
||||
t.is(arguments.length, 1, 'got one argument')
|
||||
t.is(typeof callback, 'function', 'got a callback function')
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
const it = new TestIterator(db, {})
|
||||
await it.next()
|
||||
await db.close()
|
||||
})
|
||||
|
||||
test(`${Ctor.name}.next() throws on invalid callback argument`, async function (t) {
|
||||
t.plan(3 * 2)
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
|
||||
for (const invalid of [{}, null, 'foo']) {
|
||||
const it = new Ctor(db, {})
|
||||
|
||||
try {
|
||||
it.next(invalid)
|
||||
} catch (err) {
|
||||
t.is(err.name, 'TypeError')
|
||||
t.is(err.message, 'Callback must be a function')
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
test(`${Ctor.name}.nextv() extensibility`, async function (t) {
|
||||
t.plan(5 * 2)
|
||||
|
||||
class TestIterator extends Ctor {
|
||||
_nextv (size, options, callback) {
|
||||
t.is(this, it, 'thisArg on _nextv() was correct')
|
||||
t.is(arguments.length, 3, 'got 3 arguments')
|
||||
t.is(size, 100)
|
||||
t.same(options, {}, 'empty options')
|
||||
t.is(typeof callback, 'function', 'got a callback function')
|
||||
this.nextTick(callback, null, [])
|
||||
}
|
||||
}
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
const it = new TestIterator(db, {})
|
||||
await it.nextv(100)
|
||||
await it.nextv(100, {})
|
||||
await db.close()
|
||||
})
|
||||
|
||||
test(`${Ctor.name}.nextv() extensibility (options)`, async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
class TestIterator extends Ctor {
|
||||
_nextv (size, options, callback) {
|
||||
t.is(size, 100)
|
||||
t.same(options, { foo: 123 }, 'got options')
|
||||
this.nextTick(callback, null, [])
|
||||
}
|
||||
}
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
const it = new TestIterator(db, {})
|
||||
await it.nextv(100, { foo: 123 })
|
||||
await db.close()
|
||||
})
|
||||
|
||||
test(`${Ctor.name}.nextv() throws on invalid callback argument`, async function (t) {
|
||||
t.plan(3 * 2)
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
|
||||
for (const invalid of [{}, null, 'foo']) {
|
||||
const it = new Ctor(db, {})
|
||||
|
||||
try {
|
||||
it.nextv(100, {}, invalid)
|
||||
} catch (err) {
|
||||
t.is(err.name, 'TypeError')
|
||||
t.is(err.message, 'Callback must be a function')
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
test(`${Ctor.name}.all() extensibility`, async function (t) {
|
||||
t.plan(2 * 4)
|
||||
|
||||
for (const args of [[], [{}]]) {
|
||||
class TestIterator extends Ctor {
|
||||
_all (options, callback) {
|
||||
t.is(this, it, 'thisArg on _all() was correct')
|
||||
t.is(arguments.length, 2, 'got 2 arguments')
|
||||
t.same(options, {}, 'empty options')
|
||||
t.is(typeof callback, 'function', 'got a callback function')
|
||||
this.nextTick(callback, null, [])
|
||||
}
|
||||
}
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
const it = new TestIterator(db, {})
|
||||
await it.all(...args)
|
||||
await db.close()
|
||||
}
|
||||
})
|
||||
|
||||
test(`${Ctor.name}.all() extensibility (options)`, async function (t) {
|
||||
t.plan(1)
|
||||
|
||||
class TestIterator extends Ctor {
|
||||
_all (options, callback) {
|
||||
t.same(options, { foo: 123 }, 'got options')
|
||||
this.nextTick(callback, null, [])
|
||||
}
|
||||
}
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
const it = new TestIterator(db, {})
|
||||
await it.all({ foo: 123 })
|
||||
await db.close()
|
||||
})
|
||||
|
||||
test(`${Ctor.name}.all() throws on invalid callback argument`, async function (t) {
|
||||
t.plan(3 * 2)
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
|
||||
for (const invalid of [{}, null, 'foo']) {
|
||||
const it = new Ctor(db, {})
|
||||
|
||||
try {
|
||||
it.all({}, invalid)
|
||||
} catch (err) {
|
||||
t.is(err.name, 'TypeError')
|
||||
t.is(err.message, 'Callback must be a function')
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
test(`${Ctor.name}.close() extensibility`, async function (t) {
|
||||
t.plan(3)
|
||||
|
||||
class TestIterator extends Ctor {
|
||||
_close (callback) {
|
||||
t.is(this, it, 'thisArg on _close() was correct')
|
||||
t.is(arguments.length, 1, 'got one argument')
|
||||
t.is(typeof callback, 'function', 'got a callback function')
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
const it = new TestIterator(db, {})
|
||||
await it.close()
|
||||
await db.close()
|
||||
})
|
||||
|
||||
test(`${Ctor.name}.close() throws on invalid callback argument`, async function (t) {
|
||||
t.plan(3 * 2)
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
|
||||
for (const invalid of [{}, null, 'foo']) {
|
||||
const it = new Ctor(db, {})
|
||||
|
||||
try {
|
||||
it.close(invalid)
|
||||
} catch (err) {
|
||||
t.is(err.name, 'TypeError')
|
||||
t.is(err.message, 'Callback must be a function')
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
test('AbstractIterator throws when accessing legacy properties', async function (t) {
|
||||
t.plan(3 * 2)
|
||||
|
||||
const db = testCommon.factory()
|
||||
await db.open()
|
||||
const it = new AbstractIterator(db, {})
|
||||
|
||||
for (const k of ['_ended property', '_nexting property', '_end method']) {
|
||||
try {
|
||||
// eslint-disable-next-line no-unused-expressions
|
||||
it[k.split(' ')[0]]
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_LEGACY')
|
||||
}
|
||||
|
||||
try {
|
||||
it[k.split(' ')[0]] = 123
|
||||
} catch (err) {
|
||||
t.is(err.code, 'LEVEL_LEGACY')
|
||||
}
|
||||
}
|
||||
})
|
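The extensibility tests above drive the private _next()/_nextv()/_all()/_close() hooks. A minimal implementor-side sketch, not taken from this package, of an iterator that yields a fixed list of entries and signals its end with an undefined key and value:

const { AbstractLevel, AbstractIterator } = require('abstract-level')

class ArrayIterator extends AbstractIterator {
  constructor (db, options, entries) {
    super(db, options)
    this._entries = entries.slice()
  }

  _next (callback) {
    // Yielding (undefined, undefined) tells abstract-level the iterator is done
    const [key, value] = this._entries.shift() || []
    this.nextTick(callback, null, key, value)
  }
}

class ArrayLevel extends AbstractLevel {
  constructor () {
    super({ encodings: { utf8: true } })
  }

  _iterator (options) {
    return new ArrayIterator(this, options, [['a', '1'], ['b', '2']])
  }
}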
213 node_modules/abstract-level/test/self/async-iterator-test.js generated vendored Normal file
@@ -0,0 +1,213 @@
|
||||
'use strict'
|
||||
|
||||
const test = require('tape')
|
||||
const { AbstractLevel, AbstractIterator } = require('../..')
|
||||
const { DeferredIterator, DeferredKeyIterator, DeferredValueIterator } = require('../../lib/deferred-iterator')
|
||||
|
||||
function withIterator (methods) {
|
||||
class TestIterator extends AbstractIterator { }
|
||||
|
||||
for (const k in methods) {
|
||||
TestIterator.prototype[k] = methods[k]
|
||||
}
|
||||
|
||||
class Test extends AbstractLevel {
|
||||
_iterator (options) {
|
||||
return new TestIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
return new Test({ encodings: { utf8: true } })
|
||||
}
|
||||
|
||||
for (const mode of ['iterator', 'keys', 'values']) {
|
||||
for (const type of ['explicit', 'deferred']) {
|
||||
const verify = function (t, db, it) {
|
||||
t.is(db.status, type === 'explicit' ? 'open' : 'opening')
|
||||
|
||||
if (type === 'explicit') {
|
||||
t.is(
|
||||
it.constructor.name,
|
||||
mode === 'iterator' ? 'TestIterator' : mode === 'keys' ? 'DefaultKeyIterator' : 'DefaultValueIterator'
|
||||
)
|
||||
} else {
|
||||
t.is(
|
||||
it.constructor,
|
||||
mode === 'iterator' ? DeferredIterator : mode === 'keys' ? DeferredKeyIterator : DeferredValueIterator
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
test(`for await...of ${mode}() (${type} open)`, async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const input = [{ key: '1', value: '1' }, { key: '2', value: '2' }]
|
||||
const output = []
|
||||
|
||||
const db = withIterator({
|
||||
_next (callback) {
|
||||
const { key, value } = input[n++] || []
|
||||
this.nextTick(callback, null, key, value)
|
||||
},
|
||||
|
||||
_close (callback) {
|
||||
this.nextTick(function () {
|
||||
closed = true
|
||||
callback()
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
if (type === 'explicit') await db.open()
|
||||
const it = db[mode]({ keyEncoding: 'utf8', valueEncoding: 'utf8' })
|
||||
verify(t, db, it)
|
||||
|
||||
let n = 0
|
||||
let closed = false
|
||||
|
||||
for await (const item of it) {
|
||||
output.push(item)
|
||||
}
|
||||
|
||||
t.same(output, input.map(x => mode === 'iterator' ? [x.key, x.value] : mode === 'keys' ? x.key : x.value))
|
||||
t.ok(closed, 'closed')
|
||||
})
|
||||
|
||||
test(`for await...of ${mode}() closes on user error (${type} open)`, async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = withIterator({
|
||||
_next (callback) {
|
||||
this.nextTick(callback, null, n.toString(), n.toString())
|
||||
if (n++ > 10) throw new Error('Infinite loop')
|
||||
},
|
||||
|
||||
_close (callback) {
|
||||
this.nextTick(function () {
|
||||
closed = true
|
||||
callback(new Error('close error'))
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
if (type === 'explicit') await db.open()
|
||||
const it = db[mode]()
|
||||
verify(t, db, it)
|
||||
|
||||
let n = 0
|
||||
let closed = false
|
||||
|
||||
try {
|
||||
// eslint-disable-next-line no-unused-vars, no-unreachable-loop
|
||||
for await (const kv of it) {
|
||||
throw new Error('user error')
|
||||
}
|
||||
} catch (err) {
|
||||
t.is(err.message, 'user error')
|
||||
t.ok(closed, 'closed')
|
||||
}
|
||||
})
|
||||
|
||||
test(`for await...of ${mode}() closes on iterator error (${type} open)`, async function (t) {
|
||||
t.plan(5)
|
||||
|
||||
const db = withIterator({
|
||||
_next (callback) {
|
||||
t.pass('nexted')
|
||||
this.nextTick(callback, new Error('iterator error'))
|
||||
},
|
||||
|
||||
_close (callback) {
|
||||
this.nextTick(function () {
|
||||
closed = true
|
||||
callback()
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
if (type === 'explicit') await db.open()
|
||||
const it = db[mode]()
|
||||
verify(t, db, it)
|
||||
|
||||
let closed = false
|
||||
|
||||
try {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
for await (const kv of it) {
|
||||
t.fail('should not yield items')
|
||||
}
|
||||
} catch (err) {
|
||||
t.is(err.message, 'iterator error')
|
||||
t.ok(closed, 'closed')
|
||||
}
|
||||
})
|
||||
|
||||
test(`for await...of ${mode}() closes on user break (${type} open)`, async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = withIterator({
|
||||
_next (callback) {
|
||||
this.nextTick(callback, null, n.toString(), n.toString())
|
||||
if (n++ > 10) throw new Error('Infinite loop')
|
||||
},
|
||||
|
||||
_close (callback) {
|
||||
this.nextTick(function () {
|
||||
closed = true
|
||||
callback()
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
if (type === 'explicit') await db.open()
|
||||
const it = db[mode]()
|
||||
verify(t, db, it)
|
||||
|
||||
let n = 0
|
||||
let closed = false
|
||||
|
||||
// eslint-disable-next-line no-unused-vars, no-unreachable-loop
|
||||
for await (const kv of it) {
|
||||
t.pass('got a chance to break')
|
||||
break
|
||||
}
|
||||
|
||||
t.ok(closed, 'closed')
|
||||
})
|
||||
|
||||
test(`for await...of ${mode}() closes on user return (${type} open)`, async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = withIterator({
|
||||
_next (callback) {
|
||||
this.nextTick(callback, null, n.toString(), n.toString())
|
||||
if (n++ > 10) throw new Error('Infinite loop')
|
||||
},
|
||||
|
||||
_close (callback) {
|
||||
this.nextTick(function () {
|
||||
closed = true
|
||||
callback()
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
if (type === 'explicit') await db.open()
|
||||
const it = db[mode]()
|
||||
verify(t, db, it)
|
||||
|
||||
let n = 0
|
||||
let closed = false
|
||||
|
||||
await (async () => {
|
||||
// eslint-disable-next-line no-unused-vars, no-unreachable-loop
|
||||
for await (const kv of it) {
|
||||
t.pass('got a chance to return')
|
||||
return
|
||||
}
|
||||
})()
|
||||
|
||||
t.ok(closed, 'closed')
|
||||
})
|
||||
}
|
||||
}
|
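The async iterator tests above verify that for await...of always closes the underlying iterator, whether the loop completes, breaks, returns or throws. Consumer-side, that guarantee means no manual close() is needed; assuming memory-level:

const { MemoryLevel } = require('memory-level') // assumption

async function printEntries () {
  const db = new MemoryLevel()
  await db.put('a', '1')
  await db.put('b', '2')

  // The iterator is closed automatically, even if we break or an error is thrown.
  for await (const [key, value] of db.iterator()) {
    console.log(key, value)
    if (key === 'b') break
  }

  return db.close()
}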
75 node_modules/abstract-level/test/self/attach-resource-test.js generated vendored Normal file
@@ -0,0 +1,75 @@
|
||||
'use strict'
|
||||
|
||||
const test = require('tape')
|
||||
const { mockLevel } = require('../util')
|
||||
const nextTick = require('../../lib/next-tick')
|
||||
|
||||
test('resource must be an object with a close() method', function (t) {
|
||||
t.plan(5)
|
||||
|
||||
const db = mockLevel()
|
||||
|
||||
for (const invalid of [null, undefined, {}, { close: 123 }]) {
|
||||
try {
|
||||
db.attachResource(invalid)
|
||||
} catch (err) {
|
||||
t.is(err && err.message, 'The first argument must be a resource object')
|
||||
}
|
||||
}
|
||||
|
||||
db.close(t.ifError.bind(t))
|
||||
})
|
||||
|
||||
test('resource is closed on failed open', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const db = mockLevel({
|
||||
_open: function (options, callback) {
|
||||
t.pass('opened')
|
||||
this.nextTick(callback, new Error('_open error'))
|
||||
}
|
||||
})
|
||||
|
||||
const resource = {
|
||||
close: function (cb) {
|
||||
// Note: resource shouldn't care about db.status
|
||||
t.pass('closed')
|
||||
nextTick(cb)
|
||||
}
|
||||
}
|
||||
|
||||
db.attachResource(resource)
|
||||
})
|
||||
|
||||
test('resource is closed on db.close()', function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const db = mockLevel()
|
||||
|
||||
const resource = {
|
||||
close: function (cb) {
|
||||
// Note: resource shouldn't care about db.status
|
||||
t.pass('closed')
|
||||
nextTick(cb)
|
||||
}
|
||||
}
|
||||
|
||||
db.attachResource(resource)
|
||||
db.close(t.ifError.bind(t))
|
||||
})
|
||||
|
||||
test('resource is not closed on db.close() if detached', function (t) {
|
||||
t.plan(1)
|
||||
|
||||
const db = mockLevel()
|
||||
|
||||
const resource = {
|
||||
close: function (cb) {
|
||||
t.fail('should not be called')
|
||||
}
|
||||
}
|
||||
|
||||
db.attachResource(resource)
|
||||
db.detachResource(resource)
|
||||
db.close(t.ifError.bind(t))
|
||||
})
|
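attachResource() and detachResource() tie the lifetime of a helper object to the database: the tests above show it is closed on db.close() and on a failed open, unless detached first. A hypothetical sketch; the only contract is an object with a close(callback) method:

// db is any abstract-level instance; the connection object below is hypothetical.
function trackConnection (db, connection) {
  const resource = {
    close (callback) {
      // Release sockets, file handles, timers, ...
      connection.destroy()
      process.nextTick(callback)
    }
  }

  db.attachResource(resource)

  // Call this to take ownership back before the db closes.
  return () => db.detachResource(resource)
}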
90 node_modules/abstract-level/test/self/defer-test.js generated vendored Normal file
@@ -0,0 +1,90 @@
|
||||
'use strict'
|
||||
|
||||
const test = require('tape')
|
||||
const { mockLevel } = require('../util')
|
||||
|
||||
test('defer() requires valid function argument', function (t) {
|
||||
t.plan(7)
|
||||
|
||||
const db = mockLevel()
|
||||
|
||||
for (const invalid of [123, true, false, null, undefined, {}]) {
|
||||
try {
|
||||
db.defer(invalid)
|
||||
} catch (err) {
|
||||
t.is(err.message, 'The first argument must be a function')
|
||||
}
|
||||
}
|
||||
|
||||
db.close(t.ifError.bind(t))
|
||||
})
|
||||
|
||||
test('defer() custom operation', function (t) {
|
||||
t.plan(6)
|
||||
|
||||
const db = mockLevel({
|
||||
custom (arg, callback) {
|
||||
if (this.status === 'opening') {
|
||||
t.is(arg, 123)
|
||||
this.defer(() => this.custom(456, callback))
|
||||
} else {
|
||||
t.is(db.status, 'open')
|
||||
t.is(arg, 456)
|
||||
this.nextTick(callback, null, 987)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
db.custom(123, function (err, result) {
|
||||
t.ifError(err, 'no custom() error')
|
||||
t.is(result, 987, 'result ok')
|
||||
|
||||
db.close(t.ifError.bind(t))
|
||||
})
|
||||
})
|
||||
|
||||
test('defer() custom operation with failed open', function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = mockLevel({
|
||||
_open (options, callback) {
|
||||
t.pass('opened')
|
||||
this.nextTick(callback, new Error('_open error'))
|
||||
},
|
||||
custom (arg, callback) {
|
||||
if (this.status === 'opening') {
|
||||
this.defer(() => this.custom(arg, callback))
|
||||
} else {
|
||||
t.is(db.status, 'closed')
|
||||
this.nextTick(callback, new Error('Database is not open (x)'))
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
db.custom(123, function (err, result) {
|
||||
t.is(err && err.message, 'Database is not open (x)')
|
||||
t.is(result, undefined, 'result ok')
|
||||
})
|
||||
})
|
||||
|
||||
test('defer() can drop custom synchronous operation', function (t) {
|
||||
t.plan(3)
|
||||
|
||||
const db = mockLevel({
|
||||
_open (options, callback) {
|
||||
t.pass('opened')
|
||||
this.nextTick(callback, new Error('_open error'))
|
||||
},
|
||||
custom (arg) {
|
||||
if (this.status === 'opening') {
|
||||
this.defer(() => this.custom(arg * 2))
|
||||
} else {
|
||||
// Handling other states is a userland responsibility
|
||||
t.is(db.status, 'closed')
|
||||
t.is(arg, 246)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
db.custom(123)
|
||||
})
|
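defer() is how an implementation queues a custom operation while status is still 'opening', exactly as exercised by defer-test.js. A minimal sketch of a custom method built on it; the getApproximateCount() method and its counter are hypothetical:

const { AbstractLevel } = require('abstract-level')

class CountingLevel extends AbstractLevel {
  constructor () {
    super({ encodings: { utf8: true } })
    this._count = 0 // hypothetical bookkeeping
  }

  // Hypothetical custom operation that must wait until the db has opened
  getApproximateCount (callback) {
    if (this.status === 'opening') {
      this.defer(() => this.getApproximateCount(callback))
    } else {
      // Handling 'closed' (e.g. after a failed open) is a userland concern
      this.nextTick(callback, null, this._count)
    }
  }
}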
98 node_modules/abstract-level/test/self/deferred-chained-batch-test.js generated vendored Normal file
@@ -0,0 +1,98 @@
|
||||
'use strict'
|
||||
|
||||
const test = require('tape')
|
||||
const { mockLevel } = require('../util')
|
||||
const { DefaultChainedBatch } = require('../../lib/default-chained-batch')
|
||||
const identity = (v) => v
|
||||
|
||||
// NOTE: adapted from deferred-leveldown
|
||||
test('deferred chained batch encodes once', function (t) {
|
||||
t.plan(9)
|
||||
|
||||
let called = false
|
||||
|
||||
const keyEncoding = {
|
||||
format: 'utf8',
|
||||
encode (key) {
|
||||
t.is(called, false, 'not yet called')
|
||||
t.is(key, 'foo')
|
||||
return key.toUpperCase()
|
||||
},
|
||||
decode: identity
|
||||
}
|
||||
|
||||
const valueEncoding = {
|
||||
format: 'utf8',
|
||||
encode (value) {
|
||||
t.is(called, false, 'not yet called')
|
||||
t.is(value, 'bar')
|
||||
return value.toUpperCase()
|
||||
},
|
||||
decode: identity
|
||||
}
|
||||
|
||||
const db = mockLevel({
|
||||
_batch: function (array, options, callback) {
|
||||
called = true
|
||||
t.is(array[0] && array[0].key, 'FOO')
|
||||
t.is(array[0] && array[0].value, 'BAR')
|
||||
this.nextTick(callback)
|
||||
},
|
||||
_open: function (options, callback) {
|
||||
t.is(called, false, 'not yet called')
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}, { encodings: { utf8: true } }, {
|
||||
keyEncoding,
|
||||
valueEncoding
|
||||
})
|
||||
|
||||
db.once('open', function () {
|
||||
t.is(called, true, 'called')
|
||||
})
|
||||
|
||||
db.batch().put('foo', 'bar').write(function (err) {
|
||||
t.ifError(err, 'no write() error')
|
||||
})
|
||||
})
|
||||
|
||||
test('deferred chained batch is closed upon failed open', function (t) {
|
||||
t.plan(6)
|
||||
|
||||
const db = mockLevel({
|
||||
_open (options, callback) {
|
||||
t.pass('opening')
|
||||
this.nextTick(callback, new Error('_open error'))
|
||||
},
|
||||
_batch () {
|
||||
t.fail('should not be called')
|
||||
}
|
||||
})
|
||||
|
||||
const batch = db.batch()
|
||||
t.ok(batch instanceof DefaultChainedBatch)
|
||||
|
||||
batch.put('foo', 'bar')
|
||||
batch.del('123')
|
||||
|
||||
batch.write(function (err) {
|
||||
t.is(err && err.code, 'LEVEL_BATCH_NOT_OPEN')
|
||||
|
||||
// Should account for userland code that ignores errors
|
||||
try {
|
||||
batch.put('beep', 'boop')
|
||||
} catch (err) {
|
||||
t.is(err && err.code, 'LEVEL_BATCH_NOT_OPEN')
|
||||
}
|
||||
|
||||
try {
|
||||
batch.del('456')
|
||||
} catch (err) {
|
||||
t.is(err && err.code, 'LEVEL_BATCH_NOT_OPEN')
|
||||
}
|
||||
|
||||
batch.write(function (err) {
|
||||
t.is(err && err.code, 'LEVEL_BATCH_NOT_OPEN')
|
||||
})
|
||||
})
|
||||
})
|
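The deferred chained batch test above checks that keys and values are encoded exactly once and that a failed open closes the batch with LEVEL_BATCH_NOT_OPEN. From the consumer side a chained batch is just queue-then-write, assuming memory-level:

const { MemoryLevel } = require('memory-level') // assumption

async function writeBatch () {
  const db = new MemoryLevel()

  // put() and del() only queue operations; nothing is written until write().
  await db.batch()
    .put('foo', 'bar')
    .del('stale-key')
    .write()

  return db.close()
}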
298 node_modules/abstract-level/test/self/deferred-iterator-test.js generated vendored Normal file
@@ -0,0 +1,298 @@
|
||||
'use strict'
|
||||
|
||||
const test = require('tape')
|
||||
const { DeferredIterator, DeferredKeyIterator, DeferredValueIterator } = require('../../lib/deferred-iterator')
|
||||
const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../..')
|
||||
const { mockLevel } = require('../util')
|
||||
const noop = () => {}
|
||||
const identity = (v) => v
|
||||
|
||||
for (const mode of ['iterator', 'keys', 'values']) {
|
||||
const RealCtor = mode === 'iterator' ? AbstractIterator : mode === 'keys' ? AbstractKeyIterator : AbstractValueIterator
|
||||
const DeferredCtor = mode === 'iterator' ? DeferredIterator : mode === 'keys' ? DeferredKeyIterator : DeferredValueIterator
|
||||
const nextArgs = mode === 'iterator' ? ['key', 'value'] : mode === 'keys' ? ['key'] : ['value']
|
||||
const privateMethod = '_' + mode
|
||||
const publicMethod = mode
|
||||
|
||||
// NOTE: adapted from deferred-leveldown
|
||||
test(`deferred ${mode}()`, function (t) {
|
||||
t.plan(8)
|
||||
|
||||
const keyEncoding = {
|
||||
format: 'utf8',
|
||||
encode (key) {
|
||||
t.is(key, 'foo', 'encoding got key')
|
||||
return key.toUpperCase()
|
||||
},
|
||||
decode: identity
|
||||
}
|
||||
|
||||
class MockIterator extends RealCtor {
|
||||
_next (cb) {
|
||||
this.nextTick(cb, null, ...nextArgs)
|
||||
}
|
||||
|
||||
_close (cb) {
|
||||
this.nextTick(cb)
|
||||
}
|
||||
}
|
||||
|
||||
const db = mockLevel({
|
||||
[privateMethod]: function (options) {
|
||||
t.is(options.gt, 'FOO', 'got encoded range option')
|
||||
return new MockIterator(this, options)
|
||||
},
|
||||
_open: function (options, callback) {
|
||||
t.pass('opened')
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}, { encodings: { utf8: true } }, {
|
||||
keyEncoding
|
||||
})
|
||||
|
||||
const it = db[publicMethod]({ gt: 'foo' })
|
||||
t.ok(it instanceof DeferredCtor, 'is deferred')
|
||||
|
||||
let nextFirst = false
|
||||
|
||||
it.next(function (err, ...rest) {
|
||||
nextFirst = true
|
||||
t.error(err, 'no next() error')
|
||||
t.same(rest, nextArgs)
|
||||
})
|
||||
|
||||
it.close(function (err) {
|
||||
t.error(err, 'no close() error')
|
||||
t.ok(nextFirst)
|
||||
})
|
||||
})
|
||||
|
||||
// NOTE: adapted from deferred-leveldown
|
||||
test(`deferred ${mode}(): non-deferred operations`, function (t) {
|
||||
t.plan(6)
|
||||
|
||||
class MockIterator extends RealCtor {
|
||||
_seek (target) {
|
||||
t.is(target, '123')
|
||||
}
|
||||
|
||||
_next (cb) {
|
||||
this.nextTick(cb, null, ...nextArgs)
|
||||
}
|
||||
}
|
||||
|
||||
const db = mockLevel({
|
||||
[privateMethod]: function (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
})
|
||||
|
||||
db.open(function (err) {
|
||||
t.error(err, 'no open() error')
|
||||
|
||||
it.seek(123)
|
||||
it.next(function (err, ...rest) {
|
||||
t.error(err, 'no next() error')
|
||||
t.same(rest, nextArgs)
|
||||
|
||||
it.close(function (err) {
|
||||
t.error(err, 'no close() error')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
const it = db[publicMethod]({ gt: 'foo' })
|
||||
t.ok(it instanceof DeferredCtor)
|
||||
})
|
||||
|
||||
// NOTE: adapted from deferred-leveldown
|
||||
test(`deferred ${mode}(): iterators are created in order`, function (t) {
|
||||
t.plan(6)
|
||||
|
||||
const order1 = []
|
||||
const order2 = []
|
||||
|
||||
class MockIterator extends RealCtor {}
|
||||
|
||||
function db (order) {
|
||||
return mockLevel({
|
||||
[privateMethod]: function (options) {
|
||||
order.push('iterator created')
|
||||
return new MockIterator(this, options)
|
||||
},
|
||||
_put: function (key, value, options, callback) {
|
||||
order.push('put')
|
||||
},
|
||||
_open: function (options, callback) {
|
||||
this.nextTick(callback)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const db1 = db(order1)
|
||||
const db2 = db(order2)
|
||||
|
||||
db1.open(function (err) {
|
||||
t.error(err, 'no error')
|
||||
t.same(order1, ['iterator created', 'put'])
|
||||
})
|
||||
|
||||
db2.open(function (err) {
|
||||
t.error(err, 'no error')
|
||||
t.same(order2, ['put', 'iterator created'])
|
||||
})
|
||||
|
||||
t.ok(db1[publicMethod]() instanceof DeferredCtor)
|
||||
db1.put('key', 'value', noop)
|
||||
|
||||
db2.put('key', 'value', noop)
|
||||
t.ok(db2[publicMethod]() instanceof DeferredCtor)
|
||||
})
|
||||
|
||||
for (const method of ['next', 'nextv', 'all']) {
|
||||
test(`deferred ${mode}(): closed upon failed open, verified by ${method}()`, function (t) {
|
||||
t.plan(5)
|
||||
|
||||
const db = mockLevel({
|
||||
_open (options, callback) {
|
||||
t.pass('opening')
|
||||
this.nextTick(callback, new Error('_open error'))
|
||||
},
|
||||
_iterator () {
|
||||
t.fail('should not be called')
|
||||
},
|
||||
[privateMethod] () {
|
||||
t.fail('should not be called')
|
||||
}
|
||||
})
|
||||
|
||||
const it = db[publicMethod]()
|
||||
t.ok(it instanceof DeferredCtor)
|
||||
|
||||
const original = it._close
|
||||
it._close = function (...args) {
|
||||
t.pass('closed')
|
||||
return original.call(this, ...args)
|
||||
}
|
||||
|
||||
verifyClosed(t, it, method, () => {})
|
||||
})
|
||||
|
||||
test(`deferred ${mode}(): deferred and real iterators are closed on db.close(), verified by ${method}()`, function (t) {
|
||||
t.plan(10)
|
||||
|
||||
class MockIterator extends RealCtor {
|
||||
_close (callback) {
|
||||
t.pass('closed')
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}
|
||||
|
||||
const db = mockLevel({
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
})
|
||||
|
||||
const it = db[publicMethod]()
|
||||
t.ok(it instanceof DeferredCtor)
|
||||
|
||||
const original = it._close
|
||||
it._close = function (...args) {
|
||||
t.pass('closed')
|
||||
return original.call(this, ...args)
|
||||
}
|
||||
|
||||
db.close(function (err) {
|
||||
t.ifError(err, 'no close() error')
|
||||
|
||||
verifyClosed(t, it, method, function () {
|
||||
db.open(function (err) {
|
||||
t.ifError(err, 'no open() error')
|
||||
|
||||
// Should still be closed
|
||||
verifyClosed(t, it, method, function () {
|
||||
db.close(t.ifError.bind(t))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
test(`deferred ${mode}(): deferred and real iterators are detached on db.close()`, function (t) {
|
||||
t.plan(4)
|
||||
|
||||
class MockIterator extends RealCtor {}
|
||||
|
||||
let real
|
||||
const db = mockLevel({
|
||||
[privateMethod] (options) {
|
||||
real = new MockIterator(this, options)
|
||||
return real
|
||||
}
|
||||
})
|
||||
|
||||
const it = db[publicMethod]()
|
||||
t.ok(it instanceof DeferredCtor)
|
||||
|
||||
db.close(function (err) {
|
||||
t.ifError(err, 'no close() error')
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err, 'no open() error')
|
||||
|
||||
it.close = real.close = it._close = real._close = function () {
|
||||
t.fail('should not be called')
|
||||
}
|
||||
|
||||
db.close(t.ifError.bind(t))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test(`deferred ${mode}(): defers underlying close()`, function (t) {
|
||||
t.plan(3)
|
||||
|
||||
class MockIterator extends RealCtor {
|
||||
_close (callback) {
|
||||
order.push('_close')
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}
|
||||
|
||||
const order = []
|
||||
const db = mockLevel({
|
||||
_open (options, callback) {
|
||||
order.push('_open')
|
||||
this.nextTick(callback)
|
||||
},
|
||||
[privateMethod] (options) {
|
||||
order.push(privateMethod)
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
})
|
||||
|
||||
const it = db[publicMethod]()
|
||||
t.ok(it instanceof DeferredCtor)
|
||||
|
||||
it.close(function (err) {
|
||||
t.ifError(err, 'no close() error')
|
||||
t.same(order, ['_open', privateMethod, '_close'])
|
||||
})
|
||||
})
|
||||
|
||||
const verifyClosed = function (t, it, method, cb) {
|
||||
const requiredArgs = method === 'nextv' ? [10] : []
|
||||
|
||||
it[method](...requiredArgs, function (err) {
|
||||
t.is(err && err.code, 'LEVEL_ITERATOR_NOT_OPEN', `correct error on first ${method}()`)
|
||||
|
||||
// Should account for userland code that ignores errors
|
||||
it[method](...requiredArgs, function (err) {
|
||||
t.is(err && err.code, 'LEVEL_ITERATOR_NOT_OPEN', `correct error on second ${method}()`)
|
||||
cb()
|
||||
})
|
||||
})
|
||||
}
|
||||
}
93
node_modules/abstract-level/test/self/deferred-operations-test.js
generated
vendored
Normal file
@@ -0,0 +1,93 @@
'use strict'

const test = require('tape')
const { mockLevel, mockIterator } = require('../util')

// NOTE: copied from deferred-leveldown
test('deferred operations are called in order', function (t) {
  t.plan(15)

  const calls = []
  const db = mockLevel({
    _put: function (key, value, options, callback) {
      this.nextTick(callback)
      calls.push({ type: 'put', key, value, options })
    },
    _get: function (key, options, callback) {
      this.nextTick(callback)
      calls.push({ type: 'get', key, options })
    },
    _del: function (key, options, callback) {
      this.nextTick(callback)
      calls.push({ type: 'del', key, options })
    },
    _batch: function (arr, options, callback) {
      this.nextTick(callback)
      calls.push({ type: 'batch', keys: arr.map(op => op.key).join(',') })
    },
    _clear: function (options, callback) {
      this.nextTick(callback)
      calls.push({ ...options, type: 'clear' })
    },
    _iterator (options) {
      calls.push({ type: 'iterator' })
      return mockIterator(this, options, {
        _next (callback) {
          this.nextTick(callback)
          calls.push({ type: 'iterator.next' })
        }
      })
    },
    _open: function (options, callback) {
      this.nextTick(callback)
      t.is(calls.length, 0, 'not yet called')
    }
  }, {
    encodings: {
      utf8: true,
      buffer: true
    }
  }, {
    keyEncoding: 'utf8',
    valueEncoding: 'utf8'
  })

  db.open(function (err) {
    t.ifError(err, 'no open() error')
    t.same(calls, [
      { type: 'put', key: '001', value: 'bar1', options: { keyEncoding: 'utf8', valueEncoding: 'utf8' } },
      { type: 'get', key: '002', options: { keyEncoding: 'utf8', valueEncoding: 'utf8' } },
      { type: 'clear', reverse: false, limit: -1, keyEncoding: 'utf8' },
      { type: 'put', key: '010', value: 'bar2', options: { keyEncoding: 'utf8', valueEncoding: 'utf8' } },
      { type: 'get', key: Buffer.from('011'), options: { keyEncoding: 'buffer', valueEncoding: 'utf8' } },
      { type: 'del', key: '020', options: { customOption: 123, keyEncoding: 'utf8' } },
      { type: 'del', key: '021', options: { keyEncoding: 'utf8' } },
      { type: 'batch', keys: '040,041' },
      { type: 'iterator' },
      { type: 'batch', keys: '050,051' },
      { type: 'iterator.next' },
      { type: 'clear', gt: '060', reverse: false, limit: -1, keyEncoding: 'utf8' }
    ], 'calls correctly behaved')
  })

  db.put('001', 'bar1', t.ifError.bind(t))
  db.get('002', t.ifError.bind(t))
  db.clear(t.ifError.bind(t))
  db.put('010', 'bar2', t.ifError.bind(t))
  db.get('011', { keyEncoding: 'buffer' }, t.ifError.bind(t))
  db.del('020', { customOption: 123 }, t.ifError.bind(t))
  db.del('021', t.ifError.bind(t))
  db.batch([
    { type: 'put', key: '040', value: 'a' },
    { type: 'put', key: '041', value: 'b' }
  ], t.ifError.bind(t))
  const it = db.iterator()
  db.batch()
    .put('050', 'c')
    .put('051', 'd')
    .write(t.ifError.bind(t))
  it.next(t.ifError.bind(t))
  db.clear({ gt: '060' }, t.ifError.bind(t))

  t.is(calls.length, 0, 'not yet called')
})
426
node_modules/abstract-level/test/self/encoding-test.js
generated
vendored
Normal file
@@ -0,0 +1,426 @@
'use strict'

// TODO: move to per-method test files

const test = require('tape')
const { Buffer } = require('buffer')
const { mockLevel, mockChainedBatch, nullishEncoding } = require('../util')
const identity = (v) => v

const utf8Manifest = { encodings: { utf8: true } }
const dualManifest = { encodings: { utf8: true, buffer: true } }
const hasOwnProperty = Object.prototype.hasOwnProperty

for (const deferred of [false, true]) {
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`get() encodes utf8 key (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = mockLevel({
|
||||
_get (key, options, callback) {
|
||||
t.is(key, '8')
|
||||
t.is(options.keyEncoding, 'utf8')
|
||||
t.is(options.valueEncoding, 'utf8')
|
||||
this.nextTick(callback, null, 'foo')
|
||||
}
|
||||
}, utf8Manifest)
|
||||
|
||||
if (!deferred) await db.open()
|
||||
t.same(await db.get(8), 'foo')
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`get() takes encoding options (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = mockLevel({
|
||||
_get (key, options, callback) {
|
||||
t.is(key, '[1,"2"]')
|
||||
t.is(options.keyEncoding, 'utf8')
|
||||
t.is(options.valueEncoding, 'utf8')
|
||||
this.nextTick(callback, null, '123')
|
||||
}
|
||||
}, utf8Manifest)
|
||||
|
||||
if (!deferred) await db.open()
|
||||
t.same(await db.get([1, '2'], { keyEncoding: 'json', valueEncoding: 'json' }), 123)
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`get() with custom value encoding that wants a buffer (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(3)
|
||||
|
||||
const db = mockLevel({
|
||||
_get (key, options, callback) {
|
||||
t.same(key, 'key')
|
||||
t.same(options, { keyEncoding: 'utf8', valueEncoding: 'buffer' })
|
||||
this.nextTick(callback, null, Buffer.alloc(1))
|
||||
}
|
||||
}, dualManifest, {
|
||||
keyEncoding: 'utf8',
|
||||
valueEncoding: { encode: identity, decode: identity, format: 'buffer' }
|
||||
})
|
||||
|
||||
if (!deferred) await db.open()
|
||||
t.same(await db.get('key'), Buffer.alloc(1))
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`get() with custom value encoding that wants a string (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(3)
|
||||
|
||||
const db = mockLevel({
|
||||
_get (key, options, callback) {
|
||||
t.same(key, Buffer.from('key'))
|
||||
t.same(options, { keyEncoding: 'buffer', valueEncoding: 'utf8' })
|
||||
this.nextTick(callback, null, 'x')
|
||||
}
|
||||
}, dualManifest, {
|
||||
keyEncoding: 'buffer',
|
||||
valueEncoding: { encode: identity, decode: identity, format: 'utf8' }
|
||||
})
|
||||
|
||||
if (!deferred) await db.open()
|
||||
t.same(await db.get(Buffer.from('key')), 'x')
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`put() encodes utf8 key and value (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = mockLevel({
|
||||
_put (key, value, options, callback) {
|
||||
t.is(key, '8')
|
||||
t.is(value, '4')
|
||||
t.is(options.keyEncoding, 'utf8')
|
||||
t.is(options.valueEncoding, 'utf8')
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}, utf8Manifest)
|
||||
|
||||
if (!deferred) await db.open()
|
||||
await db.put(8, 4)
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`put() takes encoding options (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = mockLevel({
|
||||
_put (key, value, options, callback) {
|
||||
t.is(key, '[1,"2"]')
|
||||
t.is(value, '{"x":3}')
|
||||
t.is(options.keyEncoding, 'utf8')
|
||||
t.is(options.valueEncoding, 'utf8')
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}, utf8Manifest)
|
||||
|
||||
if (!deferred) await db.open()
|
||||
await db.put([1, '2'], { x: 3 }, { keyEncoding: 'json', valueEncoding: 'json' })
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`del() encodes utf8 key (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const db = mockLevel({
|
||||
_del (key, options, callback) {
|
||||
t.is(key, '2')
|
||||
t.is(options.keyEncoding, 'utf8')
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}, utf8Manifest)
|
||||
|
||||
if (!deferred) await db.open()
|
||||
await db.del(2)
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`del() takes keyEncoding option (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const db = mockLevel({
|
||||
_del (key, options, callback) {
|
||||
t.is(key, '[1,"2"]')
|
||||
t.is(options.keyEncoding, 'utf8')
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}, utf8Manifest)
|
||||
|
||||
if (!deferred) await db.open()
|
||||
await db.del([1, '2'], { keyEncoding: 'json' })
|
||||
})
|
||||
|
||||
test(`getMany() encodes utf8 key (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = mockLevel({
|
||||
_getMany (keys, options, callback) {
|
||||
t.same(keys, ['8', '29'])
|
||||
t.is(options.keyEncoding, 'utf8')
|
||||
t.is(options.valueEncoding, 'utf8')
|
||||
this.nextTick(callback, null, ['foo', 'bar'])
|
||||
}
|
||||
}, utf8Manifest)
|
||||
|
||||
if (!deferred) await db.open()
|
||||
t.same(await db.getMany([8, 29]), ['foo', 'bar'])
|
||||
})
|
||||
|
||||
test(`getMany() takes encoding options (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = mockLevel({
|
||||
_getMany (keys, options, callback) {
|
||||
t.same(keys, ['[1,"2"]', '"x"'])
|
||||
t.is(options.keyEncoding, 'utf8')
|
||||
t.is(options.valueEncoding, 'utf8')
|
||||
this.nextTick(callback, null, ['123', '"hi"'])
|
||||
}
|
||||
}, utf8Manifest)
|
||||
|
||||
if (!deferred) await db.open()
|
||||
t.same(await db.getMany([[1, '2'], 'x'], { keyEncoding: 'json', valueEncoding: 'json' }), [123, 'hi'])
|
||||
})
|
||||
|
||||
test(`getMany() with custom value encoding that wants a buffer (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(3)
|
||||
|
||||
const db = mockLevel({
|
||||
_getMany (keys, options, callback) {
|
||||
t.same(keys, ['key'])
|
||||
t.same(options, { keyEncoding: 'utf8', valueEncoding: 'buffer' })
|
||||
this.nextTick(callback, null, [Buffer.alloc(1)])
|
||||
}
|
||||
}, dualManifest, {
|
||||
keyEncoding: 'utf8',
|
||||
valueEncoding: { encode: identity, decode: identity, format: 'buffer' }
|
||||
})
|
||||
|
||||
if (!deferred) await db.open()
|
||||
t.same(await db.getMany(['key']), [Buffer.alloc(1)])
|
||||
})
|
||||
|
||||
test(`getMany() with custom value encoding that wants a string (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(3)
|
||||
|
||||
const db = mockLevel({
|
||||
_getMany (keys, options, callback) {
|
||||
t.same(keys, [Buffer.from('key')])
|
||||
t.same(options, { keyEncoding: 'buffer', valueEncoding: 'utf8' })
|
||||
this.nextTick(callback, null, ['x'])
|
||||
}
|
||||
}, dualManifest, {
|
||||
keyEncoding: 'buffer',
|
||||
valueEncoding: { encode: identity, decode: identity, format: 'utf8' }
|
||||
})
|
||||
|
||||
if (!deferred) await db.open()
|
||||
t.same(await db.getMany([Buffer.from('key')]), ['x'])
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`chainedBatch.put() and del() encode utf8 key and value (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(deferred ? 2 : 4)
|
||||
|
||||
let db
|
||||
|
||||
if (deferred) {
|
||||
db = mockLevel({
|
||||
_batch (array, options, callback) {
|
||||
t.same(array, [
|
||||
{ type: 'put', key: '1', value: '2', keyEncoding: 'utf8', valueEncoding: 'utf8' },
|
||||
{ type: 'del', key: '3', keyEncoding: 'utf8' }
|
||||
])
|
||||
t.same(options, {})
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}, utf8Manifest)
|
||||
} else {
|
||||
db = mockLevel({
|
||||
_chainedBatch () {
|
||||
return mockChainedBatch(this, {
|
||||
_put: function (key, value, options) {
|
||||
t.same({ key, value }, { key: '1', value: '2' })
|
||||
t.same(options, { keyEncoding: 'utf8', valueEncoding: 'utf8' })
|
||||
},
|
||||
_del: function (key, options) {
|
||||
t.is(key, '3')
|
||||
t.same(options, { keyEncoding: 'utf8' })
|
||||
}
|
||||
})
|
||||
}
|
||||
}, utf8Manifest)
|
||||
}
|
||||
|
||||
if (!deferred) await db.open()
|
||||
await db.batch().put(1, 2).del(3).write()
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`chainedBatch.put() and del() take encoding options (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(deferred ? 2 : 4)
|
||||
|
||||
let db
|
||||
|
||||
const putOptions = { keyEncoding: 'json', valueEncoding: 'json' }
|
||||
const delOptions = { keyEncoding: 'json' }
|
||||
|
||||
if (deferred) {
|
||||
db = mockLevel({
|
||||
_batch (array, options, callback) {
|
||||
t.same(array, [
|
||||
{ type: 'put', key: '"1"', value: '{"x":[2]}', keyEncoding: 'utf8', valueEncoding: 'utf8' },
|
||||
{ type: 'del', key: '"3"', keyEncoding: 'utf8' }
|
||||
])
|
||||
t.same(options, {})
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}, utf8Manifest)
|
||||
} else {
|
||||
db = mockLevel({
|
||||
_chainedBatch () {
|
||||
return mockChainedBatch(this, {
|
||||
_put: function (key, value, options) {
|
||||
t.same({ key, value }, { key: '"1"', value: '{"x":[2]}' })
|
||||
t.same(options, { keyEncoding: 'utf8', valueEncoding: 'utf8' })
|
||||
},
|
||||
_del: function (key, options) {
|
||||
t.is(key, '"3"')
|
||||
t.same(options, { keyEncoding: 'utf8' })
|
||||
}
|
||||
})
|
||||
}
|
||||
}, utf8Manifest)
|
||||
}
|
||||
|
||||
if (!deferred) await db.open()
|
||||
await db.batch().put('1', { x: [2] }, putOptions).del('3', delOptions).write()
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`clear() receives keyEncoding option (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(1)
|
||||
|
||||
const db = mockLevel({
|
||||
_clear: function (options, callback) {
|
||||
t.same(options, { keyEncoding: 'utf8', reverse: false, limit: -1 })
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}, utf8Manifest)
|
||||
|
||||
if (!deferred) await db.open()
|
||||
await db.clear()
|
||||
})
|
||||
|
||||
test(`clear() takes keyEncoding option (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(1)
|
||||
|
||||
const db = mockLevel({
|
||||
_clear: function (options, callback) {
|
||||
t.same(options, { keyEncoding: 'utf8', gt: '"a"', reverse: false, limit: -1 })
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}, utf8Manifest)
|
||||
|
||||
if (!deferred) await db.open()
|
||||
await db.clear({ keyEncoding: 'json', gt: 'a' })
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`clear() encodes range options (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(5)
|
||||
|
||||
const keyEncoding = {
|
||||
format: 'utf8',
|
||||
encode: function (key) {
|
||||
return 'encoded_' + key
|
||||
},
|
||||
decode: identity
|
||||
}
|
||||
|
||||
const db = mockLevel({
|
||||
_clear: function (options, callback) {
|
||||
t.is(options.gt, 'encoded_1')
|
||||
t.is(options.gte, 'encoded_2')
|
||||
t.is(options.lt, 'encoded_3')
|
||||
t.is(options.lte, 'encoded_4')
|
||||
t.is(options.foo, 5)
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}, utf8Manifest, { keyEncoding })
|
||||
|
||||
if (!deferred) await db.open()
|
||||
await db.clear({ gt: 1, gte: 2, lt: 3, lte: 4, foo: 5 })
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`clear() does not strip nullish range options (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(12)
|
||||
|
||||
const db1 = mockLevel({
|
||||
_clear: function (options, callback) {
|
||||
t.is(options.gt, '\x00', 'encoded null')
|
||||
t.is(options.gte, '\x00', 'encoded null')
|
||||
t.is(options.lt, '\x00', 'encoded null')
|
||||
t.is(options.lte, '\x00', 'encoded null')
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}, utf8Manifest, { keyEncoding: nullishEncoding, valueEncoding: nullishEncoding })
|
||||
|
||||
const db2 = mockLevel({
|
||||
_clear: function (options, callback) {
|
||||
t.is(hasOwnProperty.call(options, 'gt'), true)
|
||||
t.is(hasOwnProperty.call(options, 'gte'), true)
|
||||
t.is(hasOwnProperty.call(options, 'lt'), true)
|
||||
t.is(hasOwnProperty.call(options, 'lte'), true)
|
||||
|
||||
t.is(options.gt, '\xff', 'encoded undefined')
|
||||
t.is(options.gte, '\xff', 'encoded undefined')
|
||||
t.is(options.lt, '\xff', 'encoded undefined')
|
||||
t.is(options.lte, '\xff', 'encoded undefined')
|
||||
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}, utf8Manifest, { keyEncoding: nullishEncoding, valueEncoding: nullishEncoding })
|
||||
|
||||
if (!deferred) {
|
||||
await Promise.all([db1.open(), db2.open()])
|
||||
}
|
||||
|
||||
const promise1 = db1.clear({
|
||||
gt: null,
|
||||
gte: null,
|
||||
lt: null,
|
||||
lte: null
|
||||
})
|
||||
|
||||
const promise2 = db2.clear({
|
||||
gt: undefined,
|
||||
gte: undefined,
|
||||
lt: undefined,
|
||||
lte: undefined
|
||||
})
|
||||
|
||||
await Promise.all([promise1, promise2])
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`clear() does not add nullish range options (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = mockLevel({
|
||||
_clear: function (options, callback) {
|
||||
t.is(hasOwnProperty.call(options, 'gt'), false)
|
||||
t.is(hasOwnProperty.call(options, 'gte'), false)
|
||||
t.is(hasOwnProperty.call(options, 'lt'), false)
|
||||
t.is(hasOwnProperty.call(options, 'lte'), false)
|
||||
this.nextTick(callback)
|
||||
}
|
||||
})
|
||||
|
||||
if (!deferred) await db.open()
|
||||
await db.clear({})
|
||||
})
|
||||
}
829
node_modules/abstract-level/test/self/iterator-test.js
generated
vendored
Normal file
@@ -0,0 +1,829 @@
'use strict'

const test = require('tape')
const { Buffer } = require('buffer')
const { AbstractLevel } = require('../..')
const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../..')
const { mockLevel, mockIterator, nullishEncoding } = require('../util')

const identity = (v) => v
const utf8Manifest = { encodings: { utf8: true } }
const dualManifest = { encodings: { utf8: true, buffer: true } }
const tripleManifest = { encodings: { utf8: true, buffer: true, view: true } }

for (const deferred of [false, true]) {
|
||||
// Also test default fallback implementations of keys() and values()
|
||||
for (const [mode, def] of [['iterator', false], ['keys', false], ['values', false], ['keys', true], ['values', true]]) {
|
||||
const Ctor = mode === 'iterator' || def ? AbstractIterator : mode === 'keys' ? AbstractKeyIterator : AbstractValueIterator
|
||||
const privateMethod = def ? '_iterator' : '_' + mode
|
||||
const publicMethod = mode
|
||||
|
||||
test(`${mode}() (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
let called = false
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
t.is(this, db, 'thisArg is correct')
|
||||
t.is(arguments.length, 1, 'got one argument')
|
||||
|
||||
const kvOptions = mode === 'iterator' || def
|
||||
? { keys: mode !== 'values', values: mode !== 'keys' }
|
||||
: {}
|
||||
|
||||
t.same(options, {
|
||||
reverse: false,
|
||||
limit: -1,
|
||||
keyEncoding: 'utf8',
|
||||
valueEncoding: 'utf8',
|
||||
...kvOptions
|
||||
})
|
||||
|
||||
called = true
|
||||
return new Ctor(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(tripleManifest)
|
||||
if (!deferred) await db.open()
|
||||
|
||||
db[publicMethod]()
|
||||
t.is(called, !deferred)
|
||||
if (deferred) await db.open()
|
||||
})
|
||||
|
||||
test(`${mode}() with custom options (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(3)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
t.is(options.foo, 123)
|
||||
t.is(options.reverse, true)
|
||||
t.is(options.limit, 1)
|
||||
|
||||
return new Ctor(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(tripleManifest)
|
||||
if (!deferred) await db.open()
|
||||
db[publicMethod]({ foo: 123, reverse: true, limit: 1 })
|
||||
if (deferred) await db.open()
|
||||
})
|
||||
|
||||
for (const limit of [2, 0]) {
|
||||
test(`${mode}().next() skips _next() when limit ${limit} is reached (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
let calls = 0
|
||||
let yielded = 0
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_next (callback) {
|
||||
calls++
|
||||
|
||||
if (mode === 'iterator' || def) {
|
||||
this.nextTick(callback, null, 'a', 'a')
|
||||
} else {
|
||||
this.nextTick(callback, null, 'a')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(utf8Manifest)
|
||||
if (!deferred) await db.open()
|
||||
|
||||
const it = db[publicMethod]({ limit })
|
||||
|
||||
for (let i = 0; i < limit + 2; i++) {
|
||||
const item = await it.next()
|
||||
if (item === undefined) break
|
||||
yielded++
|
||||
}
|
||||
|
||||
t.is(it.count, limit, 'final count matches limit')
|
||||
t.is(calls, limit)
|
||||
t.is(yielded, limit)
|
||||
})
|
||||
|
||||
test(`${mode}().nextv() skips _nextv() when limit ${limit} is reached (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
let calls = 0
|
||||
let yielded = 0
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_nextv (size, options, callback) {
|
||||
calls++
|
||||
|
||||
if (mode === 'iterator' || def) {
|
||||
this.nextTick(callback, null, [['a', 'a']])
|
||||
} else {
|
||||
this.nextTick(callback, null, ['a'])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(utf8Manifest)
|
||||
if (!deferred) await db.open()
|
||||
|
||||
const it = db[publicMethod]({ limit })
|
||||
|
||||
for (let i = 0; i < limit + 2; i++) {
|
||||
const items = await it.nextv(1)
|
||||
yielded += items.length
|
||||
if (items.length === 0) break
|
||||
}
|
||||
|
||||
t.is(it.count, limit, 'final count matches limit')
|
||||
t.is(calls, limit)
|
||||
t.is(yielded, limit)
|
||||
})
|
||||
|
||||
test(`${mode}().all() skips _all() when limit ${limit} is reached (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
let nextCount = 0
|
||||
class MockIterator extends Ctor {
|
||||
_next (callback) {
|
||||
if (++nextCount > 10) {
|
||||
throw new Error('Potential infinite loop')
|
||||
} else if (mode === 'iterator' || def) {
|
||||
this.nextTick(callback, null, 'a', 'a')
|
||||
} else {
|
||||
this.nextTick(callback, null, 'a')
|
||||
}
|
||||
}
|
||||
|
||||
_all (options, callback) {
|
||||
t.fail('should not be called')
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(utf8Manifest)
|
||||
if (!deferred) await db.open()
|
||||
|
||||
const it = db[publicMethod]({ limit })
|
||||
|
||||
// Use next() because all() auto-closes and thus can't be used twice atm
|
||||
for (let i = 0; i < limit; i++) await it.next()
|
||||
|
||||
t.same(await it.all(), [])
|
||||
})
|
||||
}
|
||||
|
||||
test(`${mode}().nextv() reduces size for _nextv() when near limit (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_nextv (size, options, callback) {
|
||||
if (mode === 'iterator' || def) {
|
||||
this.nextTick(callback, null, Array(size).fill(['a', 'a']))
|
||||
} else {
|
||||
this.nextTick(callback, null, Array(size).fill('a'))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(utf8Manifest)
|
||||
if (!deferred) await db.open()
|
||||
|
||||
const it = db[publicMethod]({ limit: 3 })
|
||||
|
||||
t.is((await it.nextv(2)).length, 2)
|
||||
t.is((await it.nextv(2)).length, 1)
|
||||
t.is((await it.nextv(2)).length, 0)
|
||||
})
|
||||
|
||||
test(`${mode}().count increments by next(), nextv() and all() (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_next (callback) {
|
||||
if (mode === 'iterator' || def) {
|
||||
this.nextTick(callback, null, 'a', 'a')
|
||||
} else {
|
||||
this.nextTick(callback, null, 'a')
|
||||
}
|
||||
}
|
||||
|
||||
_nextv (size, options, callback) {
|
||||
if (mode === 'iterator' || def) {
|
||||
this.nextTick(callback, null, [['a', 'a'], ['b', 'b']])
|
||||
} else {
|
||||
this.nextTick(callback, null, ['a', 'b'])
|
||||
}
|
||||
}
|
||||
|
||||
_all (options, callback) {
|
||||
if (mode === 'iterator' || def) {
|
||||
this.nextTick(callback, null, [['c', 'c'], ['d', 'd'], ['e', 'e']])
|
||||
} else {
|
||||
this.nextTick(callback, null, ['c', 'd', 'e'])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(utf8Manifest)
|
||||
if (!deferred) await db.open()
|
||||
|
||||
const it = db[publicMethod]()
|
||||
|
||||
for (let i = 0; i < 2; i++) {
|
||||
t.isNot(await it.next(), undefined) // 2 * 1 = 2
|
||||
t.is((await it.nextv(2)).length, 2) // 2 * 2 = 4
|
||||
}
|
||||
|
||||
t.is(it.count, 2 + 4)
|
||||
t.is((await it.all()).length, 3)
|
||||
t.is(it.count, 2 + 4 + 3)
|
||||
})
|
||||
|
||||
test(`${mode}() forwards encoding options (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(3)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
t.is(options.keyEncoding, 'utf8')
|
||||
t.is(options.valueEncoding, 'buffer')
|
||||
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_next (callback) {
|
||||
if (mode === 'iterator' || def) {
|
||||
this.nextTick(callback, null, '281', Buffer.from('a'))
|
||||
} else if (mode === 'keys') {
|
||||
this.nextTick(callback, null, '281')
|
||||
} else {
|
||||
this.nextTick(callback, null, Buffer.from('a'))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(dualManifest)
|
||||
if (!deferred) await db.open()
|
||||
|
||||
const item = await db[publicMethod]({ keyEncoding: 'json', valueEncoding: 'hex' }).next()
|
||||
t.same(item, mode === 'iterator' ? [281, '61'] : mode === 'keys' ? 281 : '61')
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`${mode}() with custom encodings that want a buffer (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(5)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
t.is(options.keyEncoding, 'buffer')
|
||||
t.is(options.valueEncoding, 'buffer')
|
||||
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_next (callback) {
|
||||
if (mode === 'iterator' || def) {
|
||||
this.nextTick(callback, null, Buffer.from('a'), Buffer.from('b'))
|
||||
} else if (mode === 'keys') {
|
||||
this.nextTick(callback, null, Buffer.from('a'))
|
||||
} else {
|
||||
this.nextTick(callback, null, Buffer.from('b'))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(dualManifest)
|
||||
const encoding = { encode: spy(identity), decode: spy(identity), format: 'buffer' }
|
||||
if (!deferred) await db.open()
|
||||
|
||||
const it = db[publicMethod]({ keyEncoding: encoding, valueEncoding: encoding })
|
||||
const item = await it.next()
|
||||
|
||||
t.is(encoding.encode.calls, 0, 'did not need to encode anything')
|
||||
t.is(encoding.decode.calls, mode === 'iterator' ? 2 : 1)
|
||||
t.same(item, mode === 'iterator' ? [Buffer.from('a'), Buffer.from('b')] : Buffer.from(mode === 'keys' ? 'a' : 'b'))
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`${mode}() with custom encodings that want a string (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(5)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
t.is(options.keyEncoding, 'utf8')
|
||||
t.is(options.valueEncoding, 'utf8')
|
||||
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_next (callback) {
|
||||
if (mode === 'iterator' || def) {
|
||||
this.nextTick(callback, null, 'a', 'b')
|
||||
} else if (mode === 'keys') {
|
||||
this.nextTick(callback, null, 'a')
|
||||
} else {
|
||||
this.nextTick(callback, null, 'b')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(dualManifest)
|
||||
const encoding = { encode: spy(identity), decode: spy(identity), format: 'utf8' }
|
||||
if (!deferred) await db.open()
|
||||
|
||||
const it = db[publicMethod]({ keyEncoding: encoding, valueEncoding: encoding })
|
||||
const item = await it.next()
|
||||
|
||||
t.is(encoding.encode.calls, 0, 'did not need to encode anything')
|
||||
t.is(encoding.decode.calls, mode === 'iterator' ? 2 : 1)
|
||||
t.same(item, mode === 'iterator' ? ['a', 'b'] : mode === 'keys' ? 'a' : 'b')
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`${mode}() encodes range options (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(6)
|
||||
|
||||
let calls = 0
|
||||
const keyEncoding = {
|
||||
format: 'utf8',
|
||||
encode (key) {
|
||||
calls++
|
||||
return 'encoded_' + key
|
||||
},
|
||||
decode: identity
|
||||
}
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
t.is(options.gt, 'encoded_3')
|
||||
t.is(options.gte, 'encoded_4')
|
||||
t.is(options.lt, 'encoded_5')
|
||||
t.is(options.lte, 'encoded_6')
|
||||
t.is(options.foo, 7)
|
||||
return new Ctor(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(utf8Manifest, { keyEncoding })
|
||||
if (!deferred) await db.open()
|
||||
await db[publicMethod]({ gt: 3, gte: 4, lt: 5, lte: 6, foo: 7 }).next()
|
||||
t.is(calls, 4)
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`${mode}() does not strip nullish range options (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(12)
|
||||
|
||||
const db1 = mockLevel({
|
||||
[privateMethod] (options) {
|
||||
t.is(options.gt, '\x00', 'encoded null')
|
||||
t.is(options.gte, '\x00', 'encoded null')
|
||||
t.is(options.lt, '\x00', 'encoded null')
|
||||
t.is(options.lte, '\x00', 'encoded null')
|
||||
|
||||
return new Ctor(this, options)
|
||||
}
|
||||
}, utf8Manifest, { keyEncoding: nullishEncoding, valueEncoding: nullishEncoding })
|
||||
|
||||
const db2 = mockLevel({
|
||||
[privateMethod] (options) {
|
||||
t.is(hasOwnProperty.call(options, 'gt'), true)
|
||||
t.is(hasOwnProperty.call(options, 'gte'), true)
|
||||
t.is(hasOwnProperty.call(options, 'lt'), true)
|
||||
t.is(hasOwnProperty.call(options, 'lte'), true)
|
||||
|
||||
t.is(options.gt, '\xff', 'encoded undefined')
|
||||
t.is(options.gte, '\xff', 'encoded undefined')
|
||||
t.is(options.lt, '\xff', 'encoded undefined')
|
||||
t.is(options.lte, '\xff', 'encoded undefined')
|
||||
|
||||
return new Ctor(this, options)
|
||||
}
|
||||
}, utf8Manifest, { keyEncoding: nullishEncoding, valueEncoding: nullishEncoding })
|
||||
|
||||
if (!deferred) {
|
||||
await Promise.all([db1.open(), db2.open()])
|
||||
}
|
||||
|
||||
const promise1 = db1[publicMethod]({
|
||||
gt: null,
|
||||
gte: null,
|
||||
lt: null,
|
||||
lte: null
|
||||
}).next()
|
||||
|
||||
const promise2 = db2[publicMethod]({
|
||||
gt: undefined,
|
||||
gte: undefined,
|
||||
lt: undefined,
|
||||
lte: undefined
|
||||
}).next()
|
||||
|
||||
return Promise.all([promise1, promise2])
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`${mode}() does not add nullish range options (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = mockLevel({
|
||||
[privateMethod] (options) {
|
||||
t.is(hasOwnProperty.call(options, 'gt'), false)
|
||||
t.is(hasOwnProperty.call(options, 'gte'), false)
|
||||
t.is(hasOwnProperty.call(options, 'lt'), false)
|
||||
t.is(hasOwnProperty.call(options, 'lte'), false)
|
||||
|
||||
return new Ctor(this, options)
|
||||
}
|
||||
})
|
||||
|
||||
if (!deferred) await db.open()
|
||||
await db[publicMethod]({}).next()
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`${mode}() encodes seek target (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
const db = mockLevel({
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}, utf8Manifest, { keyEncoding: 'json' })
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_seek (target, options) {
|
||||
t.is(target, '"a"', 'encoded once')
|
||||
t.same(options, { keyEncoding: 'utf8' })
|
||||
}
|
||||
}
|
||||
|
||||
if (!deferred) await db.open()
|
||||
const it = db[publicMethod]()
|
||||
it.seek('a')
|
||||
await it.next()
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`${mode}() encodes seek target with custom encoding (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(1)
|
||||
|
||||
const targets = []
|
||||
const db = mockLevel({
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}, utf8Manifest)
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_seek (target) {
|
||||
targets.push(target)
|
||||
}
|
||||
}
|
||||
|
||||
if (!deferred) await db.open()
|
||||
|
||||
db[publicMethod]().seek('a')
|
||||
db[publicMethod]({ keyEncoding: 'json' }).seek('a')
|
||||
db[publicMethod]().seek('b', { keyEncoding: 'json' })
|
||||
|
||||
await db.open()
|
||||
t.same(targets, ['a', '"a"', '"b"'], 'encoded targets')
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`${mode}() encodes nullish seek target (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(1)
|
||||
|
||||
const targets = []
|
||||
const db = mockLevel({
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}, utf8Manifest, { keyEncoding: { encode: String, decode: identity, format: 'utf8' } })
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_seek (target) {
|
||||
targets.push(target)
|
||||
}
|
||||
}
|
||||
|
||||
if (!deferred) await db.open()
|
||||
|
||||
// Unlike keys, nullish targets should not be rejected;
|
||||
// assume that the encoding gives these types meaning.
|
||||
db[publicMethod]().seek(null)
|
||||
db[publicMethod]().seek(undefined)
|
||||
|
||||
await db.open()
|
||||
t.same(targets, ['null', 'undefined'], 'encoded')
|
||||
})
|
||||
|
||||
test(`${mode}() has default nextv() (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
const sizes = [[1, [0]], [1, [1]], [2, [2]], [3, [3]]]
|
||||
t.plan(sizes.length * 2)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
let pos = 0
|
||||
class MockIterator extends Ctor {
|
||||
_next (callback) {
|
||||
if (mode === 'iterator' || def) {
|
||||
this.nextTick(callback, null, 'k' + pos, 'v' + (pos++))
|
||||
} else if (mode === 'keys') {
|
||||
this.nextTick(callback, null, 'k' + (pos++))
|
||||
} else {
|
||||
this.nextTick(callback, null, 'v' + (pos++))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(utf8Manifest)
|
||||
if (!deferred) await db.open()
|
||||
|
||||
let expectedPos = 0
|
||||
const it = db[publicMethod]()
|
||||
|
||||
for (const [size, args] of sizes) {
|
||||
const actual = await it.nextv(...args)
|
||||
const expected = []
|
||||
|
||||
for (let i = 0; i < size; i++) {
|
||||
const pos = expectedPos++
|
||||
if (mode === 'iterator') expected.push(['k' + pos, 'v' + pos])
|
||||
else if (mode === 'keys') expected.push('k' + pos)
|
||||
else expected.push('v' + pos)
|
||||
}
|
||||
|
||||
t.is(actual.length, size)
|
||||
t.same(actual, expected)
|
||||
}
|
||||
})
|
||||
|
||||
test(`${mode}() default nextv() forwards next() error (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_next (callback) {
|
||||
t.pass('called')
|
||||
this.nextTick(callback, new Error('test'))
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(utf8Manifest)
|
||||
if (!deferred) await db.open()
|
||||
|
||||
try {
|
||||
await db[publicMethod]().nextv(10)
|
||||
} catch (err) {
|
||||
t.is(err.message, 'test')
|
||||
}
|
||||
})
|
||||
|
||||
test(`${mode}() has default all() (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(8)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
let pos = 0
|
||||
class MockIterator extends Ctor {
|
||||
_nextv (size, options, callback) {
|
||||
t.is(size, 1000)
|
||||
t.same(options, {})
|
||||
|
||||
if (pos === 4) {
|
||||
this.nextTick(callback, null, [])
|
||||
} else if (mode === 'iterator' || def) {
|
||||
this.nextTick(callback, null, [[String(pos++), 'a'], [String(pos++), 'b']])
|
||||
} else if (mode === 'keys') {
|
||||
this.nextTick(callback, null, [String(pos++), String(pos++)])
|
||||
} else {
|
||||
pos += 2
|
||||
this.nextTick(callback, null, ['a', 'b'])
|
||||
}
|
||||
}
|
||||
|
||||
_close (callback) {
|
||||
t.pass('closed')
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(utf8Manifest)
|
||||
if (!deferred) await db.open()
|
||||
|
||||
t.same(await db[publicMethod]().all(), [
|
||||
['0', 'a'],
|
||||
['1', 'b'],
|
||||
['2', 'a'],
|
||||
['3', 'b']
|
||||
].map(kv => mode === 'iterator' ? kv : kv[mode === 'keys' ? 0 : 1]))
|
||||
})
|
||||
|
||||
test(`${mode}() default all() forwards nextv() error (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_nextv (size, options, callback) {
|
||||
t.pass('called')
|
||||
this.nextTick(callback, new Error('test'))
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(utf8Manifest)
|
||||
if (!deferred) await db.open()
|
||||
|
||||
try {
|
||||
await db[publicMethod]().all()
|
||||
} catch (err) {
|
||||
t.is(err.message, 'test')
|
||||
}
|
||||
})
|
||||
|
||||
test(`${mode}() custom all() (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(3)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_all (options, callback) {
|
||||
t.same(options, {})
|
||||
|
||||
if (mode === 'iterator' || def) {
|
||||
this.nextTick(callback, null, [['k0', 'v0'], ['k1', 'v1']])
|
||||
} else if (mode === 'keys') {
|
||||
this.nextTick(callback, null, ['k0', 'k1'])
|
||||
} else {
|
||||
this.nextTick(callback, null, ['v0', 'v1'])
|
||||
}
|
||||
}
|
||||
|
||||
_close (callback) {
|
||||
t.pass('closed')
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(utf8Manifest)
|
||||
if (!deferred) await db.open()
|
||||
|
||||
t.same(await db[publicMethod]().all(), [
|
||||
['k0', 'v0'],
|
||||
['k1', 'v1']
|
||||
].map(kv => mode === 'iterator' ? kv : kv[mode === 'keys' ? 0 : 1]))
|
||||
})
|
||||
|
||||
test(`${mode}() custom all() forwards error and closes (deferred: ${deferred}, default implementation: ${def})`, async function (t) {
|
||||
t.plan(3)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_all (options, callback) {
|
||||
t.pass('_all called')
|
||||
this.nextTick(callback, new Error('test'))
|
||||
}
|
||||
|
||||
_close (callback) {
|
||||
t.pass('closed')
|
||||
this.nextTick(callback)
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel(utf8Manifest)
|
||||
if (!deferred) await db.open()
|
||||
|
||||
try {
|
||||
await db[publicMethod]().all()
|
||||
} catch (err) {
|
||||
t.is(err.message, 'test')
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for (const deferred of [false, true]) {
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`iterator() skips decoding keys if options.keys is false (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(3)
|
||||
|
||||
const keyEncoding = {
|
||||
format: 'utf8',
|
||||
decode (key) {
|
||||
t.fail('should not be called')
|
||||
},
|
||||
encode: identity
|
||||
}
|
||||
|
||||
const db = mockLevel({
|
||||
_iterator (options) {
|
||||
t.is(options.keys, false)
|
||||
|
||||
return mockIterator(this, options, {
|
||||
_next (callback) {
|
||||
this.nextTick(callback, null, '', 'value')
|
||||
}
|
||||
})
|
||||
}
|
||||
}, utf8Manifest, { keyEncoding })
|
||||
|
||||
if (!deferred) await db.open()
|
||||
const [key, value] = await db.iterator({ keys: false }).next()
|
||||
|
||||
t.is(key, undefined, 'normalized key to undefined')
|
||||
t.is(value, 'value', 'got value')
|
||||
})
|
||||
|
||||
// NOTE: adapted from encoding-down
|
||||
test(`iterator() skips decoding values if options.values is false (deferred: ${deferred})`, async function (t) {
|
||||
t.plan(3)
|
||||
|
||||
const valueEncoding = {
|
||||
format: 'utf8',
|
||||
decode (value) {
|
||||
t.fail('should not be called')
|
||||
},
|
||||
encode: identity
|
||||
}
|
||||
|
||||
const db = mockLevel({
|
||||
_iterator (options) {
|
||||
t.is(options.values, false)
|
||||
|
||||
return mockIterator(this, options, {
|
||||
_next (callback) {
|
||||
callback(null, 'key', '')
|
||||
}
|
||||
})
|
||||
}
|
||||
}, utf8Manifest, { valueEncoding })
|
||||
|
||||
if (!deferred) await db.open()
|
||||
const [key, value] = await db.iterator({ values: false }).next()
|
||||
|
||||
t.is(key, 'key', 'got key')
|
||||
t.is(value, undefined, 'normalized value to undefined')
|
||||
})
|
||||
}
|
||||
|
||||
function spy (fn) {
|
||||
const wrapped = function (...args) {
|
||||
wrapped.calls++
|
||||
return fn(...args)
|
||||
}
|
||||
wrapped.calls = 0
|
||||
return wrapped
|
||||
}
931
node_modules/abstract-level/test/self/sublevel-test.js
generated
vendored
Normal file
@@ -0,0 +1,931 @@
'use strict'

const test = require('tape')
const { Buffer } = require('buffer')
const { AbstractLevel, AbstractSublevel } = require('../..')
const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('../..')
const nextTick = AbstractLevel.prototype.nextTick

class NoopLevel extends AbstractLevel {
  constructor (...args) {
    super(
      { encodings: { utf8: true, buffer: true, view: true } },
      ...args
    )
  }
}

test('sublevel is extensible', function (t) {
|
||||
t.plan(6)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
_sublevel (name, options) {
|
||||
t.is(name, 'test')
|
||||
t.same(options, { separator: '!', customOption: 123 })
|
||||
|
||||
return new MockSublevel(this, name, {
|
||||
...options,
|
||||
manifest: {
|
||||
encodings: { ignored: true },
|
||||
additionalMethods: { test: true },
|
||||
events: { foo: true }
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
class MockSublevel extends AbstractSublevel {
|
||||
test () {
|
||||
this.emit('foo')
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel({
|
||||
encodings: { utf8: true },
|
||||
additionalMethods: { ignored: true },
|
||||
events: { ignored: true }
|
||||
})
|
||||
|
||||
const sub = db.sublevel('test', { customOption: 123 })
|
||||
|
||||
t.is(sub.supports.encodings.ignored, undefined)
|
||||
t.same(sub.supports.additionalMethods, { test: true })
|
||||
t.same(sub.supports.events, {
|
||||
foo: true,
|
||||
|
||||
// Added by AbstractLevel
|
||||
opening: true,
|
||||
open: true,
|
||||
closing: true,
|
||||
closed: true,
|
||||
put: true,
|
||||
del: true,
|
||||
batch: true,
|
||||
clear: true
|
||||
})
|
||||
|
||||
sub.on('foo', () => t.pass('emitted'))
|
||||
sub.test()
|
||||
})
|
||||
|
||||
// NOTE: adapted from subleveldown
|
||||
test('sublevel prefix and options', function (t) {
|
||||
t.test('empty prefix', function (t) {
|
||||
const sub = new NoopLevel().sublevel('')
|
||||
t.is(sub.prefix, '!!')
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('prefix without options', function (t) {
|
||||
const sub = new NoopLevel().sublevel('prefix')
|
||||
t.is(sub.prefix, '!prefix!')
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('prefix and separator option', function (t) {
|
||||
const sub = new NoopLevel().sublevel('prefix', { separator: '%' })
|
||||
t.is(sub.prefix, '%prefix%')
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('separator is trimmed from prefix', function (t) {
|
||||
const sub1 = new NoopLevel().sublevel('!prefix')
|
||||
t.is(sub1.prefix, '!prefix!')
|
||||
|
||||
const sub2 = new NoopLevel().sublevel('prefix!')
|
||||
t.is(sub2.prefix, '!prefix!')
|
||||
|
||||
const sub3 = new NoopLevel().sublevel('!!prefix!!')
|
||||
t.is(sub3.prefix, '!prefix!')
|
||||
|
||||
const sub4 = new NoopLevel().sublevel('@prefix@', { separator: '@' })
|
||||
t.is(sub4.prefix, '@prefix@')
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('repeated separator can not result in empty prefix', function (t) {
|
||||
const sub = new NoopLevel().sublevel('!!!!')
|
||||
t.is(sub.prefix, '!!')
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('invalid sublevel prefix', function (t) {
|
||||
t.throws(() => new NoopLevel().sublevel('foo\x05'), (err) => err.code === 'LEVEL_INVALID_PREFIX')
|
||||
t.throws(() => new NoopLevel().sublevel('foo\xff'), (err) => err.code === 'LEVEL_INVALID_PREFIX')
|
||||
t.throws(() => new NoopLevel().sublevel('foo!', { separator: '@' }), (err) => err.code === 'LEVEL_INVALID_PREFIX')
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('legacy sublevel(down) options', function (t) {
|
||||
t.throws(() => new NoopLevel().sublevel('foo', 'bar'), (err) => err.code === 'LEVEL_LEGACY')
|
||||
t.throws(() => new NoopLevel().sublevel('foo', { open: () => {} }), (err) => err.code === 'LEVEL_LEGACY')
|
||||
t.end()
|
||||
})
|
||||
|
||||
// See https://github.com/Level/subleveldown/issues/78
|
||||
t.test('doubly nested sublevel has correct prefix', async function (t) {
|
||||
t.plan(1)
|
||||
|
||||
const keys = []
|
||||
class MockLevel extends AbstractLevel {
|
||||
_put (key, value, options, callback) {
|
||||
keys.push(key)
|
||||
nextTick(callback)
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel({ encodings: { utf8: true } })
|
||||
const sub1 = db.sublevel('1')
|
||||
const sub2 = sub1.sublevel('2')
|
||||
const sub3 = sub2.sublevel('3')
|
||||
|
||||
await sub1.put('a', 'value')
|
||||
await sub2.put('b', 'value')
|
||||
await sub3.put('c', 'value')
|
||||
|
||||
t.same(keys.sort(), [
|
||||
'!1!!2!!3!c',
|
||||
'!1!!2!b',
|
||||
'!1!a'
|
||||
])
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('sublevel.prefixKey()', function (t) {
|
||||
const db = new AbstractLevel({ encodings: { utf8: true, buffer: true, view: true } })
|
||||
const sub = db.sublevel('test')
|
||||
const textEncoder = new TextEncoder()
|
||||
|
||||
t.same(sub.prefixKey('', 'utf8'), '!test!')
|
||||
t.same(sub.prefixKey('a', 'utf8'), '!test!a')
|
||||
|
||||
t.same(sub.prefixKey(Buffer.from(''), 'buffer'), Buffer.from('!test!'))
|
||||
t.same(sub.prefixKey(Buffer.from('a'), 'buffer'), Buffer.from('!test!a'))
|
||||
|
||||
t.same(sub.prefixKey(textEncoder.encode(''), 'view'), textEncoder.encode('!test!'))
|
||||
t.same(sub.prefixKey(textEncoder.encode('a'), 'view'), textEncoder.encode('!test!a'))
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
// NOTE: adapted from subleveldown
|
||||
test('sublevel manifest and parent db', function (t) {
|
||||
t.test('sublevel inherits manifest from parent db', function (t) {
|
||||
const parent = new AbstractLevel({
|
||||
encodings: { utf8: true },
|
||||
seek: true,
|
||||
foo: true
|
||||
})
|
||||
const sub = parent.sublevel('')
|
||||
t.is(sub.supports.foo, true, 'AbstractSublevel inherits from parent')
|
||||
t.is(sub.supports.seek, true, 'AbstractSublevel inherits from parent')
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('sublevel does not support additionalMethods', function (t) {
|
||||
const parent = new AbstractLevel({
|
||||
encodings: { utf8: true },
|
||||
additionalMethods: { foo: true }
|
||||
})
|
||||
|
||||
// We're expecting that AbstractSublevel removes the additionalMethod
|
||||
// because it can't automatically prefix any key(-like) arguments
|
||||
const sub = parent.sublevel('')
|
||||
t.same(sub.supports.additionalMethods, {})
|
||||
t.same(parent.supports.additionalMethods, { foo: true })
|
||||
t.is(typeof sub.foo, 'undefined', 'AbstractSublevel does not expose method')
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('sublevel.db is set to parent db', function (t) {
|
||||
const db = new NoopLevel()
|
||||
const sub = db.sublevel('test')
|
||||
sub.once('open', function () {
|
||||
t.ok(sub.db instanceof NoopLevel)
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
// NOTE: adapted from subleveldown
|
||||
test('opening & closing sublevel', function (t) {
|
||||
t.test('error from open() does not bubble up to sublevel', function (t) {
|
||||
t.plan(5)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
_open (opts, cb) {
|
||||
nextTick(cb, new Error('error from underlying store'))
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel({ encodings: { buffer: true } })
|
||||
const sub = db.sublevel('test')
|
||||
|
||||
db.open((err) => {
|
||||
t.is(err && err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
t.is(err && err.cause && err.cause.message, 'error from underlying store')
|
||||
})
|
||||
|
||||
sub.open((err) => {
|
||||
t.is(err && err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
t.is(err && err.cause && err.cause.code, 'LEVEL_DATABASE_NOT_OPEN') // from db
|
||||
t.is(err && err.cause && err.cause.cause, undefined) // but does not have underlying error
|
||||
})
|
||||
})
|
||||
|
||||
t.test('cannot create a sublevel on a closed db', function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const db = new NoopLevel()
|
||||
|
||||
db.once('open', function () {
|
||||
db.close(function (err) {
|
||||
t.error(err, 'no error')
|
||||
|
||||
db.sublevel('test').open(function (err) {
|
||||
t.is(err && err.code, 'LEVEL_DATABASE_NOT_OPEN', 'sublevel not opened')
|
||||
|
||||
db.open(function (err) {
|
||||
t.error(err, 'no error')
|
||||
|
||||
db.sublevel('test').on('open', function () {
|
||||
t.pass('sublevel opened')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
t.test('can close db and sublevel once opened', function (t) {
|
||||
t.plan(3)
|
||||
|
||||
const db = new NoopLevel()
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err, 'no open error')
|
||||
const sub = db.sublevel('test')
|
||||
|
||||
sub.once('open', function () {
|
||||
db.close(function (err) {
|
||||
t.ifError(err, 'no close error')
|
||||
})
|
||||
|
||||
sub.close(function (err) {
|
||||
t.ifError(err, 'no close error')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
t.test('sublevel rejects operations if parent db is closed', function (t) {
|
||||
t.plan(9)
|
||||
|
||||
const db = new NoopLevel()
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err, 'no open error')
|
||||
|
||||
const sub = db.sublevel('test')
|
||||
const it = sub.iterator()
|
||||
|
||||
sub.once('open', function () {
|
||||
db.close(function (err) {
|
||||
t.ifError(err, 'no close error')
|
||||
|
||||
sub.put('foo', 'bar', verify)
|
||||
sub.get('foo', verify)
|
||||
sub.del('foo', verify)
|
||||
sub.clear(verify)
|
||||
sub.batch([{ type: 'del', key: 'foo' }], verify)
|
||||
|
||||
it.next(function (err) {
|
||||
t.is(err.code, 'LEVEL_ITERATOR_NOT_OPEN')
|
||||
it.close(t.ifError.bind(t))
|
||||
})
|
||||
|
||||
function verify (err) {
|
||||
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
t.test('cannot close db while sublevel is opening', function (t) {
|
||||
t.plan(5)
|
||||
|
||||
const db = new NoopLevel()
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err, 'no open error')
|
||||
const sub = db.sublevel('test')
|
||||
|
||||
sub.open((err) => {
|
||||
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
})
|
||||
|
||||
db.close(function (err) {
|
||||
t.ifError(err, 'no close error')
|
||||
t.is(sub.status, 'closed')
|
||||
t.is(db.status, 'closed')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
t.test('cannot create sublevel while db is closing', function (t) {
|
||||
t.plan(5)
|
||||
|
||||
const db = new NoopLevel()
|
||||
|
||||
db.open(function (err) {
|
||||
t.ifError(err, 'no open error')
|
||||
|
||||
db.close(function (err) {
|
||||
t.ifError(err, 'no close error')
|
||||
t.is(db.status, 'closed')
|
||||
})
|
||||
|
||||
const sub = db.sublevel('test')
|
||||
|
||||
sub.open((err) => {
|
||||
t.is(err.code, 'LEVEL_DATABASE_NOT_OPEN')
|
||||
t.is(sub.status, 'closed')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
t.test('can wrap a sublevel and reopen the wrapped sublevel', function (t) {
|
||||
const db = new NoopLevel()
|
||||
const sub1 = db.sublevel('test1')
|
||||
const sub2 = sub1.sublevel('test2')
|
||||
|
||||
sub2.once('open', function () {
|
||||
verify()
|
||||
|
||||
sub2.close(function (err) {
|
||||
t.ifError(err, 'no close error')
|
||||
|
||||
// Prefixes should be the same after closing & reopening
|
||||
// See https://github.com/Level/subleveldown/issues/78
|
||||
sub2.open(function (err) {
|
||||
t.ifError(err, 'no open error')
|
||||
verify()
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
function verify () {
|
||||
t.is(sub1.prefix, '!test1!', 'sub1 prefix ok')
|
||||
t.ok(sub1.db instanceof NoopLevel)
|
||||
t.is(sub2.prefix, '!test1!!test2!', 'sub2 prefix ok')
|
||||
t.ok(sub2.db instanceof NoopLevel)
|
||||
}
|
||||
})
|
||||
|
||||
// Also test default fallback implementations of keys() and values()
|
||||
for (const [mode, def] of [['iterator', false], ['keys', false], ['values', false], ['keys', true], ['values', true]]) {
|
||||
const Ctor = mode === 'iterator' || def ? AbstractIterator : mode === 'keys' ? AbstractKeyIterator : AbstractValueIterator
|
||||
const privateMethod = def ? '_iterator' : '_' + mode
|
||||
const publicMethod = mode
|
||||
|
||||
t.test(`error from sublevel.${mode}() bubbles up (default implementation: ${def})`, function (t) {
|
||||
t.plan(2)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_next (callback) {
|
||||
this.nextTick(callback, new Error('next() error from parent database'))
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel({ encodings: { buffer: true } })
|
||||
const sub = db.sublevel('test')
|
||||
const it = sub[publicMethod]()
|
||||
|
||||
it.next(function (err) {
|
||||
t.is(err.message, 'next() error from parent database')
|
||||
|
||||
it.close(function () {
|
||||
t.pass('closed')
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('sublevel operations are prefixed', function (t) {
|
||||
t.test('sublevel.getMany() is prefixed', async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
_getMany (keys, options, callback) {
|
||||
t.same(keys, ['!test!a', '!test!b'])
|
||||
t.same(options, { keyEncoding: 'utf8', valueEncoding: 'utf8' })
|
||||
nextTick(callback, null, ['1', '2'])
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel({ encodings: { utf8: true } })
|
||||
const sub = db.sublevel('test')
|
||||
|
||||
await sub.open()
|
||||
await sub.getMany(['a', 'b'])
|
||||
})
|
||||
|
||||
// Also test default fallback implementations of keys() and values()
|
||||
for (const [mode, def] of [['iterator', false], ['keys', false], ['values', false], ['keys', true], ['values', true]]) {
|
||||
const Ctor = mode === 'iterator' || def ? AbstractIterator : mode === 'keys' ? AbstractKeyIterator : AbstractValueIterator
|
||||
const privateMethod = def ? '_iterator' : '_' + mode
|
||||
const publicMethod = mode
|
||||
|
||||
for (const deferred of [false, true]) {
|
||||
t.test(`sublevel ${mode}.seek() target is prefixed (default implementation: ${def}, deferred: ${deferred})`, async function (t) {
|
||||
t.plan(2)
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_seek (target, options) {
|
||||
t.is(target, '!sub!123')
|
||||
t.is(options.keyEncoding, 'utf8')
|
||||
}
|
||||
}
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel({ encodings: { utf8: true } })
|
||||
const sub = db.sublevel('sub', { keyEncoding: 'json' })
|
||||
|
||||
if (!deferred) await sub.open()
|
||||
|
||||
const it = sub[publicMethod]()
|
||||
it.seek(123)
|
||||
|
||||
if (deferred) await sub.open()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
t.test('sublevel.clear() is prefixed', async function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const calls = []
|
||||
class MockLevel extends AbstractLevel {
|
||||
_clear (options, callback) {
|
||||
calls.push(options)
|
||||
nextTick(callback)
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel({ encodings: { utf8: true } })
|
||||
const sub = db.sublevel('sub')
|
||||
|
||||
const test = async (options, expected) => {
|
||||
await sub.clear(options)
|
||||
t.same(calls.shift(), expected)
|
||||
}
|
||||
|
||||
await sub.open()
|
||||
|
||||
await test(undefined, {
|
||||
gte: '!sub!',
|
||||
lte: '!sub"',
|
||||
keyEncoding: 'utf8',
|
||||
reverse: false,
|
||||
limit: -1
|
||||
})
|
||||
|
||||
await test({ gt: 'a' }, {
|
||||
gt: '!sub!a',
|
||||
lte: '!sub"',
|
||||
keyEncoding: 'utf8',
|
||||
reverse: false,
|
||||
limit: -1
|
||||
})
|
||||
|
||||
await test({ gte: 'a', lt: 'x' }, {
|
||||
gte: '!sub!a',
|
||||
lt: '!sub!x',
|
||||
keyEncoding: 'utf8',
|
||||
reverse: false,
|
||||
limit: -1
|
||||
})
|
||||
|
||||
await test({ lte: 'x' }, {
|
||||
gte: '!sub!',
|
||||
lte: '!sub!x',
|
||||
keyEncoding: 'utf8',
|
||||
reverse: false,
|
||||
limit: -1
|
||||
})
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('sublevel encodings', function (t) {
|
||||
// NOTE: adapted from subleveldown
|
||||
t.test('different sublevels can have different encodings', function (t) {
|
||||
t.plan(10)
|
||||
|
||||
const puts = []
|
||||
const gets = []
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
_put (key, value, { keyEncoding, valueEncoding }, callback) {
|
||||
puts.push({ key, value, keyEncoding, valueEncoding })
|
||||
nextTick(callback)
|
||||
}
|
||||
|
||||
_get (key, { keyEncoding, valueEncoding }, callback) {
|
||||
gets.push({ key, keyEncoding, valueEncoding })
|
||||
nextTick(callback, null, puts.shift().value)
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel({ encodings: { buffer: true, utf8: true } })
|
||||
const sub1 = db.sublevel('test1', { valueEncoding: 'json' })
|
||||
const sub2 = db.sublevel('test2', { keyEncoding: 'buffer', valueEncoding: 'buffer' })
|
||||
|
||||
sub1.put('foo', { some: 'json' }, function (err) {
|
||||
t.error(err, 'no error')
|
||||
|
||||
t.same(puts, [{
|
||||
key: '!test1!foo',
|
||||
value: '{"some":"json"}',
|
||||
keyEncoding: 'utf8',
|
||||
valueEncoding: 'utf8'
|
||||
}])
|
||||
|
||||
sub1.get('foo', function (err, value) {
|
||||
t.error(err, 'no error')
|
||||
t.same(value, { some: 'json' })
|
||||
t.same(gets.shift(), {
|
||||
key: '!test1!foo',
|
||||
keyEncoding: 'utf8',
|
||||
valueEncoding: 'utf8'
|
||||
})
|
||||
|
||||
sub2.put(Buffer.from([1, 2]), Buffer.from([3]), function (err) {
|
||||
t.error(err, 'no error')
|
||||
|
||||
t.same(puts, [{
|
||||
key: Buffer.from('!test2!\x01\x02'),
|
||||
value: Buffer.from([3]),
|
||||
keyEncoding: 'buffer',
|
||||
valueEncoding: 'buffer'
|
||||
}])
|
||||
|
||||
sub2.get(Buffer.from([1, 2]), function (err, value) {
|
||||
t.error(err, 'no error')
|
||||
t.same(value, Buffer.from([3]))
|
||||
t.same(gets.shift(), {
|
||||
key: Buffer.from('!test2!\x01\x02'),
|
||||
keyEncoding: 'buffer',
|
||||
valueEncoding: 'buffer'
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
t.test('sublevel indirectly supports transcoded encoding', function (t) {
|
||||
t.plan(5)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
_put (key, value, { keyEncoding, valueEncoding }, callback) {
|
||||
t.same({ key, value, keyEncoding, valueEncoding }, {
|
||||
key: Buffer.from('!test!foo'),
|
||||
value: Buffer.from('{"some":"json"}'),
|
||||
keyEncoding: 'buffer',
|
||||
valueEncoding: 'buffer'
|
||||
})
|
||||
nextTick(callback)
|
||||
}
|
||||
|
||||
_get (key, { keyEncoding, valueEncoding }, callback) {
|
||||
t.same({ key, keyEncoding, valueEncoding }, {
|
||||
key: Buffer.from('!test!foo'),
|
||||
keyEncoding: 'buffer',
|
||||
valueEncoding: 'buffer'
|
||||
})
|
||||
nextTick(callback, null, Buffer.from('{"some":"json"}'))
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel({ encodings: { buffer: true } })
|
||||
const sub = db.sublevel('test', { valueEncoding: 'json' })
|
||||
|
||||
sub.put('foo', { some: 'json' }, function (err) {
|
||||
t.error(err, 'no error')
|
||||
|
||||
sub.get('foo', function (err, value) {
|
||||
t.error(err, 'no error')
|
||||
t.same(value, { some: 'json' })
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
t.test('concatenating sublevel Buffer keys', function (t) {
|
||||
t.plan(10)
|
||||
|
||||
const key = Buffer.from('00ff', 'hex')
|
||||
const prefixedKey = Buffer.concat([Buffer.from('!test!'), key])
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
_put (key, value, options, callback) {
|
||||
t.is(options.keyEncoding, 'buffer')
|
||||
t.is(options.valueEncoding, 'buffer')
|
||||
t.same(key, prefixedKey)
|
||||
t.same(value, Buffer.from('bar'))
|
||||
nextTick(callback)
|
||||
}
|
||||
|
||||
_get (key, options, callback) {
|
||||
t.is(options.keyEncoding, 'buffer')
|
||||
t.is(options.valueEncoding, 'buffer')
|
||||
t.same(key, prefixedKey)
|
||||
nextTick(callback, null, Buffer.from('bar'))
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel({ encodings: { buffer: true } })
|
||||
const sub = db.sublevel('test', { keyEncoding: 'buffer' })
|
||||
|
||||
sub.put(key, 'bar', function (err) {
|
||||
t.ifError(err)
|
||||
sub.get(key, function (err, value) {
|
||||
t.ifError(err)
|
||||
t.is(value, 'bar')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
t.test('concatenating sublevel Uint8Array keys', function (t) {
|
||||
t.plan(10)
|
||||
|
||||
const key = new Uint8Array([0, 255])
|
||||
const textEncoder = new TextEncoder()
|
||||
const prefix = textEncoder.encode('!test!')
|
||||
const prefixedKey = new Uint8Array(prefix.byteLength + key.byteLength)
|
||||
|
||||
prefixedKey.set(prefix, 0)
|
||||
prefixedKey.set(key, prefix.byteLength)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
_put (key, value, options, callback) {
|
||||
t.is(options.keyEncoding, 'view')
|
||||
t.is(options.valueEncoding, 'view')
|
||||
t.same(key, prefixedKey)
|
||||
t.same(value, textEncoder.encode('bar'))
|
||||
nextTick(callback)
|
||||
}
|
||||
|
||||
_get (key, options, callback) {
|
||||
t.is(options.keyEncoding, 'view')
|
||||
t.is(options.valueEncoding, 'view')
|
||||
t.same(key, prefixedKey)
|
||||
nextTick(callback, null, textEncoder.encode('bar'))
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel({ encodings: { view: true } })
|
||||
const sub = db.sublevel('test', { keyEncoding: 'view' })
|
||||
|
||||
sub.put(key, 'bar', function (err) {
|
||||
t.ifError(err)
|
||||
sub.get(key, function (err, value) {
|
||||
t.ifError(err)
|
||||
t.is(value, 'bar')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// Also test default fallback implementations of keys() and values()
|
||||
for (const [mode, def] of [['iterator', false], ['keys', false], ['values', false], ['keys', true], ['values', true]]) {
|
||||
const Ctor = mode === 'iterator' || def ? AbstractIterator : mode === 'keys' ? AbstractKeyIterator : AbstractValueIterator
|
||||
const privateMethod = def ? '_iterator' : '_' + mode
|
||||
const publicMethod = mode
|
||||
|
||||
t.test(`unfixing sublevel.${mode}() Buffer keys (default implementation: ${def})`, function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const testKey = Buffer.from('00ff', 'hex')
|
||||
const prefixedKey = Buffer.concat([Buffer.from('!test!'), testKey])
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_next (callback) {
|
||||
if (mode === 'iterator' || def) {
|
||||
this.nextTick(callback, null, prefixedKey, 'bar')
|
||||
} else if (mode === 'keys') {
|
||||
this.nextTick(callback, null, prefixedKey)
|
||||
} else {
|
||||
this.nextTick(callback, null, 'bar')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
t.is(options.keyEncoding, 'buffer')
|
||||
t.is(options.valueEncoding, 'utf8')
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel({ encodings: { buffer: true, view: true, utf8: true } })
|
||||
const sub = db.sublevel('test', { keyEncoding: 'buffer' })
|
||||
|
||||
sub[publicMethod]().next(function (err, keyOrValue) {
|
||||
t.ifError(err)
|
||||
t.same(keyOrValue, mode === 'values' ? 'bar' : testKey)
|
||||
})
|
||||
})
|
||||
|
||||
t.test(`unfixing sublevel.${mode}() Uint8Array keys (default implementation: ${def})`, function (t) {
|
||||
t.plan(4)
|
||||
|
||||
const testKey = new Uint8Array([0, 255])
|
||||
const textEncoder = new TextEncoder()
|
||||
const prefix = textEncoder.encode('!test!')
|
||||
const prefixedKey = new Uint8Array(prefix.byteLength + testKey.byteLength)
|
||||
|
||||
prefixedKey.set(prefix, 0)
|
||||
prefixedKey.set(testKey, prefix.byteLength)
|
||||
|
||||
class MockIterator extends Ctor {
|
||||
_next (callback) {
|
||||
if (mode === 'iterator' || def) {
|
||||
this.nextTick(callback, null, prefixedKey, 'bar')
|
||||
} else if (mode === 'keys') {
|
||||
this.nextTick(callback, null, prefixedKey)
|
||||
} else {
|
||||
this.nextTick(callback, null, 'bar')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
[privateMethod] (options) {
|
||||
t.is(options.keyEncoding, 'view')
|
||||
t.is(options.valueEncoding, 'utf8')
|
||||
return new MockIterator(this, options)
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel({ encodings: { buffer: true, view: true, utf8: true } })
|
||||
const sub = db.sublevel('test', { keyEncoding: 'view' })
|
||||
|
||||
sub[publicMethod]().next(function (err, keyOrValue) {
|
||||
t.ifError(err)
|
||||
t.same(keyOrValue, mode === 'values' ? 'bar' : testKey)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
for (const chained of [false, true]) {
|
||||
for (const deferred of [false, true]) {
|
||||
test(`batch() with sublevel per operation (chained: ${chained}, deferred: ${deferred})`, async function (t) {
|
||||
t.plan(6)
|
||||
|
||||
class MockLevel extends AbstractLevel {
|
||||
_batch (operations, options, callback) {
|
||||
t.same(operations, [
|
||||
{
|
||||
type: 'put',
|
||||
sublevel: null,
|
||||
key: '!1!a',
|
||||
value: '{"foo":123}',
|
||||
keyEncoding: 'utf8',
|
||||
valueEncoding: 'utf8'
|
||||
},
|
||||
{
|
||||
type: 'put',
|
||||
sublevel: null,
|
||||
key: '!2!a-y',
|
||||
value: '[object Object]',
|
||||
keyEncoding: 'utf8',
|
||||
valueEncoding: 'utf8'
|
||||
},
|
||||
{
|
||||
type: 'put',
|
||||
sublevel: null,
|
||||
key: '!1!b',
|
||||
value: '[object Object]',
|
||||
keyEncoding: 'utf8',
|
||||
valueEncoding: 'utf8'
|
||||
},
|
||||
{
|
||||
type: 'put',
|
||||
sublevel: null,
|
||||
key: '!2!b',
|
||||
value: 'b',
|
||||
keyEncoding: 'utf8',
|
||||
valueEncoding: 'utf8'
|
||||
},
|
||||
{
|
||||
type: 'del',
|
||||
sublevel: null,
|
||||
key: '!2!c1',
|
||||
keyEncoding: 'utf8'
|
||||
},
|
||||
{
|
||||
type: 'del',
|
||||
sublevel: null,
|
||||
key: '!2!c2-y',
|
||||
keyEncoding: 'utf8'
|
||||
},
|
||||
{
|
||||
type: 'del',
|
||||
key: 'd-x',
|
||||
keyEncoding: 'utf8'
|
||||
}
|
||||
])
|
||||
t.same(options, {})
|
||||
nextTick(callback)
|
||||
}
|
||||
}
|
||||
|
||||
const db = new MockLevel({ encodings: { utf8: true } }, {
|
||||
keyEncoding: {
|
||||
encode: (key) => key + '-x',
|
||||
decode: (key) => key.slice(0, -2),
|
||||
name: 'x',
|
||||
format: 'utf8'
|
||||
}
|
||||
})
|
||||
|
||||
const sub1 = db.sublevel('1', { valueEncoding: 'json' })
|
||||
const sub2 = db.sublevel('2', {
|
||||
keyEncoding: {
|
||||
encode: (key) => key + '-y',
|
||||
decode: (key) => key.slice(0, -2),
|
||||
name: 'y',
|
||||
format: 'utf8'
|
||||
}
|
||||
})
|
||||
|
||||
if (!deferred) await sub1.open()
|
||||
|
||||
t.is(sub1.keyEncoding().name, 'utf8')
|
||||
t.is(sub1.valueEncoding().name, 'json')
|
||||
t.is(sub2.keyEncoding().name, 'y')
|
||||
t.is(sub2.valueEncoding().name, 'utf8')
|
||||
|
||||
if (chained) {
|
||||
await db.batch()
|
||||
// keyEncoding: utf8 (sublevel), valueEncoding: json (sublevel)
|
||||
.put('a', { foo: 123 }, { sublevel: sub1 })
|
||||
|
||||
// keyEncoding: y (sublevel), valueEncoding: utf8 (sublevel)
|
||||
.put('a', { foo: 123 }, { sublevel: sub2 })
|
||||
|
||||
// keyEncoding: utf8 (sublevel), valueEncoding: utf8 (operation)
|
||||
.put('b', { foo: 123 }, { sublevel: sub1, valueEncoding: 'utf8' })
|
||||
|
||||
// keyEncoding: utf8 (operation), valueEncoding: utf8 (sublevel)
|
||||
.put('b', 'b', { sublevel: sub2, keyEncoding: 'utf8' })
|
||||
|
||||
// keyEncoding: utf8 (operation)
|
||||
.del('c1', { sublevel: sub2, keyEncoding: 'utf8' })
|
||||
|
||||
// keyEncoding: y (sublevel)
|
||||
.del('c2', { sublevel: sub2 })
|
||||
|
||||
// keyEncoding: x (db). Should not affect sublevels.
|
||||
.del('d')
|
||||
.write()
|
||||
} else {
|
||||
await db.batch([
|
||||
{ type: 'put', sublevel: sub1, key: 'a', value: { foo: 123 } },
|
||||
{ type: 'put', sublevel: sub2, key: 'a', value: { foo: 123 } },
|
||||
{ type: 'put', sublevel: sub1, key: 'b', value: { foo: 123 }, valueEncoding: 'utf8' },
|
||||
{ type: 'put', sublevel: sub2, key: 'b', value: 'b', keyEncoding: 'utf8' },
|
||||
{ type: 'del', key: 'c1', sublevel: sub2, keyEncoding: 'utf8' },
|
||||
{ type: 'del', key: 'c2', sublevel: sub2 },
|
||||
{ type: 'del', key: 'd' }
|
||||
])
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
108
node_modules/abstract-level/test/sublevel-test.js
generated
vendored
Normal file
@ -0,0 +1,108 @@
'use strict'

const { Buffer } = require('buffer')

exports.all = function (test, testCommon) {
  for (const deferred of [false, true]) {
    // NOTE: adapted from subleveldown
    test(`sublevel.clear() (deferred: ${deferred})`, async function (t) {
      const db = testCommon.factory()
      const sub1 = db.sublevel('1')
      const sub2 = db.sublevel('2')

      if (!deferred) await sub1.open()
      if (!deferred) await sub2.open()

      await populate([sub1, sub2], ['a', 'b'])
      await verify(['!1!a', '!1!b', '!2!a', '!2!b'])

      await clear([sub1], {})
      await verify(['!2!a', '!2!b'])

      await populate([sub1], ['a', 'b'])
      await clear([sub2], { lt: 'b' })
      await verify(['!1!a', '!1!b', '!2!b'])
      await db.close()

      async function populate (subs, items) {
        return Promise.all(subs.map(sub => {
          return sub.batch(items.map(function (item) {
            return { type: 'put', key: item, value: item }
          }))
        }))
      }

      async function clear (subs, opts) {
        return Promise.all(subs.map(sub => {
          return sub.clear(opts)
        }))
      }

      async function verify (expected) {
        const keys = await db.keys().all()
        t.same(keys, expected)
      }
    })
  }

  for (const deferred of [false, true]) {
    for (const keyEncoding of ['buffer', 'view']) {
      if (!testCommon.supports.encodings[keyEncoding]) return

      // NOTE: adapted from subleveldown. See https://github.com/Level/subleveldown/issues/87
      test(`iterate sublevel keys with bytes above 196 (${keyEncoding}, deferred: ${deferred})`, async function (t) {
        const db = testCommon.factory()
        const sub1 = db.sublevel('a', { keyEncoding })
        const sub2 = db.sublevel('b', { keyEncoding })
        const length = (db) => db.keys().all().then(x => x.length)

        if (!deferred) await sub1.open()
        if (!deferred) await sub2.open()

        const batch1 = sub1.batch()
        const batch2 = sub2.batch()
        const keys = []

        for (let i = 0; i < 256; i++) {
          const key = keyEncoding === 'buffer' ? Buffer.from([i]) : new Uint8Array([i])
          keys.push(key)
          batch1.put(key, 'aa')
          batch2.put(key, 'bb')
        }

        await Promise.all([batch1.write(), batch2.write()])

        const entries1 = await sub1.iterator().all()
        const entries2 = await sub2.iterator().all()

        t.is(entries1.length, 256, 'sub1 yielded all entries')
        t.is(entries2.length, 256, 'sub2 yielded all entries')
        t.ok(entries1.every(x => x[1] === 'aa'))
        t.ok(entries2.every(x => x[1] === 'bb'))

        const many1 = await sub1.getMany(keys)
        const many2 = await sub2.getMany(keys)

        t.is(many1.length, 256, 'sub1 yielded all values')
        t.is(many2.length, 256, 'sub2 yielded all values')
        t.ok(many1.every(x => x === 'aa'))
        t.ok(many2.every(x => x === 'bb'))

        const singles1 = await Promise.all(keys.map(k => sub1.get(k)))
        const singles2 = await Promise.all(keys.map(k => sub2.get(k)))

        t.is(singles1.length, 256, 'sub1 yielded all values')
        t.is(singles2.length, 256, 'sub2 yielded all values')
        t.ok(singles1.every(x => x === 'aa'))
        t.ok(singles2.every(x => x === 'bb'))

        await sub1.clear()

        t.same(await length(sub1), 0, 'cleared sub1')
        t.same(await length(sub2), 256, 'did not clear sub2')

        await db.close()
      })
    }
  }
}
280
node_modules/abstract-level/test/util.js
generated
vendored
Normal file
@ -0,0 +1,280 @@
'use strict'

const ModuleError = require('module-error')
const { AbstractLevel, AbstractChainedBatch } = require('..')
const { AbstractIterator, AbstractKeyIterator, AbstractValueIterator } = require('..')

const spies = []

exports.verifyNotFoundError = function (err) {
  return err.code === 'LEVEL_NOT_FOUND' && err.notFound === true && err.status === 404
}

exports.illegalKeys = [
  { name: 'null key', key: null },
  { name: 'undefined key', key: undefined }
]

exports.illegalValues = [
  { name: 'null key', value: null },
  { name: 'undefined value', value: undefined }
]

/**
 * Wrap a callback to check that it's called asynchronously. Must be
 * combined with a `ctx()`, `with()` or `end()` call.
 *
 * @param {function} cb Callback to check.
 * @param {string} name Optional callback name to use in assertion messages.
 * @returns {function} Wrapped callback.
 */
exports.assertAsync = function (cb, name) {
  const spy = {
    called: false,
    name: name || cb.name || 'anonymous'
  }

  spies.push(spy)

  return function (...args) {
    spy.called = true
    return cb.apply(this, args)
  }
}

/**
 * Verify that callbacks wrapped with `assertAsync()` were not yet called.
 * @param {import('tape').Test} t Tape test object.
 */
exports.assertAsync.end = function (t) {
  for (const { called, name } of spies.splice(0, spies.length)) {
    t.is(called, false, `callback (${name}) is asynchronous`)
  }
}

/**
 * Wrap a test function to verify `assertAsync()` spies at the end.
 * @param {import('tape').TestCase} test Test function to be passed to `tape()`.
 * @returns {import('tape').TestCase} Wrapped test function.
 */
exports.assertAsync.ctx = function (test) {
  return function (...args) {
    const ret = test.call(this, ...args)
    exports.assertAsync.end(args[0])
    return ret
  }
}

/**
 * Wrap an arbitrary callback to verify `assertAsync()` spies at the end.
 * @param {import('tape').Test} t Tape test object.
 * @param {function} cb Callback to wrap.
 * @returns {function} Wrapped callback.
 */
exports.assertAsync.with = function (t, cb) {
  return function (...args) {
    const ret = cb.call(this, ...args)
    exports.assertAsync.end(t)
    return ret
  }
}

exports.mockLevel = function (methods, ...args) {
  class TestLevel extends AbstractLevel {}
  for (const k in methods) TestLevel.prototype[k] = methods[k]
  if (!args.length) args = [{ encodings: { utf8: true } }]
  return new TestLevel(...args)
}

exports.mockIterator = function (db, options, methods, ...args) {
  class TestIterator extends AbstractIterator {}
  for (const k in methods) TestIterator.prototype[k] = methods[k]
  return new TestIterator(db, options, ...args)
}

exports.mockChainedBatch = function (db, methods, ...args) {
  class TestBatch extends AbstractChainedBatch {}
  for (const k in methods) TestBatch.prototype[k] = methods[k]
  return new TestBatch(db, ...args)
}

// Mock encoding where null and undefined are significant types
exports.nullishEncoding = {
  name: 'nullish',
  format: 'utf8',
  encode (v) {
    return v === null ? '\x00' : v === undefined ? '\xff' : String(v)
  },
  decode (v) {
    return v === '\x00' ? null : v === '\xff' ? undefined : v
  }
}

const kEntries = Symbol('entries')
const kPosition = Symbol('position')
const kOptions = Symbol('options')

/**
 * A minimal and non-optimized implementation for use in tests. Only supports utf8.
 * Don't use this as a reference implementation.
 */
class MinimalLevel extends AbstractLevel {
  constructor (options) {
    super({ encodings: { utf8: true }, seek: true }, options)
    this[kEntries] = new Map()
  }

  _put (key, value, options, callback) {
    this[kEntries].set(key, value)
    this.nextTick(callback)
  }

  _get (key, options, callback) {
    const value = this[kEntries].get(key)

    if (value === undefined) {
      return this.nextTick(callback, new ModuleError(`Key ${key} was not found`, {
        code: 'LEVEL_NOT_FOUND'
      }))
    }

    this.nextTick(callback, null, value)
  }

  _getMany (keys, options, callback) {
    const values = keys.map(k => this[kEntries].get(k))
    this.nextTick(callback, null, values)
  }

  _del (key, options, callback) {
    this[kEntries].delete(key)
    this.nextTick(callback)
  }

  _clear (options, callback) {
    for (const [k] of sliceEntries(this[kEntries], options, true)) {
      this[kEntries].delete(k)
    }

    this.nextTick(callback)
  }

  _batch (operations, options, callback) {
    const entries = new Map(this[kEntries])

    for (const op of operations) {
      if (op.type === 'put') entries.set(op.key, op.value)
      else entries.delete(op.key)
    }

    this[kEntries] = entries
    this.nextTick(callback)
  }

  _iterator (options) {
    return new MinimalIterator(this, options)
  }

  _keys (options) {
    return new MinimalKeyIterator(this, options)
  }

  _values (options) {
    return new MinimalValueIterator(this, options)
  }
}

class MinimalIterator extends AbstractIterator {
  constructor (db, options) {
    super(db, options)
    this[kEntries] = sliceEntries(db[kEntries], options, false)
    this[kOptions] = options
    this[kPosition] = 0
  }
}

class MinimalKeyIterator extends AbstractKeyIterator {
  constructor (db, options) {
    super(db, options)
    this[kEntries] = sliceEntries(db[kEntries], options, false)
    this[kOptions] = options
    this[kPosition] = 0
  }
}

class MinimalValueIterator extends AbstractValueIterator {
  constructor (db, options) {
    super(db, options)
    this[kEntries] = sliceEntries(db[kEntries], options, false)
    this[kOptions] = options
    this[kPosition] = 0
  }
}

for (const Ctor of [MinimalIterator, MinimalKeyIterator, MinimalValueIterator]) {
  const mapEntry = Ctor === MinimalIterator ? e => e : Ctor === MinimalKeyIterator ? e => e[0] : e => e[1]

  Ctor.prototype._next = function (callback) {
    const entry = this[kEntries][this[kPosition]++]
    if (entry === undefined) return this.nextTick(callback)
    if (Ctor === MinimalIterator) this.nextTick(callback, null, entry[0], entry[1])
    else this.nextTick(callback, null, mapEntry(entry))
  }

  Ctor.prototype._nextv = function (size, options, callback) {
    const entries = this[kEntries].slice(this[kPosition], this[kPosition] + size)
    this[kPosition] += entries.length
    this.nextTick(callback, null, entries.map(mapEntry))
  }

  Ctor.prototype._all = function (options, callback) {
    const end = this.limit - this.count + this[kPosition]
    const entries = this[kEntries].slice(this[kPosition], end)
    this[kPosition] = this[kEntries].length
    this.nextTick(callback, null, entries.map(mapEntry))
  }

  Ctor.prototype._seek = function (target, options) {
    this[kPosition] = this[kEntries].length

    if (!outOfRange(target, this[kOptions])) {
      // Don't care about performance here
      for (let i = 0; i < this[kPosition]; i++) {
        const key = this[kEntries][i][0]

        if (this[kOptions].reverse ? key <= target : key >= target) {
          this[kPosition] = i
        }
      }
    }
  }
}

const outOfRange = function (target, options) {
  if ('gte' in options) {
    if (target < options.gte) return true
  } else if ('gt' in options) {
    if (target <= options.gt) return true
  }

  if ('lte' in options) {
    if (target > options.lte) return true
  } else if ('lt' in options) {
    if (target >= options.lt) return true
  }

  return false
}

const sliceEntries = function (entries, options, applyLimit) {
  entries = Array.from(entries)
    .filter((e) => !outOfRange(e[0], options))
    .sort((a, b) => a[0] > b[0] ? 1 : a[0] < b[0] ? -1 : 0)

  if (options.reverse) entries.reverse()
  if (applyLimit && options.limit !== -1) entries = entries.slice(0, options.limit)

  return entries
}

exports.MinimalLevel = MinimalLevel
123
node_modules/abstract-level/types/abstract-chained-batch.d.ts
generated
vendored
Normal file
@ -0,0 +1,123 @@
import * as Transcoder from 'level-transcoder'
import { AbstractSublevel } from './abstract-sublevel'
import { NodeCallback } from './interfaces'

export class AbstractChainedBatch<TDatabase, KDefault, VDefault> {
  constructor (db: TDatabase)

  /**
   * A reference to the database that created this chained batch.
   */
  db: TDatabase

  /**
   * The number of queued operations on the current batch.
   */
  get length (): number

  /**
   * Queue a _put_ operation on this batch, not committed until {@link write} is
   * called.
   */
  put (key: KDefault, value: VDefault): this

  put<K = KDefault, V = VDefault> (
    key: K,
    value: V,
    options: AbstractChainedBatchPutOptions<TDatabase, K, V>
  ): this

  /**
   * Queue a _del_ operation on this batch, not committed until {@link write} is
   * called.
   */
  del (key: KDefault): this
  del<K = KDefault> (key: K, options: AbstractChainedBatchDelOptions<TDatabase, K>): this

  /**
   * Clear all queued operations on this batch.
   */
  clear (): this

  /**
   * Commit the queued operations for this batch. All operations will be written
   * atomically, that is, they will either all succeed or fail with no partial
   * commits.
   */
  write (): Promise<void>
  write (options: AbstractChainedBatchWriteOptions): Promise<void>
  write (callback: NodeCallback<void>): void
  write (options: AbstractChainedBatchWriteOptions, callback: NodeCallback<void>): void

  /**
   * Free up underlying resources. This should be done even if the chained batch has
   * zero queued operations. Automatically called by {@link write} so normally not
   * necessary to call, unless the intent is to discard a chained batch without
   * committing it.
   */
  close (): Promise<void>
  close (callback: NodeCallback<void>): void
}

/**
 * Options for the {@link AbstractChainedBatch.put} method.
 */
export interface AbstractChainedBatchPutOptions<TDatabase, K, V> {
  /**
   * Custom key encoding for this _put_ operation, used to encode the `key`.
   */
  keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined

  /**
   * Custom value encoding for this _put_ operation, used to encode the `value`.
   */
  valueEncoding?: string | Transcoder.PartialEncoder<V> | undefined

  /**
   * Act as though the _put_ operation is performed on the given sublevel, to similar
   * effect as:
   *
   * ```js
   * await sublevel.batch().put(key, value).write()
   * ```
   *
   * This allows atomically committing data to multiple sublevels. The `key` will be
   * prefixed with the `prefix` of the sublevel, and the `key` and `value` will be
   * encoded by the sublevel (using the default encodings of the sublevel unless
   * {@link keyEncoding} and / or {@link valueEncoding} are provided).
   */
  sublevel?: AbstractSublevel<TDatabase, any, any, any> | undefined
}

/**
 * Options for the {@link AbstractChainedBatch.del} method.
 */
export interface AbstractChainedBatchDelOptions<TDatabase, K> {
  /**
   * Custom key encoding for this _del_ operation, used to encode the `key`.
   */
  keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined

  /**
   * Act as though the _del_ operation is performed on the given sublevel, to similar
   * effect as:
   *
   * ```js
   * await sublevel.batch().del(key).write()
   * ```
   *
   * This allows atomically committing data to multiple sublevels. The `key` will be
   * prefixed with the `prefix` of the sublevel, and the `key` will be encoded by the
   * sublevel (using the default key encoding of the sublevel unless {@link keyEncoding}
   * is provided).
   */
  sublevel?: AbstractSublevel<TDatabase, any, any, any> | undefined
}

/**
 * Options for the {@link AbstractChainedBatch.write} method.
 */
// eslint-disable-next-line @typescript-eslint/no-empty-interface
export interface AbstractChainedBatchWriteOptions {
  // There are no abstract options but implementations may add theirs.
}
252
node_modules/abstract-level/types/abstract-iterator.d.ts
generated
vendored
Normal file
@ -0,0 +1,252 @@
|
||||
import * as Transcoder from 'level-transcoder'
|
||||
import { RangeOptions, NodeCallback } from './interfaces'
|
||||
|
||||
export interface AbstractIteratorOptions<K, V> extends RangeOptions<K> {
|
||||
/**
|
||||
* Whether to return the key of each entry. Defaults to `true`. If set to `false`,
|
||||
* the iterator will yield keys that are `undefined`.
|
||||
*/
|
||||
keys?: boolean | undefined
|
||||
|
||||
/**
|
||||
* Whether to return the value of each entry. Defaults to `true`. If set to
|
||||
* `false`, the iterator will yield values that are `undefined`.
|
||||
*/
|
||||
values?: boolean | undefined
|
||||
|
||||
/**
|
||||
* Custom key encoding for this iterator, used to encode range options, to encode
|
||||
* {@link AbstractIterator.seek} targets and to decode keys.
|
||||
*/
|
||||
keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined
|
||||
|
||||
/**
|
||||
* Custom value encoding for this iterator, used to decode values.
|
||||
*/
|
||||
valueEncoding?: string | Transcoder.PartialDecoder<V> | undefined
|
||||
}
|
||||
|
||||
export interface AbstractKeyIteratorOptions<K> extends RangeOptions<K> {
|
||||
/**
|
||||
* Custom key encoding for this iterator, used to encode range options, to encode
|
||||
* {@link AbstractKeyIterator.seek} targets and to decode keys.
|
||||
*/
|
||||
keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined
|
||||
}
|
||||
|
||||
export interface AbstractValueIteratorOptions<K, V> extends RangeOptions<K> {
|
||||
/**
|
||||
* Custom key encoding for this iterator, used to encode range options and
|
||||
* {@link AbstractValueIterator.seek} targets.
|
||||
*/
|
||||
keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined
|
||||
|
||||
/**
|
||||
* Custom value encoding for this iterator, used to decode values.
|
||||
*/
|
||||
valueEncoding?: string | Transcoder.PartialDecoder<V> | undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* @template TDatabase Type of the database that created this iterator.
|
||||
* @template T Type of items yielded. Items can be entries, keys or values.
|
||||
*/
|
||||
declare class CommonIterator<TDatabase, T> {
|
||||
/**
|
||||
* A reference to the database that created this iterator.
|
||||
*/
|
||||
db: TDatabase
|
||||
|
||||
/**
|
||||
* Read-only getter that indicates how many items have been yielded so far (by any
|
||||
* method) excluding calls that errored or yielded `undefined`.
|
||||
*/
|
||||
get count (): number
|
||||
|
||||
/**
|
||||
* Read-only getter that reflects the `limit` that was set in options. Greater than or
|
||||
* equal to zero. Equals {@link Infinity} if no limit.
|
||||
*/
|
||||
get limit (): number
|
||||
|
||||
[Symbol.asyncIterator] (): AsyncGenerator<T, void, unknown>
|
||||
|
||||
/**
|
||||
* Free up underlying resources. Not necessary to call if [`for await...of`][1] or
|
||||
* `all()` is used.
|
||||
*
|
||||
* [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
|
||||
*/
|
||||
close (): Promise<void>
|
||||
close (callback: NodeCallback<void>): void
|
||||
}
|
||||
|
||||
export class AbstractIterator<TDatabase, K, V> extends CommonIterator<TDatabase, [K, V]> {
|
||||
constructor (db: TDatabase, options: AbstractIteratorOptions<K, V>)
|
||||
|
||||
/**
|
||||
* Advance to the next entry and yield that entry. When possible, prefer to use
|
||||
* [`for await...of`][1] instead.
|
||||
*
|
||||
* [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
|
||||
*/
|
||||
next (): Promise<[K, V] | undefined>
|
||||
next (callback: NextCallback<K, V>): void
|
||||
|
||||
/**
|
||||
* Advance repeatedly and get at most {@link size} amount of entries in a single call.
|
||||
* Can be faster than repeated {@link next()} calls. The natural end of the iterator
|
||||
* will be signaled by yielding an empty array.
|
||||
*
|
||||
* @param size Get at most this many entries. Has a soft minimum of 1.
|
||||
* @param options Options (none at the moment, reserved for future use).
|
||||
* @param callback Error-first callback. If none is provided, a promise is returned.
|
||||
*/
|
||||
nextv (size: number, options: {}, callback: NodeCallback<Array<[K, V]>>): void
|
||||
nextv (size: number, callback: NodeCallback<Array<[K, V]>>): void
|
||||
nextv (size: number, options: {}): Promise<Array<[K, V]>>
|
||||
nextv (size: number): Promise<Array<[K, V]>>
|
||||
|
||||
/**
|
||||
* Advance repeatedly and get all (remaining) entries as an array, automatically
|
||||
* closing the iterator. Assumes that those entries fit in memory. If that's not the
|
||||
* case, instead use {@link next()}, {@link nextv()} or [`for await...of`][1].
|
||||
*
|
||||
* [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
|
||||
*
|
||||
* @param options Options (none at the moment, reserved for future use).
|
||||
* @param callback Error-first callback. If none is provided, a promise is returned.
|
||||
*/
|
||||
all (options: {}, callback: NodeCallback<Array<[K, V]>>): void
|
||||
all (callback: NodeCallback<Array<[K, V]>>): void
|
||||
all (options: {}): Promise<Array<[K, V]>>
|
||||
all (): Promise<Array<[K, V]>>
|
||||
|
||||
/**
|
||||
* Seek to the key closest to {@link target}. Subsequent calls to {@link next()},
|
||||
* {@link nextv()} or {@link all()} (including implicit calls in a `for await...of`
|
||||
* loop) will yield entries with keys equal to or larger than {@link target}, or equal
|
||||
* to or smaller than {@link target} if the {@link AbstractIteratorOptions.reverse}
|
||||
* option was true.
|
||||
*/
|
||||
seek (target: K): void
|
||||
seek<TTarget = K> (target: TTarget, options: AbstractSeekOptions<TTarget>): void
|
||||
}
|
||||
|
||||
export class AbstractKeyIterator<TDatabase, K> extends CommonIterator<TDatabase, K> {
|
||||
constructor (db: TDatabase, options: AbstractKeyIteratorOptions<K>)
|
||||
|
||||
/**
|
||||
* Advance to the next key and yield that key. When possible, prefer to use
|
||||
* [`for await...of`][1] instead.
|
||||
*
|
||||
* [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
|
||||
*/
|
||||
next (): Promise<K | undefined>
|
||||
next (callback: NodeCallback<K>): void
|
||||
|
||||
/**
|
||||
* Advance repeatedly and get at most {@link size} amount of keys in a single call. Can
|
||||
* be faster than repeated {@link next()} calls. The natural end of the iterator will
|
||||
* be signaled by yielding an empty array.
|
||||
*
|
||||
* @param size Get at most this many keys. Has a soft minimum of 1.
|
||||
* @param options Options (none at the moment, reserved for future use).
|
||||
* @param callback Error-first callback. If none is provided, a promise is returned.
|
||||
*/
|
||||
nextv (size: number, options: {}, callback: NodeCallback<[K]>): void
|
||||
nextv (size: number, callback: NodeCallback<[K]>): void
|
||||
nextv (size: number, options: {}): Promise<[K]>
|
||||
nextv (size: number): Promise<[K]>
|
||||
|
||||
/**
|
||||
* Advance repeatedly and get all (remaining) keys as an array, automatically closing
|
||||
* the iterator. Assumes that those keys fit in memory. If that's not the case, instead
|
||||
* use {@link next()}, {@link nextv()} or [`for await...of`][1].
|
||||
*
|
||||
* [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
|
||||
*
|
||||
* @param options Options (none at the moment, reserved for future use).
|
||||
* @param callback Error-first callback. If none is provided, a promise is returned.
|
||||
*/
|
||||
all (options: {}, callback: NodeCallback<[K]>): void
|
||||
all (callback: NodeCallback<[K]>): void
|
||||
all (options: {}): Promise<[K]>
|
||||
all (): Promise<[K]>
|
||||
|
||||
/**
|
||||
* Seek to the key closest to {@link target}. Subsequent calls to {@link next()},
|
||||
* {@link nextv()} or {@link all()} (including implicit calls in a `for await...of`
|
||||
* loop) will yield keys equal to or larger than {@link target}, or equal to or smaller
|
||||
* than {@link target} if the {@link AbstractKeyIteratorOptions.reverse} option was
|
||||
* true.
|
||||
*/
|
||||
seek (target: K): void
|
||||
seek<TTarget = K> (target: TTarget, options: AbstractSeekOptions<TTarget>): void
|
||||
}
|
||||
|
||||
export class AbstractValueIterator<TDatabase, K, V> extends CommonIterator<TDatabase, V> {
|
||||
constructor (db: TDatabase, options: AbstractValueIteratorOptions<K, V>)
|
||||
|
||||
/**
|
||||
* Advance to the next value and yield that value. When possible, prefer
|
||||
* to use [`for await...of`][1] instead.
|
||||
*
|
||||
* [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
|
||||
*/
|
||||
next (): Promise<V | undefined>
|
||||
next (callback: NodeCallback<V>): void
|
||||
|
||||
/**
|
||||
* Advance repeatedly and get at most {@link size} amount of values in a single call.
|
||||
* Can be faster than repeated {@link next()} calls. The natural end of the iterator
|
||||
* will be signaled by yielding an empty array.
|
||||
*
|
||||
* @param size Get at most this many values. Has a soft minimum of 1.
|
||||
* @param options Options (none at the moment, reserved for future use).
|
||||
* @param callback Error-first callback. If none is provided, a promise is returned.
|
||||
*/
|
||||
nextv (size: number, options: {}, callback: NodeCallback<[V]>): void
|
||||
nextv (size: number, callback: NodeCallback<[V]>): void
|
||||
nextv (size: number, options: {}): Promise<[V]>
|
||||
nextv (size: number): Promise<[V]>
|
||||
|
||||
/**
|
||||
* Advance repeatedly and get all (remaining) values as an array, automatically closing
|
||||
* the iterator. Assumes that those values fit in memory. If that's not the case,
|
||||
* instead use {@link next()}, {@link nextv()} or [`for await...of`][1].
|
||||
*
|
||||
* [1]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of
|
||||
*
|
||||
* @param options Options (none at the moment, reserved for future use).
|
||||
* @param callback Error-first callback. If none is provided, a promise is returned.
|
||||
*/
|
||||
all (options: {}, callback: NodeCallback<[V]>): void
|
||||
all (callback: NodeCallback<[V]>): void
|
||||
all (options: {}): Promise<[V]>
|
||||
all (): Promise<[V]>
|
||||
|
||||
/**
|
||||
* Seek to the key closest to {@link target}. Subsequent calls to {@link next()},
|
||||
* {@link nextv()} or {@link all()} (including implicit calls in a `for await...of`
|
||||
* loop) will yield the values of keys equal to or larger than {@link target}, or equal
|
||||
* to or smaller than {@link target} if the {@link AbstractValueIteratorOptions.reverse}
|
||||
* option was true.
|
||||
*/
|
||||
seek (target: K): void
|
||||
seek<TTarget = K> (target: TTarget, options: AbstractSeekOptions<TTarget>): void
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for the {@link AbstractIterator.seek} method.
|
||||
*/
|
||||
export interface AbstractSeekOptions<K> {
|
||||
/**
|
||||
* Custom key encoding, used to encode the `target`. By default the keyEncoding option
|
||||
* of the iterator is used, or (if that wasn't set) the keyEncoding of the database.
|
||||
*/
|
||||
keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined
|
||||
}
|
||||
|
||||
declare type NextCallback<K, V> =
|
||||
(err: Error | undefined | null, key?: K | undefined, value?: V | undefined) => void
|
485
node_modules/abstract-level/types/abstract-level.d.ts
generated
vendored
Normal file
@ -0,0 +1,485 @@
|
||||
import { IManifest } from 'level-supports'
|
||||
import * as Transcoder from 'level-transcoder'
|
||||
import { EventEmitter } from 'events'
|
||||
import { AbstractChainedBatch } from './abstract-chained-batch'
|
||||
import { AbstractSublevel, AbstractSublevelOptions } from './abstract-sublevel'
|
||||
|
||||
import {
|
||||
AbstractIterator,
|
||||
AbstractIteratorOptions,
|
||||
AbstractKeyIterator,
|
||||
AbstractKeyIteratorOptions,
|
||||
AbstractValueIterator,
|
||||
AbstractValueIteratorOptions
|
||||
} from './abstract-iterator'
|
||||
|
||||
import { NodeCallback, RangeOptions } from './interfaces'
|
||||
|
||||
/**
|
||||
* Abstract class for a lexicographically sorted key-value database.
|
||||
*
|
||||
* @template TFormat The type used internally by the database to store data.
|
||||
* @template KDefault The default type of keys if not overridden on operations.
|
||||
* @template VDefault The default type of values if not overridden on operations.
|
||||
*/
|
||||
declare class AbstractLevel<TFormat, KDefault = string, VDefault = string>
|
||||
extends EventEmitter {
|
||||
/**
|
||||
* Private database constructor.
|
||||
*
|
||||
* @param manifest A [manifest](https://github.com/Level/supports) describing the
|
||||
* features supported by (the private API of) this database.
|
||||
* @param options Options, of which some will be forwarded to {@link open}.
|
||||
*/
|
||||
constructor (
|
||||
manifest: Partial<IManifest>,
|
||||
options?: AbstractDatabaseOptions<KDefault, VDefault> | undefined
|
||||
)
|
||||
|
||||
/**
|
||||
* A [manifest](https://github.com/Level/supports) describing the features
|
||||
* supported by this database.
|
||||
*/
|
||||
supports: IManifest
|
||||
|
||||
/**
|
||||
* Read-only getter that returns a string reflecting the current state of the database:
|
||||
*
|
||||
* - `'opening'` - waiting for the database to be opened
|
||||
* - `'open'` - successfully opened the database
|
||||
* - `'closing'` - waiting for the database to be closed
|
||||
* - `'closed'` - successfully closed the database.
|
||||
*/
|
||||
get status (): 'opening' | 'open' | 'closing' | 'closed'
|
||||
|
||||
/**
|
||||
* Open the database.
|
||||
*/
|
||||
open (): Promise<void>
|
||||
open (options: AbstractOpenOptions): Promise<void>
|
||||
open (callback: NodeCallback<void>): void
|
||||
open (options: AbstractOpenOptions, callback: NodeCallback<void>): void
|
||||
|
||||
/**
|
||||
* Close the database.
|
||||
*/
|
||||
close (): Promise<void>
|
||||
close (callback: NodeCallback<void>): void
|
||||
|
||||
/**
|
||||
* Get a value from the database by {@link key}.
|
||||
*/
|
||||
get (key: KDefault): Promise<VDefault>
|
||||
get (key: KDefault, callback: NodeCallback<VDefault>): void
|
||||
|
||||
get<K = KDefault, V = VDefault> (
|
||||
key: K,
|
||||
options: AbstractGetOptions<K, V>
|
||||
): Promise<V>
|
||||
|
||||
get<K = KDefault, V = VDefault> (
|
||||
key: K,
|
||||
options: AbstractGetOptions<K, V>,
|
||||
callback: NodeCallback<V>
|
||||
): void
|
||||
|
||||
/**
|
||||
* Get multiple values from the database by an array of {@link keys}.
|
||||
*/
|
||||
getMany (keys: KDefault[]): Promise<VDefault[]>
|
||||
getMany (keys: KDefault[], callback: NodeCallback<VDefault[]>): void
|
||||
|
||||
getMany<K = KDefault, V = VDefault> (
|
||||
keys: K[],
|
||||
options: AbstractGetManyOptions<K, V>
|
||||
): Promise<V[]>
|
||||
|
||||
getMany<K = KDefault, V = VDefault> (
|
||||
keys: K[],
|
||||
options: AbstractGetManyOptions<K, V>,
|
||||
callback: NodeCallback<V[]>
|
||||
): void
|
||||
|
||||
/**
|
||||
* Add a new entry or overwrite an existing entry.
|
||||
*/
|
||||
put (key: KDefault, value: VDefault): Promise<void>
|
||||
put (key: KDefault, value: VDefault, callback: NodeCallback<void>): void
|
||||
|
||||
put<K = KDefault, V = VDefault> (
|
||||
key: K,
|
||||
value: V,
|
||||
options: AbstractPutOptions<K, V>
|
||||
): Promise<void>
|
||||
|
||||
put<K = KDefault, V = VDefault> (
|
||||
key: K,
|
||||
value: V,
|
||||
options: AbstractPutOptions<K, V>,
|
||||
callback: NodeCallback<void>
|
||||
): void
|
||||
|
||||
/**
|
||||
* Delete an entry by {@link key}.
|
||||
*/
|
||||
del (key: KDefault): Promise<void>
|
||||
del (key: KDefault, callback: NodeCallback<void>): void
|
||||
|
||||
del<K = KDefault> (
|
||||
key: K,
|
||||
options: AbstractDelOptions<K>
|
||||
): Promise<void>
|
||||
|
||||
del<K = KDefault> (
|
||||
key: K,
|
||||
options: AbstractDelOptions<K>,
|
||||
callback: NodeCallback<void>
|
||||
): void
|
||||
|
||||

  /**
   * Perform multiple _put_ and/or _del_ operations in bulk.
   */
  batch (
    operations: Array<AbstractBatchOperation<typeof this, KDefault, VDefault>>
  ): Promise<void>

  batch (
    operations: Array<AbstractBatchOperation<typeof this, KDefault, VDefault>>,
    callback: NodeCallback<void>
  ): void

  batch<K = KDefault, V = VDefault> (
    operations: Array<AbstractBatchOperation<typeof this, K, V>>,
    options: AbstractBatchOptions<K, V>
  ): Promise<void>

  batch<K = KDefault, V = VDefault> (
    operations: Array<AbstractBatchOperation<typeof this, K, V>>,
    options: AbstractBatchOptions<K, V>,
    callback: NodeCallback<void>
  ): void

  batch (): AbstractChainedBatch<typeof this, KDefault, VDefault>
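
  /**
   * A minimal usage sketch of both batch styles, assuming `db` is an open
   * database with default `'utf8'` encodings:
   *
   * ```js
   * // Array form
   * await db.batch([
   *   { type: 'put', key: 'a', value: '1' },
   *   { type: 'del', key: 'b' }
   * ])
   *
   * // Chained form
   * await db.batch().put('c', '3').del('a').write()
   * ```
   */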

  /**
   * Create an iterator. For example:
   *
   * ```js
   * for await (const [key, value] of db.iterator({ gte: 'a' })) {
   *   console.log([key, value])
   * }
   * ```
   */
  iterator (): AbstractIterator<typeof this, KDefault, VDefault>
  iterator<K = KDefault, V = VDefault> (
    options: AbstractIteratorOptions<K, V>
  ): AbstractIterator<typeof this, K, V>

  /**
   * Create a key iterator. For example:
   *
   * ```js
   * for await (const key of db.keys({ gte: 'a' })) {
   *   console.log(key)
   * }
   * ```
   */
  keys (): AbstractKeyIterator<typeof this, KDefault>
  keys<K = KDefault> (
    options: AbstractKeyIteratorOptions<K>
  ): AbstractKeyIterator<typeof this, K>

  /**
   * Create a value iterator. For example:
   *
   * ```js
   * for await (const value of db.values({ gte: 'a' })) {
   *   console.log(value)
   * }
   * ```
   */
  values (): AbstractValueIterator<typeof this, KDefault, VDefault>
  values<K = KDefault, V = VDefault> (
    options: AbstractValueIteratorOptions<K, V>
  ): AbstractValueIterator<typeof this, K, V>

  /**
   * Delete all entries or a range.
   */
  clear (): Promise<void>
  clear (callback: NodeCallback<void>): void
  clear<K = KDefault> (options: AbstractClearOptions<K>): Promise<void>
  clear<K = KDefault> (options: AbstractClearOptions<K>, callback: NodeCallback<void>): void
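
  /**
   * A minimal usage sketch, assuming `db` is an open database; the range
   * options are those of {@link AbstractClearOptions}:
   *
   * ```js
   * await db.clear({ gte: 'a', lt: 'b' }) // delete a range of keys
   * await db.clear()                      // delete everything
   * ```
   */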

  /**
   * Create a sublevel.
   * @param name Name of the sublevel, used to prefix keys.
   */
  sublevel (name: string): AbstractSublevel<typeof this, TFormat, string, string>
  sublevel<K = string, V = string> (
    name: string,
    options: AbstractSublevelOptions<K, V>
  ): AbstractSublevel<typeof this, TFormat, K, V>
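
  /**
   * A minimal usage sketch, assuming `db` is an open database; the resulting
   * sublevel has its own key space, prefixed with the sublevel name:
   *
   * ```js
   * const people = db.sublevel('people', { valueEncoding: 'json' })
   * await people.put('123', { name: 'Alice' })
   * ```
   */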

  /**
   * Add the sublevel prefix to the given {@link key}, which must be already encoded.
   * If this database is not a sublevel, the given {@link key} is returned as-is.
   *
   * @param key Key to add the prefix to.
   * @param keyFormat Format of {@link key}: one of `'utf8'`, `'buffer'` or `'view'`.
   * If `'utf8'` then {@link key} must be a string and the return value will be a string.
   * If `'buffer'` then both are a Buffer; if `'view'` then both are a Uint8Array.
   */
  prefixKey (key: string, keyFormat: 'utf8'): string
  prefixKey (key: Buffer, keyFormat: 'buffer'): Buffer
  prefixKey (key: Uint8Array, keyFormat: 'view'): Uint8Array
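
  /**
   * An illustrative sketch (the exact prefix depends on the sublevel name and
   * separator), assuming `people` was created with `db.sublevel('people')` and
   * the default `'!'` separator:
   *
   * ```js
   * db.prefixKey('a', 'utf8')     // 'a' (not a sublevel, returned as-is)
   * people.prefixKey('a', 'utf8') // '!people!a'
   * ```
   */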

  /**
   * Returns the given {@link encoding} argument as a normalized encoding object
   * that follows the [`level-transcoder`](https://github.com/Level/transcoder)
   * encoding interface.
   */
  keyEncoding<N extends Transcoder.KnownEncodingName> (
    encoding: N
  ): Transcoder.KnownEncoding<N, TFormat>

  keyEncoding<TIn, TOut> (
    encoding: Transcoder.MixedEncoding<TIn, any, TOut>
  ): Transcoder.Encoding<TIn, TFormat, TOut>

  /**
   * Returns the default key encoding of the database as a normalized encoding
   * object that follows the [`level-transcoder`](https://github.com/Level/transcoder)
   * encoding interface.
   */
  keyEncoding (): Transcoder.Encoding<KDefault, TFormat, KDefault>

  /**
   * Returns the given {@link encoding} argument as a normalized encoding object
   * that follows the [`level-transcoder`](https://github.com/Level/transcoder)
   * encoding interface.
   */
  valueEncoding<N extends Transcoder.KnownEncodingName> (
    encoding: N
  ): Transcoder.KnownEncoding<N, TFormat>

  valueEncoding<TIn, TOut> (
    encoding: Transcoder.MixedEncoding<TIn, any, TOut>
  ): Transcoder.Encoding<TIn, TFormat, TOut>

  /**
   * Returns the default value encoding of the database as a normalized encoding
   * object that follows the [`level-transcoder`](https://github.com/Level/transcoder)
   * encoding interface.
   */
  valueEncoding (): Transcoder.Encoding<VDefault, TFormat, VDefault>
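
  /**
   * An illustrative sketch, assuming `db` is an open database: the returned
   * objects follow the `level-transcoder` encoding interface, which exposes
   * `encode()` and `decode()` for translating to and from the database's
   * internal format:
   *
   * ```js
   * const keys = db.keyEncoding('utf8')    // normalized known encoding
   * const values = db.valueEncoding('json')
   * const stored = values.encode({ a: 1 }) // encoded to the database format
   * ```
   */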

  /**
   * Call the function {@link fn} at a later time, once {@link status} has changed to
   * `'open'` or `'closed'`.
   */
  defer (fn: Function): void

  /**
   * Schedule the function {@link fn} to be called on the next tick of the JavaScript
   * event loop, using a microtask scheduler. It will be called with the provided
   * {@link args}.
   */
  nextTick (fn: Function, ...args: any[]): void
}

export { AbstractLevel }

/**
 * Options for the database constructor.
 */
export interface AbstractDatabaseOptions<K, V>
  extends Omit<AbstractOpenOptions, 'passive'> {
  /**
   * Encoding to use for keys.
   * @defaultValue `'utf8'`
   */
  keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined

  /**
   * Encoding to use for values.
   * @defaultValue `'utf8'`
   */
  valueEncoding?: string | Transcoder.PartialEncoding<V> | undefined
}
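
/**
 * A minimal usage sketch of these constructor options, shown here with the
 * `classic-level` implementation (whose constructor takes a location plus
 * these options); any other concrete implementation would work the same way:
 *
 * ```js
 * const { ClassicLevel } = require('classic-level')
 * const db = new ClassicLevel('./db', { valueEncoding: 'json' })
 * ```
 */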

/**
 * Options for the {@link AbstractLevel.open} method.
 */
export interface AbstractOpenOptions {
  /**
   * If `true`, create an empty database if one doesn't already exist. If `false`
   * and the database doesn't exist, opening will fail.
   *
   * @defaultValue `true`
   */
  createIfMissing?: boolean | undefined

  /**
   * If `true` and the database already exists, opening will fail.
   *
   * @defaultValue `false`
   */
  errorIfExists?: boolean | undefined

  /**
   * Wait for, but do not initiate, opening of the database.
   *
   * @defaultValue `false`
   */
  passive?: boolean | undefined
}
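
/**
 * A minimal usage sketch, assuming `db` is a database that has not finished
 * opening yet:
 *
 * ```js
 * await db.open({ createIfMissing: false }) // fail if the database doesn't exist
 * ```
 */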

/**
 * Options for the {@link AbstractLevel.get} method.
 */
export interface AbstractGetOptions<K, V> {
  /**
   * Custom key encoding for this operation, used to encode the `key`.
   */
  keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined

  /**
   * Custom value encoding for this operation, used to decode the value.
   */
  valueEncoding?: string | Transcoder.PartialDecoder<V> | undefined
}

/**
 * Options for the {@link AbstractLevel.getMany} method.
 */
export interface AbstractGetManyOptions<K, V> {
  /**
   * Custom key encoding for this operation, used to encode the `keys`.
   */
  keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined

  /**
   * Custom value encoding for this operation, used to decode values.
   */
  valueEncoding?: string | Transcoder.PartialDecoder<V> | undefined
}

/**
 * Options for the {@link AbstractLevel.put} method.
 */
export interface AbstractPutOptions<K, V> {
  /**
   * Custom key encoding for this operation, used to encode the `key`.
   */
  keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined

  /**
   * Custom value encoding for this operation, used to encode the `value`.
   */
  valueEncoding?: string | Transcoder.PartialEncoder<V> | undefined
}

/**
 * Options for the {@link AbstractLevel.del} method.
 */
export interface AbstractDelOptions<K> {
  /**
   * Custom key encoding for this operation, used to encode the `key`.
   */
  keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined
}

/**
 * Options for the {@link AbstractLevel.batch} method.
 */
export interface AbstractBatchOptions<K, V> {
  /**
   * Custom key encoding for this batch, used to encode keys.
   */
  keyEncoding?: string | Transcoder.PartialEncoder<K> | undefined

  /**
   * Custom value encoding for this batch, used to encode values.
   */
  valueEncoding?: string | Transcoder.PartialEncoder<V> | undefined
}

/**
 * A _put_ or _del_ operation to be committed with the {@link AbstractLevel.batch}
 * method.
 */
export type AbstractBatchOperation<TDatabase, K, V> =
  AbstractBatchPutOperation<TDatabase, K, V> | AbstractBatchDelOperation<TDatabase, K>

/**
 * A _put_ operation to be committed with the {@link AbstractLevel.batch} method.
 */
export interface AbstractBatchPutOperation<TDatabase, K, V> {
  type: 'put'
  key: K
  value: V

  /**
   * Custom key encoding for this _put_ operation, used to encode the {@link key}.
   */
  keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined

  /**
   * Custom value encoding for this _put_ operation, used to encode the {@link value}.
   */
  valueEncoding?: string | Transcoder.PartialEncoding<V> | undefined

  /**
   * Act as though the _put_ operation is performed on the given sublevel, to similar
   * effect as:
   *
   * ```js
   * await sublevel.batch([{ type: 'put', key, value }])
   * ```
   *
   * This allows atomically committing data to multiple sublevels. The {@link key} will
   * be prefixed with the `prefix` of the sublevel, and the {@link key} and {@link value}
   * will be encoded by the sublevel (using the default encodings of the sublevel unless
   * {@link keyEncoding} and / or {@link valueEncoding} are provided).
   */
  sublevel?: AbstractSublevel<TDatabase, any, any, any> | undefined
}

/**
 * A _del_ operation to be committed with the {@link AbstractLevel.batch} method.
 */
export interface AbstractBatchDelOperation<TDatabase, K> {
  type: 'del'
  key: K

  /**
   * Custom key encoding for this _del_ operation, used to encode the {@link key}.
   */
  keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined

  /**
   * Act as though the _del_ operation is performed on the given sublevel, to similar
   * effect as:
   *
   * ```js
   * await sublevel.batch([{ type: 'del', key }])
   * ```
   *
   * This allows atomically committing data to multiple sublevels. The {@link key} will
   * be prefixed with the `prefix` of the sublevel, and the {@link key} will be encoded
   * by the sublevel (using the default key encoding of the sublevel unless
   * {@link keyEncoding} is provided).
   */
  sublevel?: AbstractSublevel<TDatabase, any, any, any> | undefined
}
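
/**
 * A minimal sketch of the `sublevel` option on batch operations, assuming `db`
 * is an open database and `people` and `nameIndex` are two of its sublevels;
 * both writes are committed atomically to the parent database:
 *
 * ```js
 * const people = db.sublevel('people')
 * const nameIndex = db.sublevel('names')
 *
 * await db.batch([
 *   { type: 'put', sublevel: people, key: '123', value: 'Alice' },
 *   { type: 'put', sublevel: nameIndex, key: 'Alice~123', value: '123' }
 * ])
 * ```
 */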

/**
 * Options for the {@link AbstractLevel.clear} method.
 */
export interface AbstractClearOptions<K> extends RangeOptions<K> {
  /**
   * Custom key encoding for this operation, used to encode range options.
   */
  keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined
}
58
node_modules/abstract-level/types/abstract-sublevel.d.ts
generated
vendored
Normal file
@ -0,0 +1,58 @@
import * as Transcoder from 'level-transcoder'
import { AbstractLevel } from './abstract-level'

/**
 * @template TDatabase Type of parent database.
 * @template TFormat The type used internally by the parent database to store data.
 * @template KDefault The default type of keys if not overridden on operations.
 * @template VDefault The default type of values if not overridden on operations.
 */
declare class AbstractSublevel<TDatabase, TFormat, KDefault, VDefault>
  extends AbstractLevel<TFormat, KDefault, VDefault> {
  /**
   * Sublevel constructor.
   *
   * @param db Parent database.
   * @param name Name of the sublevel, used to prefix keys.
   */
  constructor (
    db: TDatabase,
    name: string,
    options?: AbstractSublevelOptions<KDefault, VDefault> | undefined
  )

  /**
   * Prefix of the sublevel. A read-only string property.
   */
  get prefix (): string

  /**
   * Parent database. A read-only property.
   */
  get db (): TDatabase
}
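
/**
 * An illustrative sketch of the `prefix` and `db` properties, assuming `db` is
 * an open database and the sublevel uses the default `'!'` separator:
 *
 * ```js
 * const people = db.sublevel('people')
 * console.log(people.prefix) // '!people!'
 * console.log(people.db)     // the parent database
 * ```
 */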

/**
 * Options for the {@link AbstractLevel.sublevel} method.
 */
export interface AbstractSublevelOptions<K, V> {
  /**
   * Character for separating sublevel names from user keys and each other. Must sort
   * before characters used in `name`. An error will be thrown if that's not the case.
   *
   * @defaultValue `'!'`
   */
  separator?: string | undefined

  /**
   * Encoding to use for keys.
   * @defaultValue `'utf8'`
   */
  keyEncoding?: string | Transcoder.PartialEncoding<K> | undefined

  /**
   * Encoding to use for values.
   * @defaultValue `'utf8'`
   */
  valueEncoding?: string | Transcoder.PartialEncoding<V> | undefined
}
14
node_modules/abstract-level/types/interfaces.d.ts
generated
vendored
Normal file
@ -0,0 +1,14 @@
/**
 * An error-first callback in the style of Node.js.
 */
export type NodeCallback<T> =
  (err: Error | undefined | null, result?: T | undefined) => void

export interface RangeOptions<K> {
  gt?: K
  gte?: K
  lt?: K
  lte?: K
  reverse?: boolean | undefined
  limit?: number | undefined
}
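
/**
 * A minimal usage sketch of these range options, as consumed by iterators and
 * by `clear()` (assuming `db` is an open database with default `'utf8'`
 * encodings):
 *
 * ```js
 * for await (const key of db.keys({ gte: 'a', lt: 'b', limit: 10, reverse: true })) {
 *   console.log(key) // at most 10 keys in the range ['a', 'b'), newest-sorted last first
 * }
 * ```
 */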