Initial import with skill sheet working

commit 9050c80ab4
2024-12-04 00:11:23 +01:00
4488 changed files with 671048 additions and 0 deletions

node_modules/@seald-io/nedb/lib/byline.js generated vendored Normal file

@@ -0,0 +1,118 @@
// Forked from https://github.com/jahewson/node-byline
// Copyright (C) 2011-2015 John Hewson
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
/**
* @module byline
* @private
*/
const stream = require('stream')
const timers = require('timers')
const { Buffer } = require('buffer')
const createLineStream = (readStream, options) => {
if (!readStream) throw new Error('expected readStream')
if (!readStream.readable) throw new Error('readStream must be readable')
const ls = new LineStream(options)
readStream.pipe(ls)
return ls
}
/**
* Fork from {@link https://github.com/jahewson/node-byline}.
* @see https://github.com/jahewson/node-byline
* @alias module:byline.LineStream
* @private
*/
class LineStream extends stream.Transform {
constructor (options) {
super(options)
options = options || {}
// use objectMode to stop the output from being buffered
// which re-concatenates the lines, just without newlines.
this._readableState.objectMode = true
this._lineBuffer = []
this._keepEmptyLines = options.keepEmptyLines || false
this._lastChunkEndedWithCR = false
// take the source's encoding if we don't have one
this.once('pipe', src => {
if (!this.encoding && src instanceof stream.Readable) this.encoding = src._readableState.encoding // but we can't do this for old-style streams
})
}
_transform (chunk, encoding, done) {
// decode binary chunks as UTF-8
encoding = encoding || 'utf8'
if (Buffer.isBuffer(chunk)) {
if (encoding === 'buffer') {
chunk = chunk.toString() // utf8
encoding = 'utf8'
} else chunk = chunk.toString(encoding)
}
this._chunkEncoding = encoding
// see: http://www.unicode.org/reports/tr18/#Line_Boundaries
const lines = chunk.split(/\r\n|[\n\v\f\r\x85\u2028\u2029]/g)
// don't split CRLF which spans chunks
if (this._lastChunkEndedWithCR && chunk[0] === '\n') lines.shift()
if (this._lineBuffer.length > 0) {
this._lineBuffer[this._lineBuffer.length - 1] += lines[0]
lines.shift()
}
this._lastChunkEndedWithCR = chunk[chunk.length - 1] === '\r'
this._lineBuffer = this._lineBuffer.concat(lines)
this._pushBuffer(encoding, 1, done)
}
_pushBuffer (encoding, keep, done) {
// always buffer the last (possibly partial) line
while (this._lineBuffer.length > keep) {
const line = this._lineBuffer.shift()
// skip empty lines
if (this._keepEmptyLines || line.length > 0) {
if (!this.push(this._reencode(line, encoding))) {
// when the high-water mark is reached, defer pushes until the next tick
timers.setImmediate(() => { this._pushBuffer(encoding, keep, done) })
return
}
}
}
done()
}
_flush (done) {
this._pushBuffer(this._chunkEncoding, 0, done)
}
// see Readable::push
_reencode (line, chunkEncoding) {
if (this.encoding && this.encoding !== chunkEncoding) return Buffer.from(line, chunkEncoding).toString(this.encoding)
else if (this.encoding) return line // this should be the most common case, i.e. we're using an encoded source stream
else return Buffer.from(line, chunkEncoding)
}
}
module.exports = createLineStream
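An illustrative usage sketch (not part of the vendored file; the datafile name is a made-up assumption): pipe any readable stream through the factory and consume one line per 'data' event.

const fs = require('fs')
const byline = require('./byline')

// emits each line of the (hypothetical) datafile, line terminators stripped
const lines = byline(fs.createReadStream('datafile.db', { encoding: 'utf8' }))
lines.on('data', line => console.log(line))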

node_modules/@seald-io/nedb/lib/cursor.js generated vendored Executable file

@@ -0,0 +1,250 @@
const model = require('./model.js')
const { callbackify } = require('util')
/**
* Has a callback
* @callback Cursor~mapFn
* @param {document[]} res
* @return {*|Promise<*>}
*/
/**
* Manage access to data, be it to find, update or remove it.
*
* It implements `then`, `catch` & `finally` so that its methods (which return `this`) are chainable & awaitable.
* @extends Promise
*/
class Cursor {
/**
* Create a new cursor for this collection.
* @param {Datastore} db - The datastore this cursor is bound to
* @param {query} query - The query this cursor will operate on
* @param {Cursor~mapFn} [mapFn] - Handler to be executed after cursor has found the results and before the callback passed to find/findOne/update/remove
*/
constructor (db, query, mapFn) {
/**
* @protected
* @type {Datastore}
*/
this.db = db
/**
* @protected
* @type {query}
*/
this.query = query || {}
/**
* The handler to be executed after cursor has found the results.
* @type {Cursor~mapFn}
* @protected
*/
if (mapFn) this.mapFn = mapFn
/**
* @see Cursor#limit
* @type {undefined|number}
* @private
*/
this._limit = undefined
/**
* @see Cursor#skip
* @type {undefined|number}
* @private
*/
this._skip = undefined
/**
* @see Cursor#sort
* @type {undefined|Object.<string, number>}
* @private
*/
this._sort = undefined
/**
* @see Cursor#projection
* @type {undefined|Object.<string, number>}
* @private
*/
this._projection = undefined
}
/**
* Set a limit to the number of results for the given Cursor.
* @param {Number} limit
* @return {Cursor} the same instance of Cursor (useful for chaining).
*/
limit (limit) {
this._limit = limit
return this
}
/**
* Skip a number of results for the given Cursor.
* @param {Number} skip
* @return {Cursor} the same instance of Cursor (useful for chaining).
*/
skip (skip) {
this._skip = skip
return this
}
/**
* Sort results of the query for the given Cursor.
* @param {Object.<string, number>} sortQuery - sortQuery is { field: order }, field can use the dot-notation, order is 1 for ascending and -1 for descending
* @return {Cursor} the same instance of Cursor (useful for chaining).
*/
sort (sortQuery) {
this._sort = sortQuery
return this
}
/**
* Add the use of a projection to the given Cursor.
* @param {Object.<string, number>} projection - MongoDB-style projection. {} means take all fields. Then it's { key1: 1, key2: 1 } to take only key1 and key2
* { key1: 0, key2: 0 } to omit only key1 and key2. Except _id, you can't mix takes and omits.
* @return {Cursor} the same instance of Cursor (useful for chaining).
*/
projection (projection) {
this._projection = projection
return this
}
/**
* Apply the projection.
*
* This is an internal function. You should use {@link Cursor#execAsync} or {@link Cursor#exec}.
* @param {document[]} candidates
* @return {document[]}
* @private
*/
_project (candidates) {
const res = []
let action
if (this._projection === undefined || Object.keys(this._projection).length === 0) {
return candidates
}
const keepId = this._projection._id !== 0
const { _id, ...rest } = this._projection
this._projection = rest
// Check for consistency
const keys = Object.keys(this._projection)
keys.forEach(k => {
if (action !== undefined && this._projection[k] !== action) throw new Error('Can\'t both keep and omit fields except for _id')
action = this._projection[k]
})
// Do the actual projection
candidates.forEach(candidate => {
let toPush
if (action === 1) { // pick-type projection
toPush = { $set: {} }
keys.forEach(k => {
toPush.$set[k] = model.getDotValue(candidate, k)
if (toPush.$set[k] === undefined) delete toPush.$set[k]
})
toPush = model.modify({}, toPush)
} else { // omit-type projection
toPush = { $unset: {} }
keys.forEach(k => { toPush.$unset[k] = true })
toPush = model.modify(candidate, toPush)
}
if (keepId) toPush._id = candidate._id
else delete toPush._id
res.push(toPush)
})
return res
}
/**
* Get all matching elements
* Will return pointers to matched elements (shallow copies), returning full copies is the role of find or findOne
* This is an internal function, use execAsync which uses the executor
* @return {document[]|Promise<*>}
* @private
*/
async _execAsync () {
let res = []
let added = 0
let skipped = 0
const candidates = await this.db._getCandidatesAsync(this.query)
for (const candidate of candidates) {
if (model.match(candidate, this.query)) {
// If a sort is defined, wait for the results to be sorted before applying limit and skip
if (!this._sort) {
if (this._skip && this._skip > skipped) skipped += 1
else {
res.push(candidate)
added += 1
if (this._limit && this._limit <= added) break
}
} else res.push(candidate)
}
}
// Apply all sorts
if (this._sort) {
// Sorting
const criteria = Object.entries(this._sort).map(([key, direction]) => ({ key, direction }))
res.sort((a, b) => {
for (const criterion of criteria) {
const compare = criterion.direction * model.compareThings(model.getDotValue(a, criterion.key), model.getDotValue(b, criterion.key), this.db.compareStrings)
if (compare !== 0) return compare
}
return 0
})
// Applying limit and skip
const limit = this._limit || res.length
const skip = this._skip || 0
res = res.slice(skip, skip + limit)
}
// Apply projection
res = this._project(res)
if (this.mapFn) return this.mapFn(res)
return res
}
/**
* @callback Cursor~execCallback
* @param {Error} err
* @param {document[]|*} res If a mapFn was given to the Cursor, then the type of this parameter is the one returned by the mapFn.
*/
/**
* Callback version of {@link Cursor#exec}.
* @param {Cursor~execCallback} _callback
* @see Cursor#execAsync
*/
exec (_callback) {
callbackify(() => this.execAsync())(_callback)
}
/**
* Get all matching elements.
* Will return pointers to matched elements (shallow copies), returning full copies is the role of {@link Datastore#findAsync} or {@link Datastore#findOneAsync}.
* @return {Promise<document[]|*>}
* @async
*/
execAsync () {
return this.db.executor.pushAsync(() => this._execAsync())
}
then (onFulfilled, onRejected) {
return this.execAsync().then(onFulfilled, onRejected)
}
catch (onRejected) {
return this.execAsync().catch(onRejected)
}
finally (onFinally) {
return this.execAsync().finally(onFinally)
}
}
// Interface
module.exports = Cursor
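Because the class implements `then`, `catch` and `finally` by delegating to `execAsync`, a cursor can be awaited directly. An illustrative sketch (not part of the vendored file), assuming an existing Datastore instance `db` whose `find` returns a Cursor, as in NeDB's public API:

// sort first, then apply skip/limit, then keep only the `planet` field (plus _id)
const listPlanets = db => db.find({ system: 'solar' })
  .sort({ planet: 1 }) // 1 = ascending, -1 = descending
  .skip(1)
  .limit(2)
  .projection({ planet: 1 })

// const docs = await listPlanets(db)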

node_modules/@seald-io/nedb/lib/customUtils.js generated vendored Executable file

@@ -0,0 +1,26 @@
/**
* Utility functions that need to be reimplemented for each environment.
* This is the version for Node.js
* @module customUtilsNode
* @private
*/
const crypto = require('crypto')
/**
* Return a random alphanumerical string of length len
* There is a very small probability (less than 1/1,000,000) for the length to be less than len
* (if the base64 conversion yields too many pluses and slashes) but
* that's not an issue here
* The probability of a collision is extremely small (need 3*10^12 documents to have one chance in a million of a collision)
* See http://en.wikipedia.org/wiki/Birthday_problem
* @param {number} len
* @return {string}
* @alias module:customUtilsNode.uid
*/
const uid = len => crypto.randomBytes(Math.ceil(Math.max(8, len * 2)))
.toString('base64')
.replace(/[+/]/g, '')
.slice(0, len)
// Interface
module.exports.uid = uid
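An illustrative call (not part of the vendored file; output is random and differs on every run):

const { uid } = require('./customUtils.js')
console.log(uid(16)) // e.g. 'J2xSqMDKTJcZGAVg' — 16 alphanumeric characters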

node_modules/@seald-io/nedb/lib/datastore.js generated vendored Executable file

File diff suppressed because it is too large (1,137 added lines)

node_modules/@seald-io/nedb/lib/executor.js generated vendored Executable file

@@ -0,0 +1,79 @@
const Waterfall = require('./waterfall')
/**
* Executes operations sequentially.
* Has an option for a buffer that can be triggered afterwards.
* @private
*/
class Executor {
/**
* Instantiates a new Executor.
*/
constructor () {
/**
* If this.ready is `false`, then every task pushed will be buffered until this.processBuffer is called.
* @type {boolean}
* @private
*/
this.ready = false
/**
* The main queue
* @type {Waterfall}
* @private
*/
this.queue = new Waterfall()
/**
* The buffer queue
* @type {Waterfall}
* @private
*/
this.buffer = null
/**
* Method to trigger the buffer processing.
*
* Do not use directly, use `this.processBuffer` instead.
* @function
* @private
*/
this._triggerBuffer = null
this.resetBuffer()
}
/**
* If executor is ready, queue task (and process it immediately if executor was idle)
* If not, buffer task for later processing
* @param {AsyncFunction} task Function to execute
* @param {boolean} [forceQueuing = false] Optional (defaults to false) force executor to queue task even if it is not ready
* @return {Promise<*>}
* @async
* @see Executor#push
*/
pushAsync (task, forceQueuing = false) {
if (this.ready || forceQueuing) return this.queue.waterfall(task)()
else return this.buffer.waterfall(task)()
}
/**
* Queue all tasks in buffer (in the same order they came in)
* Automatically sets executor as ready
*/
processBuffer () {
this.ready = true
this._triggerBuffer()
this.queue.waterfall(() => this.buffer.guardian)
}
/**
* Removes all tasks queued up in the buffer
*/
resetBuffer () {
this.buffer = new Waterfall()
this.buffer.chain(new Promise(resolve => {
this._triggerBuffer = resolve
}))
if (this.ready) this._triggerBuffer()
}
}
// Interface
module.exports = Executor
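A sketch of the buffering behaviour with made-up tasks (not part of the vendored file): tasks pushed before `processBuffer` is called are held in the buffer queue, then replayed in order ahead of anything queued afterwards.

const Executor = require('./executor')

const main = async () => {
  const executor = new Executor()
  const held = executor.pushAsync(async () => 'buffered') // not ready yet: goes to the buffer
  executor.processBuffer() // marks the executor ready and drains the buffer in order
  console.log(await held) // 'buffered'
  console.log(await executor.pushAsync(async () => 'queued')) // runs once the buffer drained
}
main()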

node_modules/@seald-io/nedb/lib/indexes.js generated vendored Executable file

@@ -0,0 +1,333 @@
const BinarySearchTree = require('@seald-io/binary-search-tree').AVLTree
const model = require('./model.js')
const { uniq, isDate } = require('./utils.js')
/**
* Two indexed pointers are equal if they point to the same place
* @param {*} a
* @param {*} b
* @return {boolean}
* @private
*/
const checkValueEquality = (a, b) => a === b
/**
* Type-aware projection
* @param {*} elt
* @return {string|*}
* @private
*/
const projectForUnique = elt => {
if (elt === null) return '$null'
if (typeof elt === 'string') return '$string' + elt
if (typeof elt === 'boolean') return '$boolean' + elt
if (typeof elt === 'number') return '$number' + elt
if (isDate(elt)) return '$date' + elt.getTime()
return elt // Arrays and objects, will check for pointer equality
}
/**
* Indexes on field names, with atomic operations and which can optionally enforce a unique constraint or allow indexed
* fields to be undefined
* @private
*/
class Index {
/**
* Create a new index
* All methods on an index guarantee that either the whole operation was successful and the index changed
* or the operation was unsuccessful and an error is thrown while the index is unchanged
* @param {object} options
* @param {string} options.fieldName On which field should the index apply, can use dot notation to index on sub fields, can use comma-separated notation to use compound indexes
* @param {boolean} [options.unique = false] Enforces a unique constraint
* @param {boolean} [options.sparse = false] Allows a sparse index (we can have documents for which fieldName is `undefined`)
*/
constructor (options) {
/**
* On which field the index applies to, can use dot notation to index on sub fields, can use comma-separated notation to use compound indexes.
* @type {string}
*/
this.fieldName = options.fieldName
if (typeof this.fieldName !== 'string') throw new Error('fieldName must be a string')
/**
* Internal property which is an Array representing the fieldName split with `,`, useful only for compound indexes.
* @type {string[]}
* @private
*/
this._fields = this.fieldName.split(',')
/**
* Defines if the index enforces a unique constraint for this index.
* @type {boolean}
*/
this.unique = options.unique || false
/**
* Defines if we can have documents for which fieldName is `undefined`
* @type {boolean}
*/
this.sparse = options.sparse || false
/**
* Options object given to the underlying BinarySearchTree.
* @type {{unique: boolean, checkValueEquality: (function(*, *): boolean), compareKeys: ((function(*, *, compareStrings): (number|number))|*)}}
*/
this.treeOptions = { unique: this.unique, compareKeys: model.compareThings, checkValueEquality }
/**
* Underlying BinarySearchTree for this index. Uses an AVLTree for optimization.
* @type {AVLTree}
*/
this.tree = new BinarySearchTree(this.treeOptions)
}
/**
* Reset an index
* @param {?document|?document[]} [newData] Data to initialize the index with. If an error is thrown during
* insertion, the index is not modified.
*/
reset (newData) {
this.tree = new BinarySearchTree(this.treeOptions)
if (newData) this.insert(newData)
}
/**
* Insert a new document in the index
* If an array is passed, we insert all its elements (if one insertion fails the index is not modified)
* O(log(n))
* @param {document|document[]} doc The document, or array of documents, to insert.
*/
insert (doc) {
let keys
let failingIndex
let error
if (Array.isArray(doc)) {
this.insertMultipleDocs(doc)
return
}
const key = model.getDotValues(doc, this._fields)
// We don't index documents that don't contain the field if the index is sparse
if ((key === undefined || (typeof key === 'object' && key !== null && Object.values(key).every(el => el === undefined))) && this.sparse) return
if (!Array.isArray(key)) this.tree.insert(key, doc)
else {
// If an insert fails due to a unique constraint, roll back all inserts before it
keys = uniq(key, projectForUnique)
for (let i = 0; i < keys.length; i += 1) {
try {
this.tree.insert(keys[i], doc)
} catch (e) {
error = e
failingIndex = i
break
}
}
if (error) {
for (let i = 0; i < failingIndex; i += 1) {
this.tree.delete(keys[i], doc)
}
throw error
}
}
}
/**
* Insert an array of documents in the index
* If a constraint is violated, the changes should be rolled back and an error thrown
* @param {document[]} docs Array of documents to insert.
* @private
*/
insertMultipleDocs (docs) {
let error
let failingIndex
for (let i = 0; i < docs.length; i += 1) {
try {
this.insert(docs[i])
} catch (e) {
error = e
failingIndex = i
break
}
}
if (error) {
for (let i = 0; i < failingIndex; i += 1) {
this.remove(docs[i])
}
throw error
}
}
/**
* Removes a document from the index.
* If an array is passed, we remove all its elements
* The remove operation is safe with regards to the 'unique' constraint
* O(log(n))
* @param {document[]|document} doc The document, or Array of documents, to remove.
*/
remove (doc) {
if (Array.isArray(doc)) {
doc.forEach(d => { this.remove(d) })
return
}
const key = model.getDotValues(doc, this._fields)
if (key === undefined && this.sparse) return
if (!Array.isArray(key)) {
this.tree.delete(key, doc)
} else {
uniq(key, projectForUnique).forEach(_key => {
this.tree.delete(_key, doc)
})
}
}
/**
* Update a document in the index
* If a constraint is violated, changes are rolled back and an error thrown
* Naive implementation, still in O(log(n))
* @param {document|Array.<{oldDoc: document, newDoc: document}>} oldDoc Document to update, or an `Array` of
* `{oldDoc, newDoc}` pairs.
* @param {document} [newDoc] Document to replace the oldDoc with. If the first argument is an `Array` of
* `{oldDoc, newDoc}` pairs, this second argument is ignored.
*/
update (oldDoc, newDoc) {
if (Array.isArray(oldDoc)) {
this.updateMultipleDocs(oldDoc)
return
}
this.remove(oldDoc)
try {
this.insert(newDoc)
} catch (e) {
this.insert(oldDoc)
throw e
}
}
/**
* Update multiple documents in the index
* If a constraint is violated, the changes need to be rolled back
* and an error thrown
* @param {Array.<{oldDoc: document, newDoc: document}>} pairs
*
* @private
*/
updateMultipleDocs (pairs) {
let failingIndex
let error
for (let i = 0; i < pairs.length; i += 1) {
this.remove(pairs[i].oldDoc)
}
for (let i = 0; i < pairs.length; i += 1) {
try {
this.insert(pairs[i].newDoc)
} catch (e) {
error = e
failingIndex = i
break
}
}
// If an error was raised, roll back changes in the inverse order
if (error) {
for (let i = 0; i < failingIndex; i += 1) {
this.remove(pairs[i].newDoc)
}
for (let i = 0; i < pairs.length; i += 1) {
this.insert(pairs[i].oldDoc)
}
throw error
}
}
/**
* Revert an update
* @param {document|Array.<{oldDoc: document, newDoc: document}>} oldDoc Document to revert to, or an `Array` of `{oldDoc, newDoc}` pairs.
* @param {document} [newDoc] Document to revert from. If the first argument is an Array of {oldDoc, newDoc}, this second argument is ignored.
*/
revertUpdate (oldDoc, newDoc) {
const revert = []
if (!Array.isArray(oldDoc)) this.update(newDoc, oldDoc)
else {
oldDoc.forEach(pair => {
revert.push({ oldDoc: pair.newDoc, newDoc: pair.oldDoc })
})
this.update(revert)
}
}
/**
* Get all documents in index whose key match value (if it is a Thing) or one of the elements of value (if it is an array of Things)
* @param {Array.<*>|*} value Value to match the key against
* @return {document[]}
*/
getMatching (value) {
if (!Array.isArray(value)) return this.tree.search(value)
else {
const _res = {}
const res = []
value.forEach(v => {
this.getMatching(v).forEach(doc => {
_res[doc._id] = doc
})
})
Object.keys(_res).forEach(_id => {
res.push(_res[_id])
})
return res
}
}
/**
* Get all documents in index whose key is between bounds as they are defined by the query
* Documents are sorted by key
* @param {object} query An object with at least one matcher among $gt, $gte, $lt, $lte.
* @param {*} [query.$gt] Greater than matcher.
* @param {*} [query.$gte] Greater than or equal matcher.
* @param {*} [query.$lt] Lower than matcher.
* @param {*} [query.$lte] Lower than or equal matcher.
* @return {document[]}
*/
getBetweenBounds (query) {
return this.tree.betweenBounds(query)
}
/**
* Get all elements in the index
* @return {document[]}
*/
getAll () {
const res = []
this.tree.executeOnEveryNode(node => {
res.push(...node.data)
})
return res
}
}
// Interface
module.exports = Index
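A sketch of the all-or-nothing guarantee on a unique index (field name and documents are made up; not part of the vendored file):

const Index = require('./indexes.js')

const index = new Index({ fieldName: 'email', unique: true })
index.insert({ _id: '1', email: 'a@example.com' })
try {
  index.insert({ _id: '2', email: 'a@example.com' }) // violates the unique constraint
} catch (err) {
  // the failed insert left the index unchanged
  console.log(index.getMatching('a@example.com').length) // 1
}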

node_modules/@seald-io/nedb/lib/model.js generated vendored Executable file

@@ -0,0 +1,827 @@
/**
* Handle models (i.e. docs)
* Serialization/deserialization
* Copying
* Querying, update
* @module model
* @private
*/
const { uniq, isDate, isRegExp } = require('./utils.js')
/**
* Check a key, throw an error if the key is not valid
* @param {string} k key
* @param {document} v value, needed to treat the Date edge case
* Non-treatable edge cases here: if part of the object is of the form { $$date: number } or { $$deleted: true },
* its serialized-then-deserialized version will be transformed into a Date object
* But you really need to want it to trigger such behaviour, even when warned not to use '$' at the beginning of the field names...
* @private
*/
const checkKey = (k, v) => {
if (typeof k === 'number') k = k.toString()
if (
k[0] === '$' &&
!(k === '$$date' && typeof v === 'number') &&
!(k === '$$deleted' && v === true) &&
!(k === '$$indexCreated') &&
!(k === '$$indexRemoved')
) throw new Error('Field names cannot begin with the $ character')
if (k.indexOf('.') !== -1) throw new Error('Field names cannot contain a .')
}
/**
* Check a DB object and throw an error if it's not valid
* Works by applying the above checkKey function to all fields recursively
* @param {document|document[]} obj
* @alias module:model.checkObject
*/
const checkObject = obj => {
if (Array.isArray(obj)) {
obj.forEach(o => {
checkObject(o)
})
}
if (typeof obj === 'object' && obj !== null) {
for (const k in obj) {
if (Object.prototype.hasOwnProperty.call(obj, k)) {
checkKey(k, obj[k])
checkObject(obj[k])
}
}
}
}
/**
* Serialize an object to be persisted to a one-line string
* For serialization/deserialization, we use the native JSON parser and not eval or Function
* That gives us less freedom but data entered in the database may come from users
* so eval and the like are not safe
* Accepted primitive types: Number, String, Boolean, Date, null
* Accepted secondary types: Objects, Arrays
* @param {document} obj
* @return {string}
* @alias module:model.serialize
*/
const serialize = obj => {
return JSON.stringify(obj, function (k, v) {
checkKey(k, v)
if (v === undefined) return undefined
if (v === null) return null
// Hackish way of checking if object is Date (this way it works between execution contexts in node-webkit).
// We can't use value directly because for dates it is already string in this function (date.toJSON was already called), so we use this
if (typeof this[k].getTime === 'function') return { $$date: this[k].getTime() }
return v
})
}
/**
* From a one-line representation of an object generated by the serialize function
* Return the object itself
* @param {string} rawData
* @return {document}
* @alias module:model.deserialize
*/
const deserialize = rawData => JSON.parse(rawData, function (k, v) {
if (k === '$$date') return new Date(v)
if (
typeof v === 'string' ||
typeof v === 'number' ||
typeof v === 'boolean' ||
v === null
) return v
if (v && v.$$date) return v.$$date
return v
})
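// Illustrative round-trip (a sketch, not part of the vendored file): Dates survive
// serialization through the { $$date: <ms> } envelope handled by the two functions above.
//   serialize({ _id: 'id1', created: new Date(0) })
//   // -> '{"_id":"id1","created":{"$$date":0}}'
//   deserialize('{"_id":"id1","created":{"$$date":0}}').created instanceof Date // -> true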
/**
* Deep copy a DB object
* The optional strictKeys flag (defaulting to false) indicates whether to copy everything or only fields
* where the keys are valid, i.e. don't begin with $ and don't contain a .
* @param {?document} obj
* @param {boolean} [strictKeys=false]
* @return {?document}
* @alias module:model.deepCopy
*/
function deepCopy (obj, strictKeys) {
if (
typeof obj === 'boolean' ||
typeof obj === 'number' ||
typeof obj === 'string' ||
obj === null ||
(isDate(obj))
) return obj
if (Array.isArray(obj)) return obj.map(o => deepCopy(o, strictKeys))
if (typeof obj === 'object') {
const res = {}
for (const k in obj) {
if (
Object.prototype.hasOwnProperty.call(obj, k) &&
(!strictKeys || (k[0] !== '$' && k.indexOf('.') === -1))
) {
res[k] = deepCopy(obj[k], strictKeys)
}
}
return res
}
return undefined // For now everything else is undefined. We should probably throw an error instead
}
/**
* Tells if an object is a primitive type or a "real" object
* Arrays are considered primitive
* @param {*} obj
* @return {boolean}
* @alias module:model.isPrimitiveType
*/
const isPrimitiveType = obj => (
typeof obj === 'boolean' ||
typeof obj === 'number' ||
typeof obj === 'string' ||
obj === null ||
isDate(obj) ||
Array.isArray(obj)
)
/**
* Utility functions for comparing things
* Assumes type checking was already done (a and b already have the same type)
* compareNSB works for numbers, strings and booleans
* @param {number|string|boolean} a
* @param {number|string|boolean} b
* @return {number} 0 if a == b, 1 if a > b, -1 if a < b
* @private
*/
const compareNSB = (a, b) => {
if (a < b) return -1
if (a > b) return 1
return 0
}
/**
* Utility function for comparing arrays
* Assumes type checking was already done (a and b already have the same type)
* compareNSB works for numbers, strings and booleans
* @param {Array} a
* @param {Array} b
* @return {number} 0 if arrays have the same length and all elements equal one another. Else either 1 or -1.
* @private
*/
const compareArrays = (a, b) => {
const minLength = Math.min(a.length, b.length)
for (let i = 0; i < minLength; i += 1) {
const comp = compareThings(a[i], b[i])
if (comp !== 0) return comp
}
// Common section was identical, longest one wins
return compareNSB(a.length, b.length)
}
/**
* Compare { things U undefined }
* Things are defined as any native types (string, number, boolean, null, date) and objects
* We need to compare with undefined as it will be used in indexes
* In the case of objects and arrays, we deep-compare
* If two objects don't have the same type, the (arbitrary) type hierarchy is: undefined, null, numbers, strings, booleans, dates, arrays, objects
* Return -1 if a < b, 1 if a > b and 0 if a = b (note that equality here is NOT the same as defined in areThingsEqual!)
* @param {*} a
* @param {*} b
* @param {compareStrings} [_compareStrings] String comparing function, returning -1, 0 or 1, overriding default string comparison (useful for languages with accented letters)
* @return {number}
* @alias module:model.compareThings
*/
const compareThings = (a, b, _compareStrings) => {
const compareStrings = _compareStrings || compareNSB
// undefined
if (a === undefined) return b === undefined ? 0 : -1
if (b === undefined) return 1 // no need to test if a === undefined
// null
if (a === null) return b === null ? 0 : -1
if (b === null) return 1 // no need to test if a === null
// Numbers
if (typeof a === 'number') return typeof b === 'number' ? compareNSB(a, b) : -1
if (typeof b === 'number') return typeof a === 'number' ? compareNSB(a, b) : 1
// Strings
if (typeof a === 'string') return typeof b === 'string' ? compareStrings(a, b) : -1
if (typeof b === 'string') return typeof a === 'string' ? compareStrings(a, b) : 1
// Booleans
if (typeof a === 'boolean') return typeof b === 'boolean' ? compareNSB(a, b) : -1
if (typeof b === 'boolean') return typeof a === 'boolean' ? compareNSB(a, b) : 1
// Dates
if (isDate(a)) return isDate(b) ? compareNSB(a.getTime(), b.getTime()) : -1
if (isDate(b)) return isDate(a) ? compareNSB(a.getTime(), b.getTime()) : 1
// Arrays (first element is most significant and so on)
if (Array.isArray(a)) return Array.isArray(b) ? compareArrays(a, b) : -1
if (Array.isArray(b)) return Array.isArray(a) ? compareArrays(a, b) : 1
// Objects
const aKeys = Object.keys(a).sort()
const bKeys = Object.keys(b).sort()
for (let i = 0; i < Math.min(aKeys.length, bKeys.length); i += 1) {
const comp = compareThings(a[aKeys[i]], b[bKeys[i]])
if (comp !== 0) return comp
}
return compareNSB(aKeys.length, bKeys.length)
}
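// Illustrative orderings (a sketch, not part of the vendored file), following the type
// hierarchy undefined < null < numbers < strings < booleans < dates < arrays < objects:
//   compareThings(2, 10)          // -> -1 (numeric comparison, not lexicographic)
//   compareThings(null, 0)        // -> -1 (null sorts before any number)
//   compareThings('10', 9)        // -> 1  (any string sorts after any number)
//   compareThings([1, 2], [1, 3]) // -> -1 (first differing element decides)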
// ==============================================================
// Updating documents
// ==============================================================
/**
* @callback modifierFunction
* The signature of modifier functions is as follows
* Their structure is always the same: recursively follow the dot notation while creating
* the nested documents if needed, then apply the "last step modifier"
* @param {Object} obj The model to modify
* @param {String} field Can contain dots, in that case that means we will set a subfield recursively
* @param {document} value
*/
/**
* Create the complete modifier function
* @param {modifierFunction} lastStepModifierFunction a lastStepModifierFunction
* @param {boolean} [unset = false] Bad-looking specific fix, needs to be generalized once modifiers that behave like $unset are implemented
* @return {modifierFunction}
* @private
*/
const createModifierFunction = (lastStepModifierFunction, unset = false) => (obj, field, value) => {
const func = (obj, field, value) => {
const fieldParts = typeof field === 'string' ? field.split('.') : field
if (fieldParts.length === 1) lastStepModifierFunction(obj, field, value)
else {
if (obj[fieldParts[0]] === undefined) {
if (unset) return
obj[fieldParts[0]] = {}
}
func(obj[fieldParts[0]], fieldParts.slice(1), value)
}
}
return func(obj, field, value)
}
const $addToSetPartial = (obj, field, value) => {
// Create the array if it doesn't exist
if (!Object.prototype.hasOwnProperty.call(obj, field)) { obj[field] = [] }
if (!Array.isArray(obj[field])) throw new Error('Can\'t $addToSet an element on non-array values')
if (value !== null && typeof value === 'object' && value.$each) {
if (Object.keys(value).length > 1) throw new Error('Can\'t use another field in conjunction with $each')
if (!Array.isArray(value.$each)) throw new Error('$each requires an array value')
value.$each.forEach(v => {
$addToSetPartial(obj, field, v)
})
} else {
let addToSet = true
obj[field].forEach(v => {
if (compareThings(v, value) === 0) addToSet = false
})
if (addToSet) obj[field].push(value)
}
}
/**
* @enum {modifierFunction}
*/
const modifierFunctions = {
/**
* Set a field to a new value
*/
$set: createModifierFunction((obj, field, value) => {
obj[field] = value
}),
/**
* Unset a field
*/
$unset: createModifierFunction((obj, field, value) => {
delete obj[field]
}, true),
/**
* Updates the value of the field, only if specified field is smaller than the current value of the field
*/
$min: createModifierFunction((obj, field, value) => {
if (typeof obj[field] === 'undefined') obj[field] = value
else if (value < obj[field]) obj[field] = value
}),
/**
* Updates the value of the field, only if specified field is greater than the current value of the field
*/
$max: createModifierFunction((obj, field, value) => {
if (typeof obj[field] === 'undefined') obj[field] = value
else if (value > obj[field]) obj[field] = value
}),
/**
* Increment a numeric field's value
*/
$inc: createModifierFunction((obj, field, value) => {
if (typeof value !== 'number') throw new Error(`${value} must be a number`)
if (typeof obj[field] !== 'number') {
if (!Object.prototype.hasOwnProperty.call(obj, field)) obj[field] = value
else throw new Error('Don\'t use the $inc modifier on non-number fields')
} else obj[field] += value
}),
/**
* Removes all instances of a value from an existing array
*/
$pull: createModifierFunction((obj, field, value) => {
if (!Array.isArray(obj[field])) throw new Error('Can\'t $pull an element from non-array values')
const arr = obj[field]
for (let i = arr.length - 1; i >= 0; i -= 1) {
if (match(arr[i], value)) arr.splice(i, 1)
}
}),
/**
* Remove the first or last element of an array
*/
$pop: createModifierFunction((obj, field, value) => {
if (!Array.isArray(obj[field])) throw new Error('Can\'t $pop an element from non-array values')
if (typeof value !== 'number') throw new Error(`${value} isn't an integer, can't use it with $pop`)
if (value === 0) return
if (value > 0) obj[field] = obj[field].slice(0, obj[field].length - 1)
else obj[field] = obj[field].slice(1)
}),
/**
* Add an element to an array field only if it is not already in it
* No modification if the element is already in the array
* Note that it doesn't check whether the original array contains duplicates
*/
$addToSet: createModifierFunction($addToSetPartial),
/**
* Push an element to the end of an array field
* Optional modifier $each instead of value to push several values
* Optional modifier $slice to slice the resulting array, see https://docs.mongodb.org/manual/reference/operator/update/slice/
* Difference with MongoDB: if $slice is specified and not $each, we act as if value is an empty array
*/
$push: createModifierFunction((obj, field, value) => {
// Create the array if it doesn't exist
if (!Object.prototype.hasOwnProperty.call(obj, field)) obj[field] = []
if (!Array.isArray(obj[field])) throw new Error('Can\'t $push an element on non-array values')
if (
value !== null &&
typeof value === 'object' &&
value.$slice &&
value.$each === undefined
) value.$each = []
if (value !== null && typeof value === 'object' && value.$each) {
if (
Object.keys(value).length >= 3 ||
(Object.keys(value).length === 2 && value.$slice === undefined)
) throw new Error('Can only use $slice in conjunction with $each when $push to array')
if (!Array.isArray(value.$each)) throw new Error('$each requires an array value')
value.$each.forEach(v => {
obj[field].push(v)
})
if (value.$slice === undefined || typeof value.$slice !== 'number') return
if (value.$slice === 0) obj[field] = []
else {
let start
let end
const n = obj[field].length
if (value.$slice < 0) {
start = Math.max(0, n + value.$slice)
end = n
} else if (value.$slice > 0) {
start = 0
end = Math.min(n, value.$slice)
}
obj[field] = obj[field].slice(start, end)
}
} else {
obj[field].push(value)
}
})
}
/**
* Modify a DB object according to an update query
* @param {document} obj
* @param {query} updateQuery
* @return {document}
* @alias module:model.modify
*/
const modify = (obj, updateQuery) => {
const keys = Object.keys(updateQuery)
const firstChars = keys.map(item => item[0])
const dollarFirstChars = firstChars.filter(c => c === '$')
let newDoc
let modifiers
if (keys.indexOf('_id') !== -1 && updateQuery._id !== obj._id) throw new Error('You cannot change a document\'s _id')
if (dollarFirstChars.length !== 0 && dollarFirstChars.length !== firstChars.length) throw new Error('You cannot mix modifiers and normal fields')
if (dollarFirstChars.length === 0) {
// Simply replace the object with the update query contents
newDoc = deepCopy(updateQuery)
newDoc._id = obj._id
} else {
// Apply modifiers
modifiers = uniq(keys)
newDoc = deepCopy(obj)
modifiers.forEach(m => {
if (!modifierFunctions[m]) throw new Error(`Unknown modifier ${m}`)
// Can't rely on Object.keys throwing on non objects since ES6
// Not 100% satisfying as non objects can be interpreted as objects but no false negatives so we can live with it
if (typeof updateQuery[m] !== 'object') throw new Error(`Modifier ${m}'s argument must be an object`)
const keys = Object.keys(updateQuery[m])
keys.forEach(k => {
modifierFunctions[m](newDoc, k, updateQuery[m][k])
})
})
}
// Check result is valid and return it
checkObject(newDoc)
if (obj._id !== newDoc._id) throw new Error('You can\'t change a document\'s _id')
return newDoc
}
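// Illustrative updates (a sketch, not part of the vendored file). modify works on a
// deep copy, so the original document is left untouched:
//   modify({ _id: 'id1', count: 1, tags: ['a'] }, { $inc: { count: 5 }, $push: { tags: 'b' } })
//   // -> { _id: 'id1', count: 6, tags: ['a', 'b'] }
//   modify({ _id: 'id1', count: 1 }, { planet: 'Earth' }) // plain object: full replacement
//   // -> { planet: 'Earth', _id: 'id1' }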
// ==============================================================
// Finding documents
// ==============================================================
/**
* Get a value from object with dot notation
* @param {object} obj
* @param {string} field
* @return {*}
* @alias module:model.getDotValue
*/
const getDotValue = (obj, field) => {
const fieldParts = typeof field === 'string' ? field.split('.') : field
if (!obj) return undefined // field cannot be empty so that means we should return undefined so that nothing can match
if (fieldParts.length === 0) return obj
if (fieldParts.length === 1) return obj[fieldParts[0]]
if (Array.isArray(obj[fieldParts[0]])) {
// If the next field is an integer, return only this item of the array
const i = parseInt(fieldParts[1], 10)
if (typeof i === 'number' && !isNaN(i)) return getDotValue(obj[fieldParts[0]][i], fieldParts.slice(2))
// Return the array of values
return obj[fieldParts[0]].map(el => getDotValue(el, fieldParts.slice(1)))
} else return getDotValue(obj[fieldParts[0]], fieldParts.slice(1))
}
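// Illustrative lookups (a sketch, not part of the vendored file):
//   getDotValue({ a: { b: 2 } }, 'a.b')               // -> 2
//   getDotValue({ a: [{ b: 1 }, { b: 2 }] }, 'a.1.b') // -> 2 (integer part indexes the array)
//   getDotValue({ a: [{ b: 1 }, { b: 2 }] }, 'a.b')   // -> [1, 2] (mapped over the array)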
/**
* Get dot values for either a bunch of fields or just one.
*/
const getDotValues = (obj, fields) => {
if (!Array.isArray(fields)) throw new Error('fields must be an Array')
if (fields.length > 1) {
const key = {}
for (const field of fields) {
key[field] = getDotValue(obj, field)
}
return key
} else return getDotValue(obj, fields[0])
}
/**
* Check whether 'things' are equal
* Things are defined as any native types (string, number, boolean, null, date) and objects
* In the case of object, we check deep equality
* Returns true if they are, false otherwise
* @param {*} a
* @param {*} b
* @return {boolean}
* @alias module:model.areThingsEqual
*/
const areThingsEqual = (a, b) => {
// Strings, booleans, numbers, null
if (
a === null ||
typeof a === 'string' ||
typeof a === 'boolean' ||
typeof a === 'number' ||
b === null ||
typeof b === 'string' ||
typeof b === 'boolean' ||
typeof b === 'number'
) return a === b
// Dates
if (isDate(a) || isDate(b)) return isDate(a) && isDate(b) && a.getTime() === b.getTime()
// Arrays (no match since arrays are used as a $in)
// undefined (no match since they mean field doesn't exist and can't be serialized)
if (
(!(Array.isArray(a) && Array.isArray(b)) && (Array.isArray(a) || Array.isArray(b))) ||
a === undefined || b === undefined
) return false
// General objects (check for deep equality)
// a and b should be objects at this point
let aKeys
let bKeys
try {
aKeys = Object.keys(a)
bKeys = Object.keys(b)
} catch (e) {
return false
}
if (aKeys.length !== bKeys.length) return false
for (const el of aKeys) {
if (bKeys.indexOf(el) === -1) return false
if (!areThingsEqual(a[el], b[el])) return false
}
return true
}
/**
* Check that two values are comparable
* @param {*} a
* @param {*} b
* @return {boolean}
* @private
*/
const areComparable = (a, b) => {
if (
typeof a !== 'string' &&
typeof a !== 'number' &&
!isDate(a) &&
typeof b !== 'string' &&
typeof b !== 'number' &&
!isDate(b)
) return false
if (typeof a !== typeof b) return false
return true
}
/**
* @callback comparisonOperator
* Arithmetic and comparison operators
* @param {*} a Value in the object
* @param {*} b Value in the query
* @return {boolean}
*/
/**
* @enum {comparisonOperator}
*/
const comparisonFunctions = {
/** Lower than */
$lt: (a, b) => areComparable(a, b) && a < b,
/** Lower than or equals */
$lte: (a, b) => areComparable(a, b) && a <= b,
/** Greater than */
$gt: (a, b) => areComparable(a, b) && a > b,
/** Greater than or equals */
$gte: (a, b) => areComparable(a, b) && a >= b,
/** Does not equal */
$ne: (a, b) => a === undefined || !areThingsEqual(a, b),
/** Is in Array */
$in: (a, b) => {
if (!Array.isArray(b)) throw new Error('$in operator called with a non-array')
for (const el of b) {
if (areThingsEqual(a, el)) return true
}
return false
},
/** Is not in Array */
$nin: (a, b) => {
if (!Array.isArray(b)) throw new Error('$nin operator called with a non-array')
return !comparisonFunctions.$in(a, b)
},
/** Matches Regexp */
$regex: (a, b) => {
if (!isRegExp(b)) throw new Error('$regex operator called with non regular expression')
if (typeof a !== 'string') return false
else return b.test(a)
},
/** Returns true if field exists */
$exists: (a, b) => {
// This will be true for all values of b except false, null, undefined and 0
// That's strange behaviour (we should only use true/false) but that's the way Mongo does it...
if (b || b === '') b = true
else b = false
if (a === undefined) return !b
else return b
},
/** Specific to Arrays, returns true if a length equals b */
$size: (a, b) => {
if (!Array.isArray(a)) return false
if (b % 1 !== 0) throw new Error('$size operator called without an integer')
return a.length === b
},
/** Specific to Arrays, returns true if some elements of a match the query b */
$elemMatch: (a, b) => {
if (!Array.isArray(a)) return false
return a.some(el => match(el, b))
}
}
const arrayComparisonFunctions = { $size: true, $elemMatch: true }
/**
* @enum
*/
const logicalOperators = {
/**
* Match any of the subqueries
* @param {document} obj
* @param {query[]} query
* @return {boolean}
*/
$or: (obj, query) => {
if (!Array.isArray(query)) throw new Error('$or operator used without an array')
for (let i = 0; i < query.length; i += 1) {
if (match(obj, query[i])) return true
}
return false
},
/**
* Match all of the subqueries
* @param {document} obj
* @param {query[]} query
* @return {boolean}
*/
$and: (obj, query) => {
if (!Array.isArray(query)) throw new Error('$and operator used without an array')
for (let i = 0; i < query.length; i += 1) {
if (!match(obj, query[i])) return false
}
return true
},
/**
* Inverted match of the query
* @param {document} obj
* @param {query} query
* @return {boolean}
*/
$not: (obj, query) => !match(obj, query),
/**
* @callback whereCallback
* @param {document} obj
* @return {boolean}
*/
/**
* Use a function to match
* @param {document} obj
* @param {whereCallback} fn
* @return {boolean}
*/
$where: (obj, fn) => {
if (typeof fn !== 'function') throw new Error('$where operator used without a function')
const result = fn.call(obj)
if (typeof result !== 'boolean') throw new Error('$where function must return boolean')
return result
}
}
/**
* Tell if a given document matches a query
* @param {document} obj Document to check
* @param {query} query
* @return {boolean}
* @alias module:model.match
*/
const match = (obj, query) => {
// Primitive query against a primitive type
// This is a bit of a hack since we construct an object with an arbitrary key only to dereference it later
// But I don't have time for a cleaner implementation now
if (isPrimitiveType(obj) || isPrimitiveType(query)) return matchQueryPart({ needAKey: obj }, 'needAKey', query)
// Normal query
for (const queryKey in query) {
if (Object.prototype.hasOwnProperty.call(query, queryKey)) {
const queryValue = query[queryKey]
if (queryKey[0] === '$') {
if (!logicalOperators[queryKey]) throw new Error(`Unknown logical operator ${queryKey}`)
if (!logicalOperators[queryKey](obj, queryValue)) return false
} else if (!matchQueryPart(obj, queryKey, queryValue)) return false
}
}
return true
}
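// Illustrative matches (a sketch, not part of the vendored file):
//   match({ planet: 'Mars' }, { planet: 'Mars' })     // -> true (basic equality)
//   match({ age: 25 }, { age: { $gt: 18, $lt: 60 } }) // -> true (every operator must pass)
//   match({ tags: ['a', 'b'] }, { tags: 'a' })        // -> true (one array element matches)
//   match({ a: 1 }, { $not: { a: 2 } })               // -> true (logical operator)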
/**
* Match an object against a specific { key: value } part of a query
* if the treatObjAsValue flag is set, don't try to match every part separately, but the array as a whole
* @param {object} obj
* @param {string} queryKey
* @param {*} queryValue
* @param {boolean} [treatObjAsValue=false]
* @return {boolean}
* @private
*/
function matchQueryPart (obj, queryKey, queryValue, treatObjAsValue) {
const objValue = getDotValue(obj, queryKey)
// Check if the value is an array if we don't force a treatment as value
if (Array.isArray(objValue) && !treatObjAsValue) {
// If the queryValue is an array, try to perform an exact match
if (Array.isArray(queryValue)) return matchQueryPart(obj, queryKey, queryValue, true)
// Check if we are using an array-specific comparison function
if (queryValue !== null && typeof queryValue === 'object' && !isRegExp(queryValue)) {
for (const key in queryValue) {
if (Object.prototype.hasOwnProperty.call(queryValue, key) && arrayComparisonFunctions[key]) { return matchQueryPart(obj, queryKey, queryValue, true) }
}
}
// If not, treat it as an array of { obj, query } where there needs to be at least one match
for (const el of objValue) {
if (matchQueryPart({ k: el }, 'k', queryValue)) return true // k here could be any string
}
return false
}
// queryValue is an actual object. Determine whether it contains comparison operators
// or only normal fields. Mixed objects are not allowed
if (queryValue !== null && typeof queryValue === 'object' && !isRegExp(queryValue) && !Array.isArray(queryValue)) {
const keys = Object.keys(queryValue)
const firstChars = keys.map(item => item[0])
const dollarFirstChars = firstChars.filter(c => c === '$')
if (dollarFirstChars.length !== 0 && dollarFirstChars.length !== firstChars.length) throw new Error('You cannot mix operators and normal fields')
// queryValue is an object of this form: { $comparisonOperator1: value1, ... }
if (dollarFirstChars.length > 0) {
for (const key of keys) {
if (!comparisonFunctions[key]) throw new Error(`Unknown comparison function ${key}`)
if (!comparisonFunctions[key](objValue, queryValue[key])) return false
}
return true
}
}
// Using regular expressions with basic querying
if (isRegExp(queryValue)) return comparisonFunctions.$regex(objValue, queryValue)
// queryValue is either a native value or a normal object
// Basic matching is possible
return areThingsEqual(objValue, queryValue)
}
// Interface
module.exports.serialize = serialize
module.exports.deserialize = deserialize
module.exports.deepCopy = deepCopy
module.exports.checkObject = checkObject
module.exports.isPrimitiveType = isPrimitiveType
module.exports.modify = modify
module.exports.getDotValue = getDotValue
module.exports.getDotValues = getDotValues
module.exports.match = match
module.exports.areThingsEqual = areThingsEqual
module.exports.compareThings = compareThings

node_modules/@seald-io/nedb/lib/persistence.js generated vendored Executable file

@@ -0,0 +1,380 @@
const { deprecate } = require('util')
const byline = require('./byline')
const customUtils = require('./customUtils.js')
const Index = require('./indexes.js')
const model = require('./model.js')
const storage = require('./storage.js')
const DEFAULT_DIR_MODE = 0o755
const DEFAULT_FILE_MODE = 0o644
/**
* Under the hood, NeDB's persistence uses an append-only format, meaning that all
* updates and deletes actually result in lines added at the end of the datafile,
* for performance reasons. The database is automatically compacted (i.e. put back
* in the one-line-per-document format) every time you load each database within
* your application.
*
* Persistence handles the compaction exposed in the Datastore {@link Datastore#compactDatafileAsync},
* {@link Datastore#setAutocompactionInterval}.
*
* Since version 3.0.0, using {@link Datastore.persistence} methods manually is deprecated.
*
* Compaction takes a bit of time (not too much: 130ms for 50k
* records on a typical development machine) and no other operation can happen when
* it does, so most projects actually don't need to use it.
*
* Compaction will also immediately remove any documents whose data line has become
* corrupted, assuming that the total percentage of all corrupted documents in that
* database still falls below the specified `corruptAlertThreshold` option's value.
*
* Durability works similarly to major databases: compaction forces the OS to
* physically flush data to disk, while appends to the data file do not (the OS is
* responsible for flushing the data). That guarantees that a server crash can
* never cause complete data loss, while preserving performance. The worst that can
* happen is a crash between two syncs, causing a loss of all data between the two
* syncs. Usually syncs are 30 seconds apart so that's at most 30 seconds of
* data. [This post by Antirez on Redis persistence](http://oldblog.antirez.com/post/redis-persistence-demystified.html)
* explains this in more detail, NeDB being very close to Redis AOF persistence
* with `appendfsync` option set to `no`.
*/
class Persistence {
/**
* Create a new Persistence object for database options.db
* @param {Datastore} options.db
* @param {Number} [options.corruptAlertThreshold] Optional, threshold after which an alert is thrown if too much data is corrupt
* @param {serializationHook} [options.afterSerialization] Hook you can use to transform data after it was serialized and before it is written to disk.
* @param {serializationHook} [options.beforeDeserialization] Inverse of `afterSerialization`.
* @param {object} [options.modes] Modes to use for FS permissions. Will not work on Windows.
* @param {number} [options.modes.fileMode=0o644] Mode to use for files.
* @param {number} [options.modes.dirMode=0o755] Mode to use for directories.
* @param {boolean} [options.testSerializationHooks=true] Whether to test the serialization hooks or not, might be CPU-intensive
*/
constructor (options) {
this.db = options.db
this.inMemoryOnly = this.db.inMemoryOnly
this.filename = this.db.filename
this.corruptAlertThreshold = options.corruptAlertThreshold !== undefined ? options.corruptAlertThreshold : 0.1
this.modes = options.modes !== undefined ? options.modes : { fileMode: DEFAULT_FILE_MODE, dirMode: DEFAULT_DIR_MODE }
if (this.modes.fileMode === undefined) this.modes.fileMode = DEFAULT_FILE_MODE
if (this.modes.dirMode === undefined) this.modes.dirMode = DEFAULT_DIR_MODE
if (
!this.inMemoryOnly &&
this.filename &&
this.filename.charAt(this.filename.length - 1) === '~'
) throw new Error('The datafile name can\'t end with a ~, which is reserved for crash safe backup files')
// After serialization and before deserialization hooks with some basic sanity checks
if (
options.afterSerialization &&
!options.beforeDeserialization
) throw new Error('Serialization hook defined but deserialization hook undefined, cautiously refusing to start NeDB to prevent dataloss')
if (
!options.afterSerialization &&
options.beforeDeserialization
) throw new Error('Serialization hook undefined but deserialization hook defined, cautiously refusing to start NeDB to prevent dataloss')
this.afterSerialization = options.afterSerialization || (s => s)
this.beforeDeserialization = options.beforeDeserialization || (s => s)
if (options.testSerializationHooks === undefined || options.testSerializationHooks) {
for (let i = 1; i < 30; i += 1) {
for (let j = 0; j < 10; j += 1) {
const randomString = customUtils.uid(i)
if (this.beforeDeserialization(this.afterSerialization(randomString)) !== randomString) {
throw new Error('beforeDeserialization is not the reverse of afterSerialization, cautiously refusing to start NeDB to prevent dataloss')
}
}
}
}
}
/**
* Internal version without using the {@link Datastore#executor} of {@link Datastore#compactDatafileAsync}, use it instead.
* @return {Promise<void>}
* @private
*/
async persistCachedDatabaseAsync () {
const lines = []
if (this.inMemoryOnly) return
this.db.getAllData().forEach(doc => {
lines.push(this.afterSerialization(model.serialize(doc)))
})
Object.keys(this.db.indexes).forEach(fieldName => {
if (fieldName !== '_id') { // The special _id index is managed by datastore.js, the others need to be persisted
lines.push(this.afterSerialization(model.serialize({
$$indexCreated: {
fieldName: this.db.indexes[fieldName].fieldName,
unique: this.db.indexes[fieldName].unique,
sparse: this.db.indexes[fieldName].sparse
}
})))
}
})
await storage.crashSafeWriteFileLinesAsync(this.filename, lines, this.modes)
this.db.emit('compaction.done')
}
/**
* @see Datastore#compactDatafile
* @deprecated
* @param {NoParamCallback} [callback = () => {}]
* @see Persistence#compactDatafileAsync
*/
compactDatafile (callback) {
deprecate(_callback => this.db.compactDatafile(_callback), '@seald-io/nedb: calling Datastore#persistence#compactDatafile is deprecated, please use Datastore#compactDatafile, it will be removed in the next major version.')(callback)
}
/**
* @see Datastore#setAutocompactionInterval
* @deprecated
*/
setAutocompactionInterval (interval) {
deprecate(_interval => this.db.setAutocompactionInterval(_interval), '@seald-io/nedb: calling Datastore#persistence#setAutocompactionInterval is deprecated, please use Datastore#setAutocompactionInterval, it will be removed in the next major version.')(interval)
}
/**
* @see Datastore#stopAutocompaction
* @deprecated
*/
stopAutocompaction () {
deprecate(() => this.db.stopAutocompaction(), '@seald-io/nedb: calling Datastore#persistence#stopAutocompaction is deprecated, please use Datastore#stopAutocompaction, it will be removed in the next major version.')()
}
/**
* Persist new state for the given newDocs (can be insertion, update or removal)
* Use an append-only format
*
* Do not use directly, it should only be used by a {@link Datastore} instance.
* @param {document[]} newDocs Can be empty if no doc was updated/removed
* @return {Promise}
* @private
*/
async persistNewStateAsync (newDocs) {
let toPersist = ''
// In-memory only datastore
if (this.inMemoryOnly) return
newDocs.forEach(doc => {
toPersist += this.afterSerialization(model.serialize(doc)) + '\n'
})
if (toPersist.length === 0) return
await storage.appendFileAsync(this.filename, toPersist, { encoding: 'utf8', mode: this.modes.fileMode })
}
/**
* @typedef rawIndex
* @property {string} fieldName
* @property {boolean} [unique]
* @property {boolean} [sparse]
*/
/**
* From a database's raw data, return the corresponding machine understandable collection.
*
* Do not use directly, it should only be used by a {@link Datastore} instance.
* @param {string} rawData database file
* @return {{data: document[], indexes: Object.<string, rawIndex>}}
* @private
*/
treatRawData (rawData) {
const data = rawData.split('\n')
const dataById = {}
const indexes = {}
let dataLength = data.length
// Last line of every data file is usually blank so not really corrupt
let corruptItems = 0
for (const datum of data) {
if (datum === '') { dataLength--; continue }
try {
const doc = model.deserialize(this.beforeDeserialization(datum))
if (doc._id) {
if (doc.$$deleted === true) delete dataById[doc._id]
else dataById[doc._id] = doc
} else if (doc.$$indexCreated && doc.$$indexCreated.fieldName != null) indexes[doc.$$indexCreated.fieldName] = doc.$$indexCreated
else if (typeof doc.$$indexRemoved === 'string') delete indexes[doc.$$indexRemoved]
} catch (e) {
corruptItems += 1
}
}
// A bit lenient on corruption
if (dataLength > 0) {
const corruptionRate = corruptItems / dataLength
if (corruptionRate > this.corruptAlertThreshold) {
const error = new Error(`${Math.floor(100 * corruptionRate)}% of the data file is corrupt, more than given corruptAlertThreshold (${Math.floor(100 * this.corruptAlertThreshold)}%). Cautiously refusing to start NeDB to prevent dataloss.`)
error.corruptionRate = corruptionRate
error.corruptItems = corruptItems
error.dataLength = dataLength
throw error
}
}
const tdata = Object.values(dataById)
return { data: tdata, indexes }
}
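// Illustrative datafile content as parsed by treatRawData above (made-up _id; a sketch,
// not part of the vendored file). Each line is one serialized event; later lines win,
// and $$deleted / $$indexCreated / $$indexRemoved lines mutate the reconstructed state
// instead of adding documents:
//   {"hello":"world","_id":"id1"}
//   {"$$indexCreated":{"fieldName":"hello","unique":false,"sparse":false}}
//   {"hello":"earth","_id":"id1"}    <- supersedes the first line for id1
//   {"$$deleted":true,"_id":"id1"}   <- removes id1 from the collection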
/**
* From a database's raw data stream, return the corresponding machine understandable collection
* Is only used by a {@link Datastore} instance.
*
* Is only used in the Node.js version, since [React-Native]{@link module:storageReactNative} &
* [browser]{@link module:storageBrowser} storage modules don't provide an equivalent of
* {@link module:storage.readFileStream}.
*
* Do not use directly, it should only be used by a {@link Datastore} instance.
* @param {Readable} rawStream
* @return {Promise<{data: document[], indexes: Object.<string, rawIndex>}>}
* @async
* @private
*/
treatRawStreamAsync (rawStream) {
return new Promise((resolve, reject) => {
const dataById = {}
const indexes = {}
let corruptItems = 0
const lineStream = byline(rawStream)
let dataLength = 0
lineStream.on('data', (line) => {
if (line === '') return
try {
const doc = model.deserialize(this.beforeDeserialization(line))
if (doc._id) {
if (doc.$$deleted === true) delete dataById[doc._id]
else dataById[doc._id] = doc
} else if (doc.$$indexCreated && doc.$$indexCreated.fieldName != null) indexes[doc.$$indexCreated.fieldName] = doc.$$indexCreated
else if (typeof doc.$$indexRemoved === 'string') delete indexes[doc.$$indexRemoved]
} catch (e) {
corruptItems += 1
}
dataLength++
})
lineStream.on('end', () => {
// A bit lenient on corruption
if (dataLength > 0) {
const corruptionRate = corruptItems / dataLength
if (corruptionRate > this.corruptAlertThreshold) {
const error = new Error(`${Math.floor(100 * corruptionRate)}% of the data file is corrupt, more than given corruptAlertThreshold (${Math.floor(100 * this.corruptAlertThreshold)}%). Cautiously refusing to start NeDB to prevent dataloss.`)
error.corruptionRate = corruptionRate
error.corruptItems = corruptItems
error.dataLength = dataLength
reject(error)
return
}
}
const data = Object.values(dataById)
resolve({ data, indexes })
})
lineStream.on('error', function (err) {
reject(err)
})
})
}
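// Editorial sketch (not part of the library): the streaming variant yields the
// same result as treatRawData, reading line by line instead of all at once.
// A hypothetical equivalence check on a Persistence instance `p`, faking a
// file stream with Node's Readable:
//   const { Readable } = require('stream')
//   const raw = '{"_id":"1","a":1}\n{"_id":"1","a":2}\n'
//   const fromStream = await p.treatRawStreamAsync(Readable.from(raw))
//   const fromString = p.treatRawData(raw)
//   // assert.deepStrictEqual(fromStream, fromString)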
/**
* Load the database
* 1) Create all indexes
* 2) Insert all data
* 3) Compact the database
*
* This means pulling data out of the data file or creating it if it doesn't exist
* Also, all data is persisted right away, which has the effect of compacting the database file
* This operation is very quick at startup for a big collection (60ms for ~10k docs)
*
* Do not use directly as it does not use the [Executor]{@link Datastore.executor}, use {@link Datastore#loadDatabaseAsync} instead.
* @return {Promise<void>}
* @private
*/
async loadDatabaseAsync () {
this.db._resetIndexes()
// In-memory only datastore
if (this.inMemoryOnly) return
await Persistence.ensureParentDirectoryExistsAsync(this.filename, this.modes.dirMode)
await storage.ensureDatafileIntegrityAsync(this.filename, this.modes.fileMode)
let treatedData
if (storage.readFileStream) {
// Server side
const fileStream = storage.readFileStream(this.filename, { encoding: 'utf8', mode: this.modes.fileMode })
treatedData = await this.treatRawStreamAsync(fileStream)
} else {
// Browser
const rawData = await storage.readFileAsync(this.filename, { encoding: 'utf8', mode: this.modes.fileMode })
treatedData = this.treatRawData(rawData)
}
// Recreate all indexes in the datafile
Object.keys(treatedData.indexes).forEach(key => {
this.db.indexes[key] = new Index(treatedData.indexes[key])
})
// Fill cached database (i.e. all indexes) with data
try {
this.db._resetIndexes(treatedData.data)
} catch (e) {
this.db._resetIndexes() // Rollback any index which didn't fail
throw e
}
await this.db.persistence.persistCachedDatabaseAsync()
this.db.executor.processBuffer()
}
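// Editorial sketch (not part of the library): the observable effect of loading,
// assuming a datafile that contains stale journal entries. After loadDatabaseAsync,
// indexes are rebuilt and the file is compacted to one line per live document:
//   before: {"_id":"1","a":1}\n{"_id":"1","a":2}\n{"_id":"2","$$deleted":true}\n
//   after:  {"_id":"1","a":2}\n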
/**
* See {@link Datastore#dropDatabaseAsync}. This function uses {@link Datastore#executor} internally. Decorating this
* function with an {@link Executor#pushAsync} will result in a deadlock.
* @return {Promise<void>}
* @private
* @see Datastore#dropDatabaseAsync
*/
async dropDatabaseAsync () {
this.db.stopAutocompaction() // stop autocompaction
this.db.executor.ready = false // prevent queuing new tasks
this.db.executor.resetBuffer() // remove pending buffered tasks
await this.db.executor.queue.guardian // wait for the ongoing tasks to end
// remove indexes (which means remove data from memory)
this.db.indexes = {}
// add back the _id index, otherwise subsequent operations would fail
this.db.indexes._id = new Index({ fieldName: '_id', unique: true })
// reset TTL on indexes
this.db.ttlIndexes = {}
// remove datastore file
if (!this.db.inMemoryOnly) {
await this.db.executor.pushAsync(async () => {
if (await storage.existsAsync(this.filename)) await storage.unlinkAsync(this.filename)
}, true)
}
}
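// Editorial note (not part of the library): this function awaits
// this.db.executor.queue.guardian, i.e. the very queue that pushAsync would put
// it on, hence the deadlock warning above. Callers use the public wrapper on a
// hypothetical datastore instance `db`:
//   await db.dropDatabaseAsync()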
/**
* Check if a file's parent directory exists and create it on the fly if it is not the case.
* @param {string} dir path of the file whose parent directory is ensured
* @param {number} [mode=0o777]
* @return {Promise<void>}
* @private
*/
static async ensureParentDirectoryExistsAsync (dir, mode = DEFAULT_DIR_MODE) {
return storage.ensureParentDirectoryExistsAsync(dir, mode)
}
}
// Interface
module.exports = Persistence

317
node_modules/@seald-io/nedb/lib/storage.js generated vendored Executable file

@@ -0,0 +1,317 @@
/**
* Way data is stored for this database.
* This version is the Node.js version.
* It's essentially fs, mkdirp and crash safe write and read functions.
*
* @see module:storageBrowser
* @see module:storageReactNative
* @module storage
* @private
*/
const fs = require('fs')
const fsPromises = fs.promises
const path = require('path')
const { Readable } = require('stream')
const DEFAULT_DIR_MODE = 0o755
const DEFAULT_FILE_MODE = 0o644
/**
* Returns true if file exists.
* @param {string} file
* @return {Promise<boolean>}
* @async
* @alias module:storage.existsAsync
* @see module:storage.exists
*/
const existsAsync = file => fsPromises.access(file, fs.constants.F_OK).then(() => true, () => false)
/**
* Node.js' [fsPromises.rename]{@link https://nodejs.org/api/fs.html#fspromisesrenameoldpath-newpath}
* @function
* @param {string} oldPath
* @param {string} newPath
* @return {Promise<void>}
* @alias module:storage.renameAsync
* @async
*/
const renameAsync = fsPromises.rename
/**
* Node.js' [fsPromises.writeFile]{@link https://nodejs.org/api/fs.html#fspromiseswritefilefile-data-options}.
* @function
* @param {string} path
* @param {string} data
* @param {object} [options]
* @return {Promise<void>}
* @alias module:storage.writeFileAsync
* @async
*/
const writeFileAsync = fsPromises.writeFile
/**
* Node.js' [fs.createWriteStream]{@link https://nodejs.org/api/fs.html#fscreatewritestreampath-options}.
* @function
* @param {string} path
* @param {Object} [options]
* @return {fs.WriteStream}
* @alias module:storage.writeFileStream
*/
const writeFileStream = fs.createWriteStream
/**
* Node.js' [fsPromises.unlink]{@link https://nodejs.org/api/fs.html#fspromisesunlinkpath}.
* @function
* @param {string} path
* @return {Promise<void>}
* @async
* @alias module:storage.unlinkAsync
*/
const unlinkAsync = fsPromises.unlink
/**
* Node.js' [fsPromises.appendFile]{@link https://nodejs.org/api/fs.html#fspromisesappendfilepath-data-options}.
* @function
* @param {string} path
* @param {string} data
* @param {object} [options]
* @return {Promise<void>}
* @alias module:storage.appendFileAsync
* @async
*/
const appendFileAsync = fsPromises.appendFile
/**
* Node.js' [fsPromises.readFile]{@link https://nodejs.org/api/fs.html#fspromisesreadfilepath-options}.
* @function
* @param {string} path
* @param {object} [options]
* @return {Promise<Buffer>}
* @alias module:storage.readFileAsync
* @async
*/
const readFileAsync = fsPromises.readFile
/**
* Node.js' [fs.createReadStream]{@link https://nodejs.org/api/fs.html#fscreatereadstreampath-options}.
* @function
* @param {string} path
* @param {Object} [options]
* @return {fs.ReadStream}
* @alias module:storage.readFileStream
*/
const readFileStream = fs.createReadStream
/**
* Node.js' [fsPromises.mkdir]{@link https://nodejs.org/api/fs.html#fspromisesmkdirpath-options}.
* @function
* @param {string} path
* @param {object} options
* @return {Promise<void|string>}
* @alias module:storage.mkdirAsync
* @async
*/
const mkdirAsync = fsPromises.mkdir
/**
* Removes file if it exists.
* @param {string} file
* @return {Promise<void>}
* @alias module:storage.ensureFileDoesntExistAsync
* @async
*/
const ensureFileDoesntExistAsync = async file => {
if (await existsAsync(file)) await unlinkAsync(file)
}
/**
* Flush data in OS buffer to storage if corresponding option is set.
* @param {object|string} options If options is a string, it is treated as the path of the file (not dir) whose flush is requested
* @param {string} [options.filename]
* @param {boolean} [options.isDir = false]
* @param {number} [options.mode = 0o644]
* @return {Promise<void>}
* @alias module:storage.flushToStorageAsync
* @async
*/
const flushToStorageAsync = async (options) => {
let filename
let flags
let mode
if (typeof options === 'string') {
filename = options
flags = 'r+'
mode = DEFAULT_FILE_MODE
} else {
filename = options.filename
flags = options.isDir ? 'r' : 'r+'
mode = options.mode !== undefined ? options.mode : DEFAULT_FILE_MODE
}
/**
* Some OSes and/or storage backends (augmented Node fs) do not support fsync (FlushFileBuffers) on directories,
* or calling open() on directories at all. Flushing fails silently in this case, as detected by the following heuristics:
* + isDir === true
* |-- open(<dir>) -> (err.code === 'EISDIR'): can't call open() on directories (e.g. BrowserFS)
* `-- fsync(<dir>) -> (errFS.code === 'EPERM' || errFS.code === 'EISDIR'): can't fsync directory: permissions are checked
* on open(); an EPERM error should only occur on fsync incapability, not for a general lack of permissions (e.g. Windows)
*
* We can live with this, as it cannot cause complete data loss except in the very rare event of a crash happening
* the very first time the database is loaded.
*/
let filehandle, errorOnFsync, errorOnClose
try {
filehandle = await fsPromises.open(filename, flags, mode)
try {
await filehandle.sync()
} catch (errFS) {
errorOnFsync = errFS
}
} catch (error) {
if (error.code !== 'EISDIR' || !options.isDir) throw error
} finally {
try {
// filehandle is undefined if open() itself failed (e.g. tolerated EISDIR on a directory)
if (filehandle !== undefined) await filehandle.close()
} catch (errC) {
errorOnClose = errC
}
}
// optional chaining guards against one of the two errors being undefined
if ((errorOnFsync || errorOnClose) && !((errorOnFsync?.code === 'EPERM' || errorOnClose?.code === 'EISDIR') && options.isDir)) {
const e = new Error('Failed to flush to storage')
e.errorOnFsync = errorOnFsync
e.errorOnClose = errorOnClose
throw e
}
}
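// Editorial sketch (not part of the library): the two calling conventions
// accepted above, with hypothetical paths:
//   await flushToStorageAsync('users.db')                        // flush a file
//   await flushToStorageAsync({ filename: 'data', isDir: true }) // flush a directory
// On platforms that cannot fsync directories, the second call tolerates
// EPERM/EISDIR per the heuristics above instead of throwing.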
/**
* Fully write or rewrite the datafile.
* @param {string} filename
* @param {string[]} lines
* @param {number} [mode=0o644]
* @return {Promise<void>}
* @alias module:storage.writeFileLinesAsync
* @async
*/
const writeFileLinesAsync = (filename, lines, mode = DEFAULT_FILE_MODE) => new Promise((resolve, reject) => {
try {
const stream = writeFileStream(filename, { mode })
const readable = Readable.from(lines)
readable.on('data', (line) => {
try {
stream.write(line + '\n')
} catch (err) {
reject(err)
}
})
readable.on('end', () => {
stream.close(err => {
if (err) reject(err)
else resolve()
})
})
readable.on('error', err => {
reject(err)
})
stream.on('error', err => {
reject(err)
})
} catch (err) {
reject(err)
}
})
/**
* Fully write or rewrite the datafile, immune to crashes during the write operation (data will not be lost).
* @param {string} filename
* @param {string[]} lines
* @param {object} [modes={ fileMode: 0o644, dirMode: 0o755 }]
* @param {number} modes.dirMode
* @param {number} modes.fileMode
* @return {Promise<void>}
* @alias module:storage.crashSafeWriteFileLinesAsync
*/
const crashSafeWriteFileLinesAsync = async (filename, lines, modes = { fileMode: DEFAULT_FILE_MODE, dirMode: DEFAULT_DIR_MODE }) => {
const tempFilename = filename + '~'
await flushToStorageAsync({ filename: path.dirname(filename), isDir: true, mode: modes.dirMode })
const exists = await existsAsync(filename)
if (exists) await flushToStorageAsync({ filename, mode: modes.fileMode })
await writeFileLinesAsync(tempFilename, lines, modes.fileMode)
await flushToStorageAsync({ filename: tempFilename, mode: modes.fileMode })
await renameAsync(tempFilename, filename)
await flushToStorageAsync({ filename: path.dirname(filename), isDir: true, mode: modes.dirMode })
}
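// Editorial sketch (not part of the library): the crash-safe protocol above,
// step by step, for a hypothetical 'users.db':
//   1. fsync the containing directory         -> existing entries are durable
//   2. fsync 'users.db' if it already exists  -> previous state is durable
//   3. write the new lines to 'users.db~'     -> the live file is never touched
//   4. fsync 'users.db~'                      -> the new state is durable
//   5. rename 'users.db~' -> 'users.db'       -> atomic switch on POSIX filesystems
//   6. fsync the containing directory         -> the rename itself is durable
// A crash before step 5 leaves the old file intact; a crash after it leaves the new one.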
/**
* Ensure the datafile contains all the data, even if there was a crash during a full file write.
* @param {string} filename
* @param {number} [mode=0o644]
* @return {Promise<void>}
* @alias module:storage.ensureDatafileIntegrityAsync
*/
const ensureDatafileIntegrityAsync = async (filename, mode = DEFAULT_FILE_MODE) => {
const tempFilename = filename + '~'
const filenameExists = await existsAsync(filename)
// Write was successful
if (filenameExists) return
const oldFilenameExists = await existsAsync(tempFilename)
// New database
if (!oldFilenameExists) await writeFileAsync(filename, '', { encoding: 'utf8', mode })
// Write failed, use old version
else await renameAsync(tempFilename, filename)
}
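// Editorial sketch (not part of the library): the three recovery cases handled
// above, for a hypothetical 'users.db':
//   'users.db' exists                         -> last write completed, nothing to do
//   neither 'users.db' nor 'users.db~' exists -> new database, create an empty datafile
//   only 'users.db~' exists                   -> crash between write and rename, promote the temp file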
/**
* Check if a file's parent directory exists and create it on the fly if it is not the case.
* @param {string} filename
* @param {number} mode
* @return {Promise<void>}
* @private
*/
const ensureParentDirectoryExistsAsync = async (filename, mode) => {
const dir = path.dirname(filename)
const parsedDir = path.parse(path.resolve(dir))
// this is because on Windows mkdir throws a permission error when called on the root directory of a volume
if (process.platform !== 'win32' || parsedDir.dir !== parsedDir.root || parsedDir.base !== '') {
await mkdirAsync(dir, { recursive: true, mode })
}
}
// Interface
module.exports.existsAsync = existsAsync
module.exports.renameAsync = renameAsync
module.exports.writeFileAsync = writeFileAsync
module.exports.writeFileLinesAsync = writeFileLinesAsync
module.exports.crashSafeWriteFileLinesAsync = crashSafeWriteFileLinesAsync
module.exports.appendFileAsync = appendFileAsync
module.exports.readFileAsync = readFileAsync
module.exports.unlinkAsync = unlinkAsync
module.exports.mkdirAsync = mkdirAsync
module.exports.readFileStream = readFileStream
module.exports.flushToStorageAsync = flushToStorageAsync
module.exports.ensureDatafileIntegrityAsync = ensureDatafileIntegrityAsync
module.exports.ensureFileDoesntExistAsync = ensureFileDoesntExistAsync
module.exports.ensureParentDirectoryExistsAsync = ensureParentDirectoryExistsAsync

84
node_modules/@seald-io/nedb/lib/utils.js generated vendored Normal file

@@ -0,0 +1,84 @@
/**
* Utility functions for all environments.
* This replaces the underscore dependency.
*
* @module utils
* @private
*/
/**
* @callback IterateeFunction
* @param {*} arg
* @return {*}
*/
/**
* Produces a duplicate-free version of the array, using === to test object equality; only the first
* occurrence of each value is kept. If you want to compute unique items based on a transformation, pass an iteratee
* function; note that in that case the implementation is Map-based, so the last item seen for each key is returned.
*
* Heavily inspired by {@link https://underscorejs.org/#uniq}.
* @param {Array} array
* @param {IterateeFunction} [iteratee] transformation applied to every element before checking for duplicates. This will not
* transform the items in the result.
* @return {Array}
* @alias module:utils.uniq
*/
const uniq = (array, iteratee) => {
if (iteratee) return [...(new Map(array.map(x => [iteratee(x), x]))).values()]
else return [...new Set(array)]
}
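// Editorial sketches (not part of the library):
//   uniq([1, 2, 1, 3])                          // => [1, 2, 3] (first occurrences, via Set)
//   uniq(['a', 'A', 'b'], s => s.toLowerCase()) // => ['A', 'b'] (per key, the Map keeps the last item seen)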
/**
* Returns true if arg is an Object. Note that JavaScript arrays and functions are objects, while (normal) strings
* and numbers are not.
*
* Heavily inspired by {@link https://underscorejs.org/#isObject}.
* @param {*} arg
* @return {boolean}
* @alias module:utils.isObject
*/
const isObject = arg => typeof arg === 'object' && arg !== null
/**
* Returns true if d is a Date.
*
* Heavily inspired by {@link https://underscorejs.org/#isDate}.
* @param {*} d
* @return {boolean}
* @alias module:utils.isDate
*/
const isDate = d => isObject(d) && Object.prototype.toString.call(d) === '[object Date]'
/**
* Returns true if re is a RegExp.
*
* Heavily inspired by {@link https://underscorejs.org/#isRegExp}.
* @param {*} re
* @return {boolean}
* @alias module:utils.isRegExp
*/
const isRegExp = re => isObject(re) && Object.prototype.toString.call(re) === '[object RegExp]'
/**
* Return a copy of the object filtered using the given keys.
*
* @param {object} object
* @param {string[]} keys
* @return {object}
* @alias module:utils.pick
*/
const pick = (object, keys) => {
return keys.reduce((obj, key) => {
if (object && Object.prototype.hasOwnProperty.call(object, key)) {
obj[key] = object[key]
}
return obj
}, {})
}
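// Editorial sketch (not part of the library):
//   pick({ a: 1, b: 2, c: 3 }, ['a', 'c', 'missing']) // => { a: 1, c: 3 }
// Keys absent from the source object are simply skipped.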
/**
* Returns a predicate for [key, value] entries that keeps only entries whose key is one of the given index
* names and whose value is of an indexable type (string, number, boolean, Date or null).
* @param {string[]} indexNames
* @return {function([string, *]): boolean}
*/
const filterIndexNames = (indexNames) => ([k, v]) => !!(typeof v === 'string' || typeof v === 'number' || typeof v === 'boolean' || isDate(v) || v === null) &&
indexNames.includes(k)
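// Editorial sketch (not part of the library): keeping only the indexable fields of a document:
//   Object.fromEntries(Object.entries({ name: 'a', age: 3, tags: ['x'] }).filter(filterIndexNames(['name', 'tags'])))
//   // => { name: 'a' } ('age' is not an index name; 'tags' is an array, hence not an indexable value)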
module.exports.uniq = uniq
module.exports.isDate = isDate
module.exports.isRegExp = isRegExp
module.exports.pick = pick
module.exports.filterIndexNames = filterIndexNames

48
node_modules/@seald-io/nedb/lib/waterfall.js generated vendored Normal file

@@ -0,0 +1,48 @@
/**
* Responsible for sequentially executing actions on the database
* @private
*/
class Waterfall {
/**
* Instantiate a new Waterfall.
*/
constructor () {
/**
* This is the internal Promise object which resolves when all the tasks of the `Waterfall` are done.
*
* It will change any time `this.waterfall` is called.
*
* @type {Promise}
*/
this.guardian = Promise.resolve()
}
/**
* Returns a decorated version of `func`: each call is chained onto the guardian promise,
* so decorated tasks execute sequentially, in call order.
* @param {AsyncFunction} func
* @return {AsyncFunction}
*/
waterfall (func) {
return (...args) => {
this.guardian = this.guardian.then(() => {
return func(...args)
.then(result => ({ error: false, result }), result => ({ error: true, result }))
})
return this.guardian.then(({ error, result }) => {
if (error) return Promise.reject(result)
else return Promise.resolve(result)
})
}
}
/**
* Shorthand for chaining a promise to the Waterfall
* @param {Promise} promise
* @return {Promise}
*/
chain (promise) {
return this.waterfall(() => promise)()
}
}
module.exports = Waterfall
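
Editorial sketch (not part of the library): how a Waterfall serializes concurrent callers; names below are illustrative.

const Waterfall = require('./waterfall')
const w = new Waterfall()

const task = w.waterfall(async label => {
  // stand-in for a datastore operation
  await new Promise(resolve => setTimeout(resolve, 10))
  return label
})

// Both calls are chained on the same guardian, so 'first' fully settles before
// 'second' starts, and each caller still receives its own result or rejection.
Promise.all([task('first'), task('second')]).then(console.log) // => ['first', 'second']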