Initial import with skill sheet working

2024-12-04 00:11:23 +01:00
commit 9050c80ab4
4488 changed files with 671048 additions and 0 deletions


@@ -0,0 +1,23 @@
name: Build Status
on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master
jobs:
  build:
    strategy:
      matrix:
        node-version: [lts/*]
        os: [ubuntu-latest, macos-latest, windows-latest]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v3
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node-version }}
      - run: npm install
      - run: npm test

node_modules/stream-composer/LICENSE generated vendored Normal file

@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2023 Mathias Buus

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

node_modules/stream-composer/README.md generated vendored Normal file

@@ -0,0 +1,61 @@
# stream-composer
Modern stream composer
```
npm install stream-composer
```
Supports composing and pipelining multiple streams into a single [streamx](https://github.com/mafintosh/streamx) stream.
## Usage
``` js
const Composer = require('stream-composer')

// Make a duplex stream out of a read and write stream
const stream = new Composer()

stream.setReadable(someReadableStream) // set readable side
stream.setWritable(someWritableStream) // set writable side

// reads, read from the readable stream
stream.on('data', function (data) {
  // data is from someReadableStream
})

// writes, write to the writable stream
stream.write(data)
```
## API
#### `stream = new Composer([options])`
Make a new composer. Optionally pass the writable stream and readable stream in the constructor.
Options are forwarded to streamx.
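For instance (a minimal sketch; `someWritableStream` and `someReadableStream` are placeholders as in the usage example above):

``` js
// Options are passed straight through to the underlying streamx Duplex
const stream = new Composer({ highWaterMark: 1024 })
stream.setWritable(someWritableStream)
stream.setReadable(someReadableStream)
```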
#### `stream.setReadable(readableStream)`
Set the readable stream. If you pass `null` the readable stream will be set to an empty stream for you.
#### `stream.setWritable(writableStream)`
Set the writable stream. If you pass `null` the writable stream will be set to an empty stream for you.
#### `stream.setPipeline(...pipelineStreams)`
Set the stream up as a pipeline. Writing to the outer stream writes to the first stream in the pipeline,
and reading from the outer stream reads from the last stream in the pipeline.
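As an illustrative sketch (not part of the upstream docs; the two `Transform` steps are just placeholders for any streamx-compatible transforms):

``` js
const { Transform } = require('streamx')
const Composer = require('stream-composer')

// Two toy transform steps
const upper = new Transform({
  transform (data, cb) {
    cb(null, String(data).toUpperCase())
  }
})
const exclaim = new Transform({
  transform (data, cb) {
    cb(null, data + '!')
  }
})

const stream = new Composer()
stream.setPipeline(upper, exclaim)

// Writes go into `upper`, reads come out of `exclaim`
stream.on('data', function (data) {
  console.log(data) // 'HI!'
})
stream.write('hi')
stream.end()
```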
#### `stream = Composer.pipeline(...pipelineStreams)`
Helper for making a composer stream and setting the pipeline in one go.
#### `stream = Composer.duplexer(writableStream, readableStream)`
Helper for making a composer stream and setting the writable and readable stream in one go.
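For example, an illustrative sketch of the duplexer helper (wrapping a child process is just one use case; any readable/writable pair works):

``` js
const { spawn } = require('child_process')
const Composer = require('stream-composer')

// Expose a child process as one duplex stream:
// writes go to its stdin, reads come from its stdout.
const child = spawn('cat')
const stream = Composer.duplexer(child.stdin, child.stdout)

stream.on('data', function (data) {
  console.log('echoed:', data.toString())
})
stream.write('hello\n')
stream.end()
```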
## License
MIT

node_modules/stream-composer/index.js generated vendored Normal file

@@ -0,0 +1,192 @@
const { Duplex, pipeline } = require('streamx')

module.exports = class Composer extends Duplex {
  constructor (opts) {
    super(opts)

    this._writable = null
    this._readable = null
    this._isPipeline = false
    this._pipelineMissing = 2
    this._writeCallback = null
    this._finalCallback = null

    this._ondata = this._pushData.bind(this)
    this._onend = this._pushEnd.bind(this, null)
    this._ondrain = this._continueWrite.bind(this, null)
    this._onfinish = this._maybeFinal.bind(this)
    this._onerror = this.destroy.bind(this)
    this._onclose = this.destroy.bind(this, null)
  }

  static pipeline (...streams) {
    const c = new Composer()
    c.setPipeline(...streams)
    return c
  }

  static duplexer (ws = null, rs = null) {
    const c = new Composer()
    c.setWritable(ws)
    c.setReadable(rs)
    return c
  }

  setPipeline (first, ...streams) {
    const all = Array.isArray(first) ? first : [first, ...streams]

    this._isPipeline = true
    this.setWritable(all[0])
    this.setReadable(all[all.length - 1])

    pipeline(all, (err) => {
      if (err) this.destroy(err)
    })

    return this
  }

  setReadable (rs) {
    if (this._readable) {
      this._readable.removeListener('data', this._ondata)
      this._readable.removeListener('end', this._onend)
      this._readable.removeListener('error', this._onerror)
      this._readable.removeListener('close', this._onclose)
    }

    if (rs === null) {
      this._readable = null
      this.push(null)
      this.resume()
      return
    }

    this._readable = rs
    this._readable.on('data', this._ondata)
    this._readable.on('end', this._onend)
    this._readable.on('error', this._onerror)
    this._readable.on('close', this._onclose)

    if (this.destroying && this._readable.destroy) {
      this._readable.destroy()
    }

    return this
  }

  setWritable (ws) {
    if (this._writable) {
      this._writable.removeListener('drain', this._ondrain)
      this._writable.removeListener('finish', this._onfinish)
      this._writable.removeListener('error', this._onerror)
      this._writable.removeListener('close', this._onclose)
    }

    if (ws === null) {
      this._writable = null
      this._continueWrite(null)
      this.end()
      return
    }

    this._writable = ws
    this._writable.on('drain', this._ondrain)
    this._writable.on('finish', this._onfinish)
    this._writable.on('error', this._onerror)
    this._writable.on('close', this._onclose)

    if (this.destroying && this._writable.destroy) {
      this._writable.destroy()
    }

    return this
  }

  _read (cb) {
    if (this._readable !== null) {
      this._readable.resume()
    }
    cb(null)
  }

  _pushData (data) {
    if (this.push(data) === false && this._readable !== null) {
      this._readable.pause()
    }
  }

  _pushEnd () {
    if (this._isPipeline) {
      this.on('end', this._decrementPipeline.bind(this))
    }
    this.push(null)
    if (this._readable !== null) {
      this._readable.removeListener('close', this._onclose)
    }
  }

  _decrementPipeline () {
    if (--this._pipelineMissing === 0) this._continueFinal(null)
  }

  _maybeFinal () {
    if (this._writable !== null) {
      this._writable.removeListener('close', this._onclose)
    }

    if (this._isPipeline) this._decrementPipeline()
    else this._continueFinal(null)
  }

  _continueFinal (err) {
    if (this._finalCallback === null) return

    const cb = this._finalCallback
    this._finalCallback = null
    cb(err)
  }

  _continueWrite (err) {
    if (this._writeCallback === null) return

    const cb = this._writeCallback
    this._writeCallback = null
    cb(err)
  }

  _predestroy () {
    if (this._writable !== null && this._writable.destroy) this._writable.destroy()
    if (this._readable !== null && this._readable.destroy) this._readable.destroy()
    this._continueWrite(new Error('Stream destroyed'))
    this._continueFinal(new Error('Stream destroyed'))
  }

  _writev (datas, cb) {
    if (this._writable === null) {
      return cb(null)
    }

    let flushed = true
    for (const data of datas) {
      flushed = this._writable.write(data)
    }

    if (!flushed) {
      this._writeCallback = cb
      return
    }

    cb(null)
  }

  _final (cb) {
    if (this._writable === null) {
      return cb(null)
    }

    this._finalCallback = cb
    this._writable.end()
  }
}

node_modules/stream-composer/package.json generated vendored Normal file

@@ -0,0 +1,26 @@
{
  "name": "stream-composer",
  "version": "1.0.2",
  "description": "Modern stream composer",
  "main": "index.js",
  "dependencies": {
    "streamx": "^2.13.2"
  },
  "devDependencies": {
    "brittle": "^3.2.1",
    "standard": "^17.0.0"
  },
  "scripts": {
    "test": "standard && brittle test.js"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/mafintosh/stream-composer.git"
  },
  "author": "Mathias Buus (@mafintosh)",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/mafintosh/stream-composer/issues"
  },
  "homepage": "https://github.com/mafintosh/stream-composer"
}

node_modules/stream-composer/test.js generated vendored Normal file

@@ -0,0 +1,283 @@
const test = require('brittle')
const { Readable, Writable, Transform } = require('streamx')
const Composer = require('./')

test('read and write', function (t) {
  t.plan(42)

  const c = new Composer()

  let writes = 0
  let reads = 0

  const r = new Readable()
  const w = new Writable({
    write (data, cb) {
      t.is(data, 'write-' + (writes++))
      setTimeout(cb, 0)
    }
  })

  c.setReadable(r)
  c.setWritable(w)

  let flushed = true
  for (let i = 0; i < 20; i++) {
    flushed = c.write('write-' + i)
  }

  t.not(flushed)

  for (let i = 0; i < 20; i++) {
    r.push('read-' + i)
  }

  t.ok(Readable.isBackpressured(r))

  c.on('data', function (data) {
    t.is(data, 'read-' + (reads++))
  })
})

test('read and write (twice)', function (t) {
  t.plan(42)

  const c = new Composer()

  let writes = 0
  let reads = 0

  const r = new Readable()
  const w = new Writable({
    write (data, cb) {
      t.is(data, 'write-' + (writes++))
      setTimeout(cb, 0)
    }
  })

  c.setReadable(r)
  c.setWritable(w)
  c.setReadable(r)
  c.setWritable(w)

  let flushed = true
  for (let i = 0; i < 20; i++) {
    flushed = c.write('write-' + i)
  }

  t.not(flushed)

  for (let i = 0; i < 20; i++) {
    r.push('read-' + i)
  }

  t.ok(Readable.isBackpressured(r))

  c.on('data', function (data) {
    t.is(data, 'read-' + (reads++))
  })
})

test('no write', function (t) {
  t.plan(13)

  const c = new Composer()

  let reads = 0

  const r = new Readable()

  c.setReadable(r)
  c.setWritable(null)

  for (let i = 0; i < 10; i++) {
    r.push('read-' + i)
  }
  r.push(null)

  c.on('data', function (data) {
    t.is(data, 'read-' + (reads++))
  })

  c.on('end', function () {
    t.pass('end')
  })

  c.on('finish', function () {
    t.pass('finish')
  })

  c.on('close', function () {
    t.pass('closed')
  })
})

test('no read', function (t) {
  t.plan(12)

  const c = new Composer()

  let writes = 0

  const w = new Writable({
    write (data, cb) {
      t.is(data, 'write-' + (writes++))
      setTimeout(cb, 0)
    }
  })

  c.setReadable(null)
  c.setWritable(w)

  for (let i = 0; i < 10; i++) {
    c.write('write-' + i)
  }
  c.end()

  c.on('end', function () {
    t.pass('ended')
  })

  c.on('close', function () {
    t.pass('closed')
  })
})

test('pipeline', function (t) {
  t.plan(22)

  const c = new Composer()

  c.setPipeline(new Transform(), new Transform())

  let reads = 0

  for (let i = 0; i < 20; i++) {
    c.write('hello-' + i)
  }

  c.on('finish', function () {
    t.pass('finish')
  })

  c.end()

  c.on('data', function (data) {
    t.is(data, 'hello-' + (reads++))
  })

  c.on('end', function () {
    t.pass('end')
  })
})

test('pipeline, static', function (t) {
  t.plan(21)

  const c = Composer.pipeline(new Transform(), new Transform(), new Transform(), new Transform())

  for (let i = 0; i < 20; i++) {
    c.write('hello-' + i)
  }
  c.end()

  let reads = 0

  c.on('data', function (data) {
    t.is(data, 'hello-' + (reads++))
  })

  c.on('end', function () {
    t.pass('ended')
  })
})

test('pipeline destroy', function (t) {
  t.plan(4)

  const streams = [new Transform(), new Transform(), new Transform(), new Transform()]
  const c = Composer.pipeline(streams)

  for (let i = 0; i < 20; i++) {
    c.write('hello-' + i)
  }

  c.destroy()

  c.on('close', function () {
    for (const stream of streams) {
      t.ok(stream.destroying)
    }
  })
})

test('pipeline destroy inner', function (t) {
  t.plan(5)

  const streams = [new Transform(), new Transform(), new Transform(), new Transform()]
  const c = Composer.pipeline(streams)

  for (let i = 0; i < 20; i++) {
    c.write('hello-' + i)
  }

  streams[1].destroy()

  c.on('error', function () {
    t.pass('has error')
  })

  c.on('close', function () {
    for (const stream of streams) {
      t.ok(stream.destroying)
    }
  })
})

test('pipeline w/ core streams', function (t) {
  t.plan(23)

  const coreStream = require('stream')

  const c = Composer.pipeline(new Transform({
    transform (obj, cb) {
      setImmediate(function () {
        cb(null, obj)
      })
    }
  }), new Transform({
    transform (obj, cb) {
      setImmediate(function () {
        cb(null, obj)
      })
    }
  }))

  let reads = 0
  const data = []

  for (let i = 0; i < 20; i++) {
    data.push({ msg: 'hello-' + i })
  }

  c.on('finish', function () {
    t.absent(pipelineEnded, 'finish')
  })

  c.on('end', function () {
    t.absent(pipelineEnded, 'ended')
  })

  let pipelineEnded = false
  coreStream.pipeline([coreStream.Readable.from(data), c], function (err) {
    pipelineEnded = true
    t.ok(!err, 'pipeline ended')
  })

  c.on('data', function (data) {
    t.alike(data, { msg: 'hello-' + reads++ }, 'got data')
  })
})
})