Add new sheets (armor, weapons, malefica) and v13 support

This commit is contained in:
2025-05-18 23:51:26 +02:00
parent 7672f861ff
commit 995d61e1c6
4478 changed files with 667857 additions and 620 deletions

172
node_modules/comment-parser/CHANGELOG.md generated vendored Normal file
View File

@@ -0,0 +1,172 @@
# v1.4.1
- fix .prettierignore
- add source transformation example
# v1.4.0
- ESM compatibility improvements; fixes #159, #161
# v1.3.1
- allow for valid empty jsdoc; fixes #128
# v1.3.0
- add support for custom block markers
# v1.2.4
- reverting engine constraint back to ^12.0.0
# v1.2.3
- publishing missing fix: point package's main to .cjs file
# v1.2.2
- re-export ./util on the top-level for compatibility with older Node
- point package's main to .cjs file
# v1.2.1
- bump `engines` per `exports` issues in earlier Node versions
# v1.2.0
- keep and handle appropriately CR line endings
# v1.1.6-beta.3
- process CRs as a separate .lineEnd token
# v1.1.6-beta.2
- ESM/CJS compatibility fixes
# v1.1.6-beta.1
- support native ESM
# v1.1.6-beta.0
- keep original CR line endings
- allow to normalize line endings with `crlf` transform
# v1.1.5
- drop unused variables
- add .editorconfig
# v1.1.4
- `bugfix` fix unsynced lib/
# v1.1.3
- export primitive type on the top level: Markers, Block, Spec, Line, Tokens, Problem
# v1.1.2
- `bugfix` Allow to build nested tags from `name.subname` even if `name` wasn't described
- `bugfix` Preserve indentation when extracting comments
# v1.1.1
- add helpers for rewiring Spec.source <-> Spec.tags.source
# v1.1.0
- split tokenizers into separate modules
- allow multiline {type} definitions - issue #109
- allow using "=>" in [name=default] defaults issue #112
- allow using "=" in quoted [name=default] defaults issue #112
- add tokenizers usage example - issue #111
# v1.0.0
- complete rewrite in TS with more flexible API
# v0.7.6
- distinct non-critical errors by providing `err.warning`
# v0.7.5
- name parsing fixes
# v0.7.4
- node 8 backward compatibility fixes
# v0.7.3
- make stringify result more close to the source
# v0.7.2
- make stringify to start each line with * in multiline comments
# v0.7.1
- ensure non-space characters after asterisk are included in source
# v0.7.0
- allow fenced blocks in tag description, see opts.fence
# v0.6.2
- document TypeScript definitions
# v0.6.1
- adjust stringifier indentation
# v0.6.0
- soft-drop node@6 support
- migrate to ES6 syntax
- allow to generate comments out of parsed data
# v0.5.5
- allow loose tag names, e.g. @.tag, @-tag
# v0.5.4
- allow quoted literal names, e.g. `@tag "My Var" description`
# v0.5.3
- corrected TypeScript definitions
# v0.5.2
- added TypeScript definitions
- removed `readable-stream` dependency
# v0.5.1
- Support for tab as separator between tag components.
- Docs: Indicate when `optional` is `true`; `default` property
# v0.5.0
- line wrapping control with `opts.join`
# v0.4.2
- tolerate inconsistent lines alignment within block
# v0.4.1
- refactored parsing, allow to not start lines with "* " inside block
# v0.3.2
- fix RegExp for `description` extraction to allow $ char
# v0.3.1
- use `readable-stream` for Node 0.8 compatibility
- allow to pass optional parameters to `parse.file(path [,opts], done)`
- allow `parse.stream` to work with Buffers in addition to strings
# v0.3.0
- `feature` allow to use custom parsers
- `feature` always include source, no `raw_value` option needed
- `bugfix` always provide `optional` tag property
- `refactor` clean up tests
# v0.2.3
- `bugfix` Accept `/** one line */` comments
- `refactor` Get rid of `lodash` to avoid unnecessary extra size when bundled
# v0.2.2
- `feature` allow spaces in default values `@my-tag {my.type} [name=John Doe]`
# v0.2.1
- `refactor` make line parsing mechanism more tolerable
# v0.2.0
- `feature` include source line numbers in parsed data
- `feature` optionally prevent dotted names expanding
# v0.1.2
- `bugfix` Allow to build nested tags from `name.subname` even if `name` wasn't described
- `bugfix` Preserve indentation when extracting comments
# v0.1.1
- `improvement` `parse(source)` returns array of all blocks found in source or an empty array
- `bugfix` fixed indented blocks parsing
# v0.1.0
Initial implementation

21
node_modules/comment-parser/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 Sergii Iavorskyi
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

220
node_modules/comment-parser/README.md generated vendored Normal file
View File

@@ -0,0 +1,220 @@
# comment-parser
`comment-parser` is a library helping to handle Generic JSDoc-style comments. It is
- **language-agnostic** no semantics enforced. You decide what tags are and what they mean. And it can be used with any language supporting `/** */` source comments.
- **no dependencies** it is compact and environment-agnostic, can be run on both the server and browser sides
- **highly customizable** with a little code you can deeply customize how comments are parsed
- **bidirectional** - you can write comment blocks back to the source after updating or formatting
- **strictly typed** - comes with generated `d.ts` data definitions since written in TypeScript
```sh
npm install comment-parser
```
> 💡 Check out the [Playground](https://syavorsky.github.io/comment-parser)
> 💡 Previous version lives in [0.x](https://github.com/syavorsky/comment-parser/tree/0.x) branch
Lib mainly provides two pieces [Parser](#Parser) and [Stringifier](#Stringifier).
## Parser
Let's go over string parsing:
```js
const { parse } = require('comment-parser/lib')
const source = `
/**
* Description may go
* over few lines followed by @tags
* @param {string} name the name parameter
* @param {any} value the value of any type
*/`
const parsed = parse(source)
```
Lib source code is written in TypeScript and all data shapes are conveniently available for your IDE of choice. All types described below can be found in [primitives.ts](src/primitives.ts)
The input source is first parsed into lines, then lines split into tokens, and finally, tokens are processed into blocks of tags
### Block
```js
/**
* Description may go
* over multiple lines followed by @tags
* @param {string} name the name parameter
* @param {any} value the value parameter
*/
```
### Description
```js
/**
* Description may go
* over multiple lines followed by @tags
```
### Tags
```js
* @param {string} name the name parameter
```
```js
* @param {any} value the value parameter
*/
```
### Tokens
```
|line|start|delimiter|postDelimiter|tag |postTag|name |postName|type |postType|description |end|
|----|-----|---------|-------------|------|-------|-----|--------|--------|--------|--------------------------------|---|
| 0|{2} |/** | | | | | | | | | |
| 1|{3} |* |{1} | | | | | | |Description may go | |
| 2|{3} |* |{1} | | | | | | |over few lines followed by @tags| |
| 3|{3} |* |{1} |@param|{1} |name |{1} |{string}|{1} |the name parameter | |
| 4|{3} |* |{1} |@param|{1} |value|{1} |{any} |{1} |the value of any type | |
| 5|{3} | | | | | | | | | |*/ |
```
### Result
The result is an array of Block objects, see the full output on the [playground](https://syavorsky.github.io/comment-parser)
```js
[{
// upper text of the comment, overall block description
description: 'Description may go over multiple lines followed by @tags',
// list of block tags: @param, @param
tags: [{
// tokens.tag without "@"
tag: 'param',
// unwrapped tokens.name
name: 'name',
// unwrapped tokens.type
type: 'string',
// true, if tokens.name is [optional]
optional: false,
// default value if optional [name=default] has one
default: undefined,
// tokens.description assembled from a single or multiple lines
description: 'the name parameter',
// problems occurred while parsing this tag section, subset of ../problems array
problems: [],
// source lines processed for extracting this tag, "slice" of the ../source item reference
source: [ ... ],
}, ... ],
// source is an array of `Line` items having the source
// line number and `Tokens` that can be assembled back into
// the line string preserving original formatting
source: [{
// source line number
number: 1,
// source line string
source: "/**",
// source line tokens
tokens: {
// indentation
start: "",
// delimiter, either '/**', '*/', '*', or ''. Mid lines may have no delimiters
delimiter: "/**",
// space between delimiter and tag
postDelimiter: "",
// tag starting with "@"
tag: "",
// space between tag and type
postTag: "",
// name with no whitespaces or "multiple words" wrapped into quotes. May occur in [name] and [name=default] forms
name: "",
// space between name and type
postName: "",
// type has to be {wrapped} into curlies, otherwise it will be omitted
type: "",
// space between type and description
postType: "",
// description is basically the rest of the line
description: "",
// closing */ marker if present
end: ""
}
}, ... ],
// problems occurred while parsing the block
problems: [],
}];
```
While `.source[].tokens` are not providing readable annotation information, they are essential for tracing data origins and assembling string blocks with `stringify`
### options
```ts
interface Options {
// start count for source line numbers
startLine: number;
// escaping chars sequence marking wrapped content literal for the parser
fence: string;
// block and comment description compaction strategy
spacing: 'compact' | 'preserve';
// tokenizer functions extracting name, type, and description out of tag, see Tokenizer
tokenizers: Tokenizer[];
}
```
examples
- [default config](https://syavorsky.github.io/comment-parser/#parse-defaults)
- [line numbers control](https://syavorsky.github.io/comment-parser/#parse-line-numbering)
- [description spacing](https://syavorsky.github.io/comment-parser/#parse-spacing)
- [escaping](https://syavorsky.github.io/comment-parser/#parse-escaping)
- [explore the origin source](https://syavorsky.github.io/comment-parser/#parse-source-exploration)
[suggest more examples](https://github.com/syavorsky/comment-parser/issues/new?title=example+suggestion%3A+...&labels=example,parser)
## Stringifier
The stringifier is an important piece used by other tools updating the source code. It goes over `Block.source[].tokens` items and assembles them back to the string. It might be used with various transforms applied before stringifying.
```js
const { parse, stringify, transforms: {flow, align, indent} } = require('comment-parser');
const source = `
/**
* Description may go
* over multiple lines followed by @tags
*
* @my-tag {my.type} my-name description line 1
description line 2
* description line 3
*/`;
const parsed = parse(source);
const transform = flow(align(), indent(0))
console.log(stringify(transform(parsed[0])));
```
### Result
```js
/**
* Description may go
* over multiple lines followed by @tags
*
* @my-tag {my.type} my-name description line 1
description line 2
* description line 3
*/
```
examples
- [format comments](https://syavorsky.github.io/comment-parser/#stringify-formatting)
[suggest more examples](https://github.com/syavorsky/comment-parser/issues/new?title=example+suggestion%3A+...&labels=example,stringifier)
## Migrating from 0.x version
Code of pre-1.0 version is forked into [0.x](https://github.com/syavorsky/comment-parser/tree/0.x) and will phase out eventually. Please file the issue if you find some previously existing functionality can't be achieved with 1.x API. Check out [migration notes](migrate-1.0.md).

650
node_modules/comment-parser/browser/index.js generated vendored Normal file
View File

@@ -0,0 +1,650 @@
var CommentParser = (function (exports) {
'use strict';
/** @deprecated */
exports.Markers = void 0;
// Enum of comment block delimiters: `start` opens a block, `delim` leads
// mid-block lines, `end` closes the block, and `nostart` ("/***") marks
// comments that must NOT be treated as parseable blocks.
(function (Markers) {
Markers["start"] = "/**";
Markers["nostart"] = "/***";
Markers["delim"] = "*";
Markers["end"] = "*/";
})(exports.Markers = exports.Markers || (exports.Markers = {}));
// True when the string is non-empty and consists solely of whitespace.
function isSpace(source) {
  const onlyWhitespace = /^\s+$/;
  return onlyWhitespace.test(source);
}
// Splits trailing carriage returns off a line: returns [CRs, remainder].
function splitCR(source) {
  const trailing = source.match(/\r+$/);
  if (trailing === null) return ['', source];
  const crCount = trailing[0].length;
  return [source.slice(-crCount), source.slice(0, -crCount)];
}
// Splits leading whitespace off a string: returns [spaces, remainder].
function splitSpace(source) {
  const leading = source.match(/^\s+/);
  if (leading === null) return ['', source];
  const width = leading[0].length;
  return [source.slice(0, width), source.slice(width)];
}
// Breaks a multi-line string into individual lines on LF boundaries
// (CRs are handled separately by splitCR).
function splitLines(source) {
  return source.split('\n');
}
// Creates an empty Block; any provided fields override the defaults.
function seedBlock(block = {}) {
  return { description: '', tags: [], source: [], problems: [], ...block };
}
// Creates an empty tag Spec; any provided fields override the defaults.
function seedSpec(spec = {}) {
  return {
    tag: '',
    name: '',
    type: '',
    optional: false,
    description: '',
    problems: [],
    source: [],
    ...spec,
  };
}
// Creates an empty Tokens record (all line fragments blank); any provided
// fields override the defaults.
function seedTokens(tokens = {}) {
  const defaults = {
    start: '',
    delimiter: '',
    postDelimiter: '',
    tag: '',
    postTag: '',
    name: '',
    postName: '',
    type: '',
    postType: '',
    description: '',
    end: '',
    lineEnd: '',
  };
  return Object.assign(defaults, tokens);
}
/**
 * Makes Block.tags[].source entries point at the corresponding objects in
 * Block.source (matched by line number), so Block.source is the single
 * source of truth. This is the counterpart of rewireSpecs.
 * @param block parsed comments block
 */
function rewireSource(block) {
  const byNumber = new Map();
  for (const line of block.source) byNumber.set(line.number, line);
  for (const spec of block.tags) {
    spec.source = spec.source.map((line) => byNumber.get(line.number));
  }
  return block;
}
/**
 * Makes Block.source entries point at the corresponding objects found in
 * Block.tags[].source (matched by line number), so the tag sources are the
 * single source of truth. This is the counterpart of rewireSource.
 * @param block parsed comments block
 */
function rewireSpecs(block) {
  const byNumber = new Map();
  for (const spec of block.tags)
    for (const line of spec.source) byNumber.set(line.number, line);
  block.source = block.source.map((line) => byNumber.get(line.number) || line);
  return block;
}
// A description token starting with "@word" opens a new tag section.
const reTag = /^@\S+/;
/**
 * Creates a configured block parser that groups a comment's source lines
 * into sections: sections[0] is the block description (possibly empty),
 * and each following section holds the lines of one tag. Content inside a
 * fence (e.g. code blocks) never starts a new section.
 * @param {Partial<Options>} options
 */
function getParser$3({ fence = '```', } = {}) {
  const fencer = getFencer(fence);
  return function parseBlock(source) {
    // section 0 collects the description lines
    const sections = [[]];
    let insideFence = false;
    for (const line of source) {
      const text = line.tokens.description;
      if (!insideFence && reTag.test(text)) sections.push([line]);
      else sections[sections.length - 1].push(line);
      // a line containing an odd number of fence markers flips the state
      if (fencer(text)) insideFence = !insideFence;
    }
    return sections;
  };
}
// Normalizes the fence option: a string becomes a predicate telling
// whether a line toggles the fenced state; a function is used as-is.
function getFencer(fence) {
  if (typeof fence === 'string')
    return (source) => source.split(fence).length % 2 === 0;
  return fence;
}
/**
 * Creates a stateful line feeder: call the returned function with one raw
 * source line at a time. It returns null while no comment block is open
 * (or one is still being accumulated) and returns the accumulated Line[]
 * once the closing marker is seen, then resets for the next block.
 * @param {Partial<Options>} options - startLine: number assigned to the
 *   first fed line; markers: comment delimiter strings
 */
function getParser$2({ startLine = 0, markers = exports.Markers, } = {}) {
let block = null;
let num = startLine;
return function parseSource(source) {
let rest = source;
const tokens = seedTokens();
// peel trailing CRs and leading indentation off before delimiter checks
[tokens.lineEnd, rest] = splitCR(rest);
[tokens.start, rest] = splitSpace(rest);
// a block opens on the start marker, unless the line uses the "nostart"
// marker (e.g. "/***"), which is deliberately skipped
if (block === null &&
rest.startsWith(markers.start) &&
!rest.startsWith(markers.nostart)) {
block = [];
tokens.delimiter = rest.slice(0, markers.start.length);
rest = rest.slice(markers.start.length);
[tokens.postDelimiter, rest] = splitSpace(rest);
}
// not inside a comment block — just count the line and move on
if (block === null) {
num++;
return null;
}
const isClosed = rest.trimRight().endsWith(markers.end);
// mid-block lines may begin with the "*" delimiter; the end marker
// itself is handled separately below
if (tokens.delimiter === '' &&
rest.startsWith(markers.delim) &&
!rest.startsWith(markers.end)) {
tokens.delimiter = markers.delim;
rest = rest.slice(markers.delim.length);
[tokens.postDelimiter, rest] = splitSpace(rest);
}
if (isClosed) {
// capture the closing marker plus any whitespace trailing after it
const trimmed = rest.trimRight();
tokens.end = rest.slice(trimmed.length - markers.end.length);
rest = trimmed.slice(0, -markers.end.length);
}
tokens.description = rest;
block.push({ number: num, source, tokens });
num++;
if (isClosed) {
// hand back a copy and reset the accumulator for the next block
const result = block.slice();
block = null;
return result;
}
return null;
};
}
/**
 * Creates a spec parser: runs the configured tokenizers in order over a
 * tag section's lines, stopping early if a tokenizer reports a critical
 * problem.
 */
function getParser$1({ tokenizers }) {
  return function parseSpec(source) {
    let spec = seedSpec({ source });
    for (const tokenize of tokenizers) {
      spec = tokenize(spec);
      const lastProblem = spec.problems[spec.problems.length - 1];
      if (lastProblem != null && lastProblem.critical) break;
    }
    return spec;
  };
}
/**
 * Creates a tokenizer that splits the leading `@tag` prefix off the first
 * line's description token, populating `spec.tag` (without the "@").
 * Reports a critical problem when the section does not start with "@".
 */
function tagTokenizer() {
  return (spec) => {
    const { tokens } = spec.source[0];
    const match = tokens.description.match(/\s*(@(\S+))(\s*)/);
    if (match === null) {
      spec.problems.push({
        code: 'spec:tag:prefix',
        message: 'tag should start with "@" symbol',
        line: spec.source[0].number,
        critical: true,
      });
      return spec;
    }
    const [consumed, tagWithAt, tagName, trailingSpace] = match;
    tokens.tag = tagWithAt;
    tokens.postTag = trailingSpace;
    tokens.description = tokens.description.slice(consumed.length);
    spec.tag = tagName;
    return spec;
  };
}
/**
 * Creates a tokenizer splitting the remaining `Spec.lines[].tokens.description`
 * into `type` and `description` tokens and populating `spec.type`. The type
 * must be {wrapped} in curly braces and may span multiple lines.
 *
 * @param {Spacing} spacing tells how to deal with a whitespace
 * for type values going over multiple lines
 */
function typeTokenizer(spacing = 'compact') {
const join = getJoiner$1(spacing);
return (spec) => {
let curlies = 0;
let lines = [];
for (const [i, { tokens }] of spec.source.entries()) {
let type = '';
// no type at all: the first line's description must open with "{"
if (i === 0 && tokens.description[0] !== '{')
return spec;
// consume characters until the curly braces balance out
for (const ch of tokens.description) {
if (ch === '{')
curlies++;
if (ch === '}')
curlies--;
type += ch;
if (curlies === 0)
break;
}
lines.push([tokens, type]);
if (curlies === 0)
break;
}
// ran out of lines with braces still open
if (curlies !== 0) {
spec.problems.push({
code: 'spec:type:unpaired-curlies',
message: 'unpaired curlies',
line: spec.source[0].number,
critical: true,
});
return spec;
}
const parts = [];
const offset = lines[0][0].postDelimiter.length;
for (const [i, [tokens, type]] of lines.entries()) {
tokens.type = type;
if (i > 0) {
// continuation lines: indentation beyond the first line's
// post-delimiter width belongs to the type, not the delimiter
tokens.type = tokens.postDelimiter.slice(offset) + type;
tokens.postDelimiter = tokens.postDelimiter.slice(0, offset);
}
[tokens.postType, tokens.description] = splitSpace(tokens.description.slice(type.length));
parts.push(tokens.type);
}
// strip the outer "{" and "}" before joining the parts
parts[0] = parts[0].slice(1);
parts[parts.length - 1] = parts[parts.length - 1].slice(0, -1);
spec.type = join(parts);
return spec;
};
}
// Strips surrounding whitespace from a single part.
const trim = (x) => x.trim();
// Resolves the spacing option into a joiner for multi-line type parts:
// 'compact' squeezes whitespace out, 'preserve' keeps line breaks, and a
// custom function is used as-is.
function getJoiner$1(spacing) {
  switch (spacing) {
    case 'compact':
      return (t) => t.map(trim).join('');
    case 'preserve':
      return (t) => t.join('\n');
    default:
      return spacing;
  }
}
// True when the string is wrapped in double quotes (non-empty only).
const isQuoted = (s) => s && s.startsWith('"') && s.endsWith('"');
/**
 * Creates a tokenizer splitting the remaining `spec.lines[].tokens.description`
 * into `name` and `description` tokens and populating `spec.name`. Handles
 * "quoted names", [optional] names, and [name=default] values; reports a
 * critical problem on malformed input.
 */
function nameTokenizer() {
const typeEnd = (num, { tokens }, i) => tokens.type === '' ? num : i;
return (spec) => {
// look for the name in the line where {type} ends
const { tokens } = spec.source[spec.source.reduce(typeEnd, 0)];
const source = tokens.description.trimLeft();
const quotedGroups = source.split('"');
// if it starts with quoted group, assume it is a literal
if (quotedGroups.length > 1 &&
quotedGroups[0] === '' &&
quotedGroups.length % 2 === 1) {
spec.name = quotedGroups[1];
tokens.name = `"${quotedGroups[1]}"`;
[tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
return spec;
}
let brackets = 0;
let name = '';
let optional = false;
let defaultValue;
// assume name is non-space string or anything wrapped into brackets
for (const ch of source) {
if (brackets === 0 && isSpace(ch))
break;
if (ch === '[')
brackets++;
if (ch === ']')
brackets--;
name += ch;
}
if (brackets !== 0) {
spec.problems.push({
code: 'spec:name:unpaired-brackets',
message: 'unpaired brackets',
line: spec.source[0].number,
critical: true,
});
return spec;
}
const nameToken = name;
// [name] marks the parameter optional; [name=value] also carries a default
if (name[0] === '[' && name[name.length - 1] === ']') {
optional = true;
name = name.slice(1, -1);
const parts = name.split('=');
name = parts[0].trim();
if (parts[1] !== undefined)
defaultValue = parts.slice(1).join('=').trim();
if (name === '') {
spec.problems.push({
code: 'spec:name:empty-name',
message: 'empty name',
line: spec.source[0].number,
critical: true,
});
return spec;
}
if (defaultValue === '') {
spec.problems.push({
code: 'spec:name:empty-default',
message: 'empty default value',
line: spec.source[0].number,
critical: true,
});
return spec;
}
// has "=" and is not a string, except for "=>"
if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
spec.problems.push({
code: 'spec:name:invalid-default',
message: 'invalid default value syntax',
line: spec.source[0].number,
critical: true,
});
return spec;
}
}
spec.optional = optional;
spec.name = name;
tokens.name = nameToken;
if (defaultValue !== undefined)
spec.default = defaultValue;
[tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
return spec;
};
}
/**
 * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
 * following the given spacing strategy.
 * @param {Spacing} spacing tells how to handle the whitespace
 * @param {BlockMarkers} markers tells how to handle comment block delimitation
 */
function descriptionTokenizer(spacing = 'compact', markers = exports.Markers) {
const join = getJoiner(spacing);
return (spec) => {
spec.description = join(spec.source, markers);
return spec;
};
}
// Resolves the spacing option into a description joiner; a custom joiner
// function is passed through untouched.
function getJoiner(spacing) {
  switch (spacing) {
    case 'compact':
      return compactJoiner;
    case 'preserve':
      return preserveJoiner;
    default:
      return spacing;
  }
}
// Joins the lines' descriptions into one space-separated string, trimming
// each line and dropping blank ones. `markers` is accepted for interface
// symmetry with preserveJoiner but is not used.
function compactJoiner(lines, markers = exports.Markers) {
  const parts = [];
  for (const { tokens: { description } } of lines) {
    const trimmed = description.trim();
    if (trimmed !== '') parts.push(trimmed);
  }
  return parts.join(' ');
}
// Reducer remembering the index of the last line that carries a type
// token, i.e. where a multi-line {type} definition ends.
const lineNo = (num, { tokens }, i) => tokens.type === '' ? num : i;
// Reassembles one line's visible description, restoring indentation for
// lines that have no delimiter.
const getDescription = ({ tokens }) => (tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1)) +
tokens.description;
// Joins line descriptions preserving line breaks, skipping the bare
// opening and closing delimiter lines.
function preserveJoiner(lines, markers = exports.Markers) {
if (lines.length === 0)
return '';
// skip the opening line with no description
if (lines[0].tokens.description === '' &&
lines[0].tokens.delimiter === markers.start)
lines = lines.slice(1);
// skip the closing line with no description
const lastLine = lines[lines.length - 1];
if (lastLine !== undefined &&
lastLine.tokens.description === '' &&
lastLine.tokens.end.endsWith(markers.end))
lines = lines.slice(0, -1);
// description starts at the last line of type definition
lines = lines.slice(lines.reduce(lineNo, 0));
return lines.map(getDescription).join('\n');
}
/**
 * Creates the top-level comment parser, wiring together the line reader
 * (getParser$2), the block sectioner (getParser$3), and the per-tag spec
 * parser (getParser$1).
 * @returns function taking a whole source string and returning Block[]
 */
function getParser({ startLine = 0, fence = '```', spacing = 'compact', markers = exports.Markers, tokenizers = [
tagTokenizer(),
typeTokenizer(spacing),
nameTokenizer(),
descriptionTokenizer(spacing),
], } = {}) {
// startLine must be a non-negative integer
if (startLine < 0 || startLine % 1 > 0)
throw new Error('Invalid startLine');
const parseSource = getParser$2({ startLine, markers });
const parseBlock = getParser$3({ fence });
const parseSpec = getParser$1({ tokenizers });
const joinDescription = getJoiner(spacing);
return function (source) {
const blocks = [];
for (const line of splitLines(source)) {
// feed lines until a complete comment block has been accumulated
const lines = parseSource(line);
if (lines === null)
continue;
// sections[0] is the block description; the rest are tag sections
const sections = parseBlock(lines);
const specs = sections.slice(1).map(parseSpec);
blocks.push({
description: joinDescription(sections[0], markers),
tags: specs,
source: lines,
problems: specs.reduce((acc, spec) => acc.concat(spec.problems), []),
});
}
return blocks;
};
}
// Reassembles one line's tokens, in their canonical order, back into the
// original source string.
function join(tokens) {
  const order = [
    'start',
    'delimiter',
    'postDelimiter',
    'tag',
    'postTag',
    'type',
    'postType',
    'name',
    'postName',
    'description',
    'end',
    'lineEnd',
  ];
  return order.map((key) => tokens[key]).join('');
}
// Creates a stringifier rendering a Block back to its source text, one
// joined line per source entry.
function getStringifier() {
  return (block) => {
    const lines = block.source.map(({ tokens }) => join(tokens));
    return lines.join('\n');
  };
}
// TypeScript's __rest helper, inlined per-module by the bundler: copies
// own enumerable properties (including symbols) of `s` into a new object,
// excluding the keys listed in `e`. Implements object rest destructuring.
var __rest$2 = (window && window.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
// Seed widths for the alignment pass.
const zeroWidth$1 = {
start: 0,
tag: 0,
type: 0,
name: 0,
};
// Reducer computing maximum token widths across a block's lines; the
// `start` (indentation) width is taken from the opening line only.
const getWidth = (markers = exports.Markers) => (w, { tokens: t }) => ({
start: t.delimiter === markers.start ? t.start.length : w.start,
tag: Math.max(w.tag, t.tag.length),
type: Math.max(w.type, t.type.length),
name: Math.max(w.name, t.name.length),
});
// Builds a string of `len` spaces.
const space = (len) => ''.padStart(len, ' ');
/**
 * Creates a transform vertically aligning the tag, type, and name columns
 * across all lines of a block and normalizing indentation to the opening
 * line's. Column widths are recomputed for each block passed in.
 */
function align$1(markers = exports.Markers) {
let intoTags = false;
let w;
function update(line) {
const tokens = Object.assign({}, line.tokens);
if (tokens.tag !== '')
intoTags = true;
const isEmpty = tokens.tag === '' &&
tokens.name === '' &&
tokens.type === '' &&
tokens.description === '';
// dangling '*/'
if (tokens.end === markers.end && isEmpty) {
tokens.start = space(w.start + 1);
return Object.assign(Object.assign({}, line), { tokens });
}
switch (tokens.delimiter) {
case markers.start:
tokens.start = space(w.start);
break;
case markers.delim:
tokens.start = space(w.start + 1);
break;
default:
tokens.delimiter = '';
tokens.start = space(w.start + 2); // compensate delimiter
}
// description lines before the first tag need only delimiter spacing
if (!intoTags) {
tokens.postDelimiter = tokens.description === '' ? '' : ' ';
return Object.assign(Object.assign({}, line), { tokens });
}
// trailing columns with no content after them receive no padding
const nothingAfter = {
delim: false,
tag: false,
type: false,
name: false,
};
if (tokens.description === '') {
nothingAfter.name = true;
tokens.postName = '';
if (tokens.name === '') {
nothingAfter.type = true;
tokens.postType = '';
if (tokens.type === '') {
nothingAfter.tag = true;
tokens.postTag = '';
if (tokens.tag === '') {
nothingAfter.delim = true;
}
}
}
}
tokens.postDelimiter = nothingAfter.delim ? '' : ' ';
// pad each populated column to the block-wide max width plus one space
if (!nothingAfter.tag)
tokens.postTag = space(w.tag - tokens.tag.length + 1);
if (!nothingAfter.type)
tokens.postType = space(w.type - tokens.type.length + 1);
if (!nothingAfter.name)
tokens.postName = space(w.name - tokens.name.length + 1);
return Object.assign(Object.assign({}, line), { tokens });
}
return (_a) => {
var { source } = _a, fields = __rest$2(_a, ["source"]);
w = source.reduce(getWidth(markers), Object.assign({}, zeroWidth$1));
return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
};
}
// Duplicate of __rest$2 — the generated TS object-rest helper is inlined
// once per source module by the bundler.
var __rest$1 = (window && window.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
// Creates a function trimming `offset` characters off the front of a string.
const pull = (offset) => (str) => str.slice(offset);
// Creates a function appending `offset` spaces to the end of a string.
const push = (offset) => {
  const padding = ' '.repeat(offset);
  return (str) => str + padding;
};
/**
 * Creates a transform shifting every line's leading indentation so the
 * block starts at column `pos`. The shift direction and amount are
 * decided once, from the first line seen.
 */
function indent(pos) {
let shift;
const pad = (start) => {
if (shift === undefined) {
const offset = pos - start.length;
shift = offset > 0 ? push(offset) : pull(-offset);
}
return shift(start);
};
const update = (line) => (Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { start: pad(line.tokens.start) }) }));
return (_a) => {
var { source } = _a, fields = __rest$1(_a, ["source"]);
return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
};
}
// Duplicate of __rest$2 — the generated TS object-rest helper is inlined
// once per source module by the bundler.
var __rest = (window && window.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
/**
 * Creates a transform normalizing line endings through the lineEnd token:
 * 'LF' strips carriage returns, any other value ('CRLF') appends one.
 */
function crlf(ending) {
function update(line) {
return Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { lineEnd: ending === 'LF' ? '' : '\r' }) });
}
return (_a) => {
var { source } = _a, fields = __rest(_a, ["source"]);
return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
};
}
// Composes block transforms left-to-right into a single transform.
function flow(...transforms) {
  return (block) => {
    let result = block;
    for (const transform of transforms) result = transform(result);
    return result;
  };
}
// Seed column widths for the inspection table, one entry per token field.
const zeroWidth = {
line: 0,
start: 0,
delimiter: 0,
postDelimiter: 0,
tag: 0,
postTag: 0,
name: 0,
postName: 0,
type: 0,
postType: 0,
description: 0,
end: 0,
lineEnd: 0,
};
// Column header overrides (lineEnd renders as "CR").
const headers = { lineEnd: 'CR' };
const fields = Object.keys(zeroWidth);
// Whitespace-only values render as their length, e.g. "{2}".
const repr = (x) => (isSpace(x) ? `{${x.length}}` : x);
// Wraps a row's cells in pipe separators.
const frame = (line) => '|' + line.join('|') + '|';
// Pads each token cell to its column width.
const align = (width, tokens) => Object.keys(tokens).map((k) => repr(tokens[k]).padEnd(width[k]));
/**
 * Renders a Block's source tokens as a pipe-delimited debugging table;
 * whitespace runs are shown as "{n}" (see the README's Tokens section).
 */
function inspect({ source }) {
var _a, _b;
if (source.length === 0)
return '';
// column widths: header labels first, then the widest cell per column
const width = Object.assign({}, zeroWidth);
for (const f of fields)
width[f] = ((_a = headers[f]) !== null && _a !== void 0 ? _a : f).length;
for (const { number, tokens } of source) {
width.line = Math.max(width.line, number.toString().length);
for (const k in tokens)
width[k] = Math.max(width[k], repr(tokens[k]).length);
}
// rows 0 and 1 are the header row and the divider row
const lines = [[], []];
for (const f of fields)
lines[0].push(((_b = headers[f]) !== null && _b !== void 0 ? _b : f).padEnd(width[f]));
for (const f of fields)
lines[1].push('-'.padEnd(width[f], '-'));
for (const { number, tokens } of source) {
const line = number.toString().padStart(width.line);
lines.push([line, ...align(width, tokens)]);
}
return lines.map(frame).join('\n');
}
/**
 * Parses JSDoc-style comment blocks out of a source string.
 * @param {string} source full source text to scan
 * @param {Partial<Options>} options parser configuration
 * @returns {Block[]} parsed comment blocks
 */
function parse(source, options = {}) {
return getParser(options)(source);
}
const stringify = getStringifier();
const transforms = {
flow: flow,
align: align$1,
indent: indent,
crlf: crlf,
};
const tokenizers = {
tag: tagTokenizer,
type: typeTokenizer,
name: nameTokenizer,
description: descriptionTokenizer,
};
const util = { rewireSpecs, rewireSource, seedBlock, seedTokens };
exports.inspect = inspect;
exports.parse = parse;
exports.stringify = stringify;
exports.tokenizers = tokenizers;
exports.transforms = transforms;
exports.util = util;
Object.defineProperty(exports, '__esModule', { value: true });
return exports;
}({}));

32
node_modules/comment-parser/es6/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,32 @@
// Type declarations for the comment-parser public API (generated).
import { Options as ParserOptions } from './parser/index.js';
import descriptionTokenizer from './parser/tokenizers/description.js';
import nameTokenizer from './parser/tokenizers/name.js';
import tagTokenizer from './parser/tokenizers/tag.js';
import typeTokenizer from './parser/tokenizers/type.js';
import alignTransform from './transforms/align.js';
import indentTransform from './transforms/indent.js';
import crlfTransform from './transforms/crlf.js';
import { flow as flowTransform } from './transforms/index.js';
import { rewireSpecs, rewireSource, seedBlock, seedTokens } from './util.js';
export * from './primitives.js';
// Parses JSDoc-style comment blocks out of a source string.
export declare function parse(source: string, options?: Partial<ParserOptions>): import("./primitives.js").Block[];
// Renders a parsed Block back to its source text.
export declare const stringify: import("./stringifier/index.js").Stringifier;
export { default as inspect } from './stringifier/inspect.js';
// Block transforms applicable before stringifying.
export declare const transforms: {
    flow: typeof flowTransform;
    align: typeof alignTransform;
    indent: typeof indentTransform;
    crlf: typeof crlfTransform;
};
// Tokenizer factories for building custom parser pipelines.
export declare const tokenizers: {
    tag: typeof tagTokenizer;
    type: typeof typeTokenizer;
    name: typeof nameTokenizer;
    description: typeof descriptionTokenizer;
};
export declare const util: {
    rewireSpecs: typeof rewireSpecs;
    rewireSource: typeof rewireSource;
    seedBlock: typeof seedBlock;
    seedTokens: typeof seedTokens;
};

30
node_modules/comment-parser/es6/index.js generated vendored Normal file
View File

@@ -0,0 +1,30 @@
// Public ES-module entry point: wires the parser, stringifier, tokenizers,
// transforms and utilities into the package's public API.
import getParser from './parser/index.js';
import descriptionTokenizer from './parser/tokenizers/description.js';
import nameTokenizer from './parser/tokenizers/name.js';
import tagTokenizer from './parser/tokenizers/tag.js';
import typeTokenizer from './parser/tokenizers/type.js';
import getStringifier from './stringifier/index.js';
import alignTransform from './transforms/align.js';
import indentTransform from './transforms/indent.js';
import crlfTransform from './transforms/crlf.js';
import { flow as flowTransform } from './transforms/index.js';
import { rewireSpecs, rewireSource, seedBlock, seedTokens } from './util.js';
export * from './primitives.js';
/**
 * Parses a string containing JSDoc-style comment blocks.
 * @param source raw source text to scan for comment blocks
 * @param options parser options (startLine, fence, spacing, markers, tokenizers)
 * @returns array of parsed Block objects
 */
export function parse(source, options = {}) {
    return getParser(options)(source);
}
// Stringifier with default settings — renders a parsed Block back to source text.
export const stringify = getStringifier();
export { default as inspect } from './stringifier/inspect.js';
// Built-in block transforms, composable with `flow`.
export const transforms = {
    flow: flowTransform,
    align: alignTransform,
    indent: indentTransform,
    crlf: crlfTransform,
};
// Tokenizer factories used to configure a custom parser pipeline.
export const tokenizers = {
    tag: tagTokenizer,
    type: typeTokenizer,
    name: nameTokenizer,
    description: descriptionTokenizer,
};
// Low-level helpers for building and rewiring parsed structures.
export const util = { rewireSpecs, rewireSource, seedBlock, seedTokens };

View File

@@ -0,0 +1,24 @@
import { Line } from '../primitives.js';
/**
* Groups source lines in sections representing tags.
* First section is a block description if present. Last section captures lines starting with
* the last tag to the end of the block, including dangling closing marker.
 * @param {Line[]} block source lines making a single comment block
*/
export type Parser = (block: Line[]) => Line[][];
/**
* Predicate telling if string contains opening/closing escaping sequence
* @param {string} source raw source line
*/
export type Fencer = (source: string) => boolean;
/**
* `Parser` configuration options
*/
export interface Options {
fence: string | Fencer;
}
/**
* Creates configured `Parser`
* @param {Partial<Options>} options
*/
export default function getParser({ fence, }?: Partial<Options>): Parser;

29
node_modules/comment-parser/es6/parser/block-parser.js generated vendored Normal file
View File

@@ -0,0 +1,29 @@
const reTag = /^@\S+/;
/**
* Creates configured `Parser`
* @param {Partial<Options>} options
*/
export default function getParser({ fence = '```', } = {}) {
const fencer = getFencer(fence);
const toggleFence = (source, isFenced) => fencer(source) ? !isFenced : isFenced;
return function parseBlock(source) {
// start with description section
const sections = [[]];
let isFenced = false;
for (const line of source) {
if (reTag.test(line.tokens.description) && !isFenced) {
sections.push([line]);
}
else {
sections[sections.length - 1].push(line);
}
isFenced = toggleFence(line.tokens.description, isFenced);
}
return sections;
};
}
function getFencer(fence) {
if (typeof fence === 'string')
return (source) => source.split(fence).length % 2 === 0;
return fence;
}

11
node_modules/comment-parser/es6/parser/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,11 @@
import { Block, BlockMarkers } from '../primitives.js';
import { Tokenizer } from './tokenizers/index.js';
export interface Options {
startLine: number;
fence: string;
spacing: 'compact' | 'preserve';
markers: BlockMarkers;
tokenizers: Tokenizer[];
}
export type Parser = (source: string) => Block[];
export default function getParser({ startLine, fence, spacing, markers, tokenizers, }?: Partial<Options>): Parser;

39
node_modules/comment-parser/es6/parser/index.js generated vendored Normal file
View File

@@ -0,0 +1,39 @@
import { Markers } from '../primitives.js';
import { splitLines } from '../util.js';
import blockParser from './block-parser.js';
import sourceParser from './source-parser.js';
import specParser from './spec-parser.js';
import tokenizeTag from './tokenizers/tag.js';
import tokenizeType from './tokenizers/type.js';
import tokenizeName from './tokenizers/name.js';
import tokenizeDescription, { getJoiner as getDescriptionJoiner, } from './tokenizers/description.js';
/**
 * Builds the top-level `Parser`: feeds each input line to the source parser,
 * which accumulates lines until a comment block closes; then splits the block
 * into a description section plus one section per tag, and runs the tokenizer
 * pipeline over every tag section.
 */
export default function getParser({ startLine = 0, fence = '```', spacing = 'compact', markers = Markers, tokenizers = [
    tokenizeTag(),
    tokenizeType(spacing),
    tokenizeName(),
    tokenizeDescription(spacing),
], } = {}) {
    // startLine must be a non-negative integer
    if (startLine < 0 || startLine % 1 > 0)
        throw new Error('Invalid startLine');
    const parseSource = sourceParser({ startLine, markers });
    const parseBlock = blockParser({ fence });
    const parseSpec = specParser({ tokenizers });
    const joinDescription = getDescriptionJoiner(spacing);
    return function (source) {
        const blocks = [];
        for (const line of splitLines(source)) {
            // parseSource is stateful: returns null until a block closes
            const lines = parseSource(line);
            if (lines === null)
                continue;
            // sections[0] is the block description; the rest are tag sections
            const sections = parseBlock(lines);
            const specs = sections.slice(1).map(parseSpec);
            blocks.push({
                description: joinDescription(sections[0], markers),
                tags: specs,
                source: lines,
                // block-level problems aggregate every spec's problems
                problems: specs.reduce((acc, spec) => acc.concat(spec.problems), []),
            });
        }
        return blocks;
    };
}

View File

@@ -0,0 +1,7 @@
import { Line, BlockMarkers } from '../primitives.js';
export interface Options {
startLine: number;
markers: BlockMarkers;
}
export type Parser = (source: string) => Line[] | null;
export default function getParser({ startLine, markers, }?: Partial<Options>): Parser;

View File

@@ -0,0 +1,46 @@
import { Markers } from '../primitives.js';
import { seedTokens, splitSpace, splitCR } from '../util.js';
/**
 * Builds a stateful line parser: call it once per source line; it returns
 * `null` while accumulating and the full array of `Line`s once the closing
 * marker is seen. State (current block, line counter) persists across calls.
 */
export default function getParser({ startLine = 0, markers = Markers, } = {}) {
    // lines of the block being accumulated, or null while outside a block
    let block = null;
    // absolute line number, advanced on every call
    let num = startLine;
    return function parseSource(source) {
        let rest = source;
        const tokens = seedTokens();
        // peel trailing CR(s) and leading indentation off first
        [tokens.lineEnd, rest] = splitCR(rest);
        [tokens.start, rest] = splitSpace(rest);
        // the start marker (e.g. "/**") opens a new block, unless the line
        // matches the "nostart" marker (e.g. "/***")
        if (block === null &&
            rest.startsWith(markers.start) &&
            !rest.startsWith(markers.nostart)) {
            block = [];
            tokens.delimiter = rest.slice(0, markers.start.length);
            rest = rest.slice(markers.start.length);
            [tokens.postDelimiter, rest] = splitSpace(rest);
        }
        // not inside a block: just advance the counter
        if (block === null) {
            num++;
            return null;
        }
        const isClosed = rest.trimRight().endsWith(markers.end);
        // middle-line delimiter (e.g. "*"), unless the line is only the end marker
        if (tokens.delimiter === '' &&
            rest.startsWith(markers.delim) &&
            !rest.startsWith(markers.end)) {
            tokens.delimiter = markers.delim;
            rest = rest.slice(markers.delim.length);
            [tokens.postDelimiter, rest] = splitSpace(rest);
        }
        // split the closing marker (and whitespace before it) off the text
        if (isClosed) {
            const trimmed = rest.trimRight();
            tokens.end = rest.slice(trimmed.length - markers.end.length);
            rest = trimmed.slice(0, -markers.end.length);
        }
        tokens.description = rest;
        block.push({ number: num, source, tokens });
        num++;
        // on close, hand out a copy of the block and reset the state
        if (isClosed) {
            const result = block.slice();
            block = null;
            return result;
        }
        return null;
    };
}

View File

@@ -0,0 +1,7 @@
import { Line, Spec } from '../primitives.js';
import { Tokenizer } from './tokenizers/index.js';
export type Parser = (source: Line[]) => Spec;
export interface Options {
tokenizers: Tokenizer[];
}
export default function getParser({ tokenizers }: Options): Parser;

13
node_modules/comment-parser/es6/parser/spec-parser.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
import { seedSpec } from '../util.js';
/**
 * Creates a `Parser` that pipes a tag section's source lines through the
 * configured tokenizers, aborting as soon as one reports a critical problem.
 */
export default function getParser({ tokenizers }) {
    return function parseSpec(source) {
        let spec = seedSpec({ source });
        for (const tokenize of tokenizers) {
            spec = tokenize(spec);
            // a critical problem stops the pipeline for this spec
            const lastProblem = spec.problems[spec.problems.length - 1];
            if (lastProblem != null && lastProblem.critical)
                break;
        }
        return spec;
    };
}

View File

@@ -0,0 +1,20 @@
import { Line, BlockMarkers, Markers } from '../../primitives.js';
import { Tokenizer } from './index.js';
/**
* Walks over provided lines joining description token into a single string.
* */
export type Joiner = (lines: Line[], markers?: BlockMarkers) => string;
/**
* Shortcut for standard Joiners
* compact - strip surrounding whitespace and concat lines using a single string
* preserve - preserves original whitespace and line breaks as is
*/
export type Spacing = 'compact' | 'preserve' | Joiner;
/**
* Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
 * following given spacing strategy
* @param {Spacing} spacing tells how to handle the whitespace
* @param {BlockMarkers} markers tells how to handle comment block delimitation
*/
export default function descriptionTokenizer(spacing?: Spacing, markers?: typeof Markers): Tokenizer;
export declare function getJoiner(spacing: Spacing): Joiner;

View File

@@ -0,0 +1,47 @@
import { Markers } from '../../primitives.js';
/**
 * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
 * following given spacing strategy
 * @param {Spacing} spacing tells how to handle the whitespace
 * @param {BlockMarkers} markers tells how to handle comment block delimitation
 */
export default function descriptionTokenizer(spacing = 'compact', markers = Markers) {
    const join = getJoiner(spacing);
    return (spec) => {
        spec.description = join(spec.source, markers);
        return spec;
    };
}
// Resolves a Spacing shortcut ('compact' | 'preserve') to a Joiner,
// or passes a custom Joiner function through unchanged.
export function getJoiner(spacing) {
    if (spacing === 'compact')
        return compactJoiner;
    if (spacing === 'preserve')
        return preserveJoiner;
    return spacing;
}
// Trims each line's description and glues non-empty ones with single spaces.
function compactJoiner(lines, markers = Markers) {
    return lines
        .map(({ tokens: { description } }) => description.trim())
        .filter((description) => description !== '')
        .join(' ');
}
// Reducer: index of the last line carrying a {type} token, or 0 when none.
const lineNo = (num, { tokens }, i) => tokens.type === '' ? num : i;
// Rebuilds a line's visible text; for delimited lines one character of
// postDelimiter is dropped (it separates the delimiter from the text).
const getDescription = ({ tokens }) => (tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1)) +
    tokens.description;
function preserveJoiner(lines, markers = Markers) {
    if (lines.length === 0)
        return '';
    // skip the opening line with no description
    if (lines[0].tokens.description === '' &&
        lines[0].tokens.delimiter === markers.start)
        lines = lines.slice(1);
    // skip the closing line with no description
    const lastLine = lines[lines.length - 1];
    if (lastLine !== undefined &&
        lastLine.tokens.description === '' &&
        lastLine.tokens.end.endsWith(markers.end))
        lines = lines.slice(0, -1);
    // description starts at the last line of type definition
    lines = lines.slice(lines.reduce(lineNo, 0));
    return lines.map(getDescription).join('\n');
}

View File

@@ -0,0 +1,7 @@
import { Spec } from '../../primitives.js';
/**
 * Splits `spec.lines[].tokens.description` into other tokens,
 * and populates the spec.{tag, name, type, description}. Invoked in a chain
 * with other tokenizers; operations listed above can be moved to separate tokenizers
*/
export type Tokenizer = (spec: Spec) => Spec;

View File

@@ -0,0 +1 @@
export {};

View File

@@ -0,0 +1,6 @@
import { Tokenizer } from './index.js';
/**
* Splits remaining `spec.lines[].tokens.description` into `name` and `descriptions` tokens,
* and populates the `spec.name`
*/
export default function nameTokenizer(): Tokenizer;

View File

@@ -0,0 +1,91 @@
import { splitSpace, isSpace } from '../../util.js';
// Tells if the string is wrapped in double quotes.
// NOTE(review): for falsy input this returns the input itself rather than
// `false`; callers only use it in boolean position, so this is benign.
const isQuoted = (s) => s && s.startsWith('"') && s.endsWith('"');
/**
 * Splits remaining `spec.lines[].tokens.description` into `name` and `description` tokens,
 * and populates the `spec.name`. Handles quoted literal names, optional
 * `[name]` syntax, and `[name=default]` default values.
 */
export default function nameTokenizer() {
    // reducer: index of the line where the {type} token ends (0 when no type)
    const typeEnd = (num, { tokens }, i) => tokens.type === '' ? num : i;
    return (spec) => {
        // look for the name in the line where {type} ends
        const { tokens } = spec.source[spec.source.reduce(typeEnd, 0)];
        const source = tokens.description.trimLeft();
        const quotedGroups = source.split('"');
        // if it starts with quoted group, assume it is a literal
        if (quotedGroups.length > 1 &&
            quotedGroups[0] === '' &&
            quotedGroups.length % 2 === 1) {
            spec.name = quotedGroups[1];
            tokens.name = `"${quotedGroups[1]}"`;
            [tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
            return spec;
        }
        let brackets = 0;
        let name = '';
        let optional = false;
        let defaultValue;
        // assume name is non-space string or anything wrapped into brackets
        for (const ch of source) {
            if (brackets === 0 && isSpace(ch))
                break;
            if (ch === '[')
                brackets++;
            if (ch === ']')
                brackets--;
            name += ch;
        }
        // unbalanced [ ]: cannot tell where the name ends
        if (brackets !== 0) {
            spec.problems.push({
                code: 'spec:name:unpaired-brackets',
                message: 'unpaired brackets',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        // keep the raw token (with brackets) for the tokens record
        const nameToken = name;
        // [name] marks an optional parameter; [name=value] also carries a default
        if (name[0] === '[' && name[name.length - 1] === ']') {
            optional = true;
            name = name.slice(1, -1);
            const parts = name.split('=');
            name = parts[0].trim();
            if (parts[1] !== undefined)
                defaultValue = parts.slice(1).join('=').trim();
            if (name === '') {
                spec.problems.push({
                    code: 'spec:name:empty-name',
                    message: 'empty name',
                    line: spec.source[0].number,
                    critical: true,
                });
                return spec;
            }
            if (defaultValue === '') {
                spec.problems.push({
                    code: 'spec:name:empty-default',
                    message: 'empty default value',
                    line: spec.source[0].number,
                    critical: true,
                });
                return spec;
            }
            // has "=" and is not a string, except for "=>"
            if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
                spec.problems.push({
                    code: 'spec:name:invalid-default',
                    message: 'invalid default value syntax',
                    line: spec.source[0].number,
                    critical: true,
                });
                return spec;
            }
        }
        spec.optional = optional;
        spec.name = name;
        tokens.name = nameToken;
        if (defaultValue !== undefined)
            spec.default = defaultValue;
        [tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
        return spec;
    };
}

View File

@@ -0,0 +1,6 @@
import { Tokenizer } from './index.js';
/**
* Splits the `@prefix` from remaining `Spec.lines[].token.description` into the `tag` token,
* and populates `spec.tag`
*/
export default function tagTokenizer(): Tokenizer;

View File

@@ -0,0 +1,24 @@
/**
* Splits the `@prefix` from remaining `Spec.lines[].token.description` into the `tag` token,
* and populates `spec.tag`
*/
export default function tagTokenizer() {
return (spec) => {
const { tokens } = spec.source[0];
const match = tokens.description.match(/\s*(@(\S+))(\s*)/);
if (match === null) {
spec.problems.push({
code: 'spec:tag:prefix',
message: 'tag should start with "@" symbol',
line: spec.source[0].number,
critical: true,
});
return spec;
}
tokens.tag = match[1];
tokens.postTag = match[3];
tokens.description = tokens.description.slice(match[0].length);
spec.tag = match[2];
return spec;
};
}

View File

@@ -0,0 +1,27 @@
import { Tokenizer } from './index.js';
/**
* Joiner is a function taking collected type token string parts,
* and joining them together. In most of the cases this will be
 * a single piece like {type-name}, but a type may go over multiple lines
* ```
* @tag {function(
* number,
* string
* )}
* ```
*/
export type Joiner = (parts: string[]) => string;
/**
* Shortcut for standard Joiners
* compact - trim surrounding space, replace line breaks with a single space
* preserve - concat as is
*/
export type Spacing = 'compact' | 'preserve' | Joiner;
/**
 * Splits remaining `Spec.lines[].tokens.description` into `type` and `description`
 * tokens and populates `Spec.type`
*
* @param {Spacing} spacing tells how to deal with a whitespace
* for type values going over multiple lines
*/
export default function typeTokenizer(spacing?: Spacing): Tokenizer;

View File

@@ -0,0 +1,65 @@
import { splitSpace } from '../../util.js';
/**
 * Splits remaining `Spec.lines[].tokens.description` into `type` and `description`
 * tokens and populates `Spec.type`
 *
 * @param {Spacing} spacing tells how to deal with a whitespace
 * for type values going over multiple lines
 */
export default function typeTokenizer(spacing = 'compact') {
    const join = getJoiner(spacing);
    return (spec) => {
        let curlies = 0;
        let lines = [];
        // collect a balanced {...} starting on the first line, possibly
        // spanning several lines
        for (const [i, { tokens }] of spec.source.entries()) {
            let type = '';
            // no opening curly on the first line means the spec has no type
            if (i === 0 && tokens.description[0] !== '{')
                return spec;
            for (const ch of tokens.description) {
                if (ch === '{')
                    curlies++;
                if (ch === '}')
                    curlies--;
                type += ch;
                if (curlies === 0)
                    break;
            }
            lines.push([tokens, type]);
            if (curlies === 0)
                break;
        }
        // ran out of lines with curlies still unbalanced
        if (curlies !== 0) {
            spec.problems.push({
                code: 'spec:type:unpaired-curlies',
                message: 'unpaired curlies',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        const parts = [];
        // indentation of the first line; continuation lines are re-sliced so
        // this shared indent stays in postDelimiter and the rest joins the type
        const offset = lines[0][0].postDelimiter.length;
        for (const [i, [tokens, type]] of lines.entries()) {
            tokens.type = type;
            if (i > 0) {
                tokens.type = tokens.postDelimiter.slice(offset) + type;
                tokens.postDelimiter = tokens.postDelimiter.slice(0, offset);
            }
            [tokens.postType, tokens.description] = splitSpace(tokens.description.slice(type.length));
            parts.push(tokens.type);
        }
        // strip the outer curly braces before joining the parts
        parts[0] = parts[0].slice(1);
        parts[parts.length - 1] = parts[parts.length - 1].slice(0, -1);
        spec.type = join(parts);
        return spec;
    };
}
const trim = (x) => x.trim();
// Resolves a Spacing shortcut to a Joiner, or passes a custom Joiner through.
function getJoiner(spacing) {
    if (spacing === 'compact')
        return (t) => t.map(trim).join('');
    else if (spacing === 'preserve')
        return (t) => t.join('\n');
    else
        return spacing;
}

54
node_modules/comment-parser/es6/primitives.d.ts generated vendored Normal file
View File

@@ -0,0 +1,54 @@
/** @deprecated */
export declare enum Markers {
start = "/**",
nostart = "/***",
delim = "*",
end = "*/"
}
export interface BlockMarkers {
start: string;
nostart: string;
delim: string;
end: string;
}
export interface Block {
description: string;
tags: Spec[];
source: Line[];
problems: Problem[];
}
export interface Spec {
tag: string;
name: string;
default?: string;
type: string;
optional: boolean;
description: string;
problems: Problem[];
source: Line[];
}
export interface Line {
number: number;
source: string;
tokens: Tokens;
}
export interface Tokens {
start: string;
delimiter: string;
postDelimiter: string;
tag: string;
postTag: string;
name: string;
postName: string;
type: string;
postType: string;
description: string;
end: string;
lineEnd: string;
}
export interface Problem {
code: 'unhandled' | 'custom' | 'source:startline' | 'spec:tag:prefix' | 'spec:type:unpaired-curlies' | 'spec:name:unpaired-brackets' | 'spec:name:empty-name' | 'spec:name:invalid-default' | 'spec:name:empty-default';
message: string;
line: number;
critical: boolean;
}

8
node_modules/comment-parser/es6/primitives.js generated vendored Normal file
View File

@@ -0,0 +1,8 @@
/** @deprecated */
export var Markers;
(function (Markers) {
Markers["start"] = "/**";
Markers["nostart"] = "/***";
Markers["delim"] = "*";
Markers["end"] = "*/";
})(Markers = Markers || (Markers = {}));

View File

@@ -0,0 +1,3 @@
import { Block } from '../primitives.js';
export type Stringifier = (block: Block) => string;
export default function getStringifier(): Stringifier;

17
node_modules/comment-parser/es6/stringifier/index.js generated vendored Normal file
View File

@@ -0,0 +1,17 @@
function join(tokens) {
return (tokens.start +
tokens.delimiter +
tokens.postDelimiter +
tokens.tag +
tokens.postTag +
tokens.type +
tokens.postType +
tokens.name +
tokens.postName +
tokens.description +
tokens.end +
tokens.lineEnd);
}
export default function getStringifier() {
return (block) => block.source.map(({ tokens }) => join(tokens)).join('\n');
}

View File

@@ -0,0 +1,2 @@
import { Block } from '../primitives.js';
export default function inspect({ source }: Block): string;

44
node_modules/comment-parser/es6/stringifier/inspect.js generated vendored Normal file
View File

@@ -0,0 +1,44 @@
import { isSpace } from '../util.js';
// Baseline column widths for every token field of the debug table.
const zeroWidth = {
    line: 0,
    start: 0,
    delimiter: 0,
    postDelimiter: 0,
    tag: 0,
    postTag: 0,
    name: 0,
    postName: 0,
    type: 0,
    postType: 0,
    description: 0,
    end: 0,
    lineEnd: 0,
};
// Column header overrides (default header is the field name itself).
const headers = { lineEnd: 'CR' };
const fields = Object.keys(zeroWidth);
// Renders whitespace-only values as "{length}" so they stay visible.
const repr = (x) => (isSpace(x) ? `{${x.length}}` : x);
// Wraps a row's cells in pipe separators.
const frame = (line) => '|' + line.join('|') + '|';
// Pads every token of a line to its column width.
const align = (width, tokens) => Object.keys(tokens).map((k) => repr(tokens[k]).padEnd(width[k]));
/**
 * Renders a Block's source tokens as a pipe-separated text table,
 * one row per source line — useful for debugging tokenization.
 */
export default function inspect({ source }) {
    var _a, _b;
    if (source.length === 0)
        return '';
    // compute each column's width from headers and token contents
    const width = Object.assign({}, zeroWidth);
    for (const f of fields)
        width[f] = ((_a = headers[f]) !== null && _a !== void 0 ? _a : f).length;
    for (const { number, tokens } of source) {
        width.line = Math.max(width.line, number.toString().length);
        for (const k in tokens)
            width[k] = Math.max(width[k], repr(tokens[k]).length);
    }
    // header row, separator row, then one row per source line
    const lines = [[], []];
    for (const f of fields)
        lines[0].push(((_b = headers[f]) !== null && _b !== void 0 ? _b : f).padEnd(width[f]));
    for (const f of fields)
        lines[1].push('-'.padEnd(width[f], '-'));
    for (const { number, tokens } of source) {
        const line = number.toString().padStart(width.line);
        lines.push([line, ...align(width, tokens)]);
    }
    return lines.map(frame).join('\n');
}

View File

@@ -0,0 +1,3 @@
import { Transform } from './index.js';
import { Markers } from '../primitives.js';
export default function align(markers?: typeof Markers): Transform;

93
node_modules/comment-parser/es6/transforms/align.js generated vendored Normal file
View File

@@ -0,0 +1,93 @@
// TypeScript-emitted __rest helper: object rest destructuring at runtime.
var __rest = (this && this.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
import { Markers } from '../primitives.js';
import { rewireSource } from '../util.js';
// Column widths tracked while measuring the block.
const zeroWidth = {
    start: 0,
    tag: 0,
    type: 0,
    name: 0,
};
// Reducer measuring the max width of each aligned column across the block;
// `start` is taken from the line holding the opening delimiter.
const getWidth = (markers = Markers) => (w, { tokens: t }) => ({
    start: t.delimiter === markers.start ? t.start.length : w.start,
    tag: Math.max(w.tag, t.tag.length),
    type: Math.max(w.type, t.type.length),
    name: Math.max(w.name, t.name.length),
});
// Builds a string of `len` spaces.
const space = (len) => ''.padStart(len, ' ');
/**
 * Transform that re-indents a block and pads tag/type/name tokens into
 * aligned columns. Widths are measured per block in the returned closure;
 * `update` rewrites one line at a time (tokens are copied, not mutated).
 */
export default function align(markers = Markers) {
    // set once the first @tag line is seen; lines before the tags only get
    // their indentation normalized
    let intoTags = false;
    let w;
    function update(line) {
        const tokens = Object.assign({}, line.tokens);
        if (tokens.tag !== '')
            intoTags = true;
        const isEmpty = tokens.tag === '' &&
            tokens.name === '' &&
            tokens.type === '' &&
            tokens.description === '';
        // dangling '*/'
        if (tokens.end === markers.end && isEmpty) {
            tokens.start = space(w.start + 1);
            return Object.assign(Object.assign({}, line), { tokens });
        }
        // re-indent depending on which delimiter the line carries
        switch (tokens.delimiter) {
            case markers.start:
                tokens.start = space(w.start);
                break;
            case markers.delim:
                tokens.start = space(w.start + 1);
                break;
            default:
                tokens.delimiter = '';
                tokens.start = space(w.start + 2); // compensate delimiter
        }
        if (!intoTags) {
            tokens.postDelimiter = tokens.description === '' ? '' : ' ';
            return Object.assign(Object.assign({}, line), { tokens });
        }
        // trailing paddings are dropped when nothing follows them on the line
        const nothingAfter = {
            delim: false,
            tag: false,
            type: false,
            name: false,
        };
        if (tokens.description === '') {
            nothingAfter.name = true;
            tokens.postName = '';
            if (tokens.name === '') {
                nothingAfter.type = true;
                tokens.postType = '';
                if (tokens.type === '') {
                    nothingAfter.tag = true;
                    tokens.postTag = '';
                    if (tokens.tag === '') {
                        nothingAfter.delim = true;
                    }
                }
            }
        }
        tokens.postDelimiter = nothingAfter.delim ? '' : ' ';
        if (!nothingAfter.tag)
            tokens.postTag = space(w.tag - tokens.tag.length + 1);
        if (!nothingAfter.type)
            tokens.postType = space(w.type - tokens.type.length + 1);
        if (!nothingAfter.name)
            tokens.postName = space(w.name - tokens.name.length + 1);
        return Object.assign(Object.assign({}, line), { tokens });
    }
    return (_a) => {
        // split `source` off the block; remaining fields pass through unchanged
        var { source } = _a, fields = __rest(_a, ["source"]);
        w = source.reduce(getWidth(markers), Object.assign({}, zeroWidth));
        return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
    };
}

3
node_modules/comment-parser/es6/transforms/crlf.d.ts generated vendored Normal file
View File

@@ -0,0 +1,3 @@
import { Transform } from './index.js';
export type Ending = 'LF' | 'CRLF';
export default function crlf(ending: Ending): Transform;

34
node_modules/comment-parser/es6/transforms/crlf.js generated vendored Normal file
View File

@@ -0,0 +1,34 @@
// TypeScript-emitted __rest helper: object rest destructuring at runtime.
var __rest = (this && this.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
import { rewireSource } from '../util.js';
// NOTE(review): `order` appears unused in this module — verify before removing.
const order = [
    'end',
    'description',
    'postType',
    'type',
    'postName',
    'name',
    'postTag',
    'tag',
    'postDelimiter',
    'delimiter',
    'start',
];
/**
 * Transform that normalizes line endings: 'LF' clears each line's lineEnd
 * token, any other ending ('CRLF') sets it to a carriage return.
 */
export default function crlf(ending) {
    // rewrites one line, replacing only the lineEnd token
    function update(line) {
        return Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { lineEnd: ending === 'LF' ? '' : '\r' }) });
    }
    return (_a) => {
        // split `source` off the block; remaining fields pass through unchanged
        var { source } = _a, fields = __rest(_a, ["source"]);
        return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
    };
}

View File

@@ -0,0 +1,2 @@
import { Transform } from './index.js';
export default function indent(pos: number): Transform;

32
node_modules/comment-parser/es6/transforms/indent.js generated vendored Normal file
View File

@@ -0,0 +1,32 @@
// TypeScript-emitted __rest helper: object rest destructuring at runtime.
var __rest = (this && this.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
import { rewireSource } from '../util.js';
// Removes `offset` characters from a line's indentation.
const pull = (offset) => (str) => str.slice(offset);
// Appends `offset` spaces to a line's indentation.
const push = (offset) => {
    const space = ''.padStart(offset, ' ');
    return (str) => str + space;
};
/**
 * Transform that shifts a block's indentation so its first line starts at
 * column `pos`; every following line is shifted by the same amount.
 */
export default function indent(pos) {
    // shift function chosen lazily from the first line's indentation
    let shift;
    const pad = (start) => {
        if (shift === undefined) {
            const offset = pos - start.length;
            shift = offset > 0 ? push(offset) : pull(-offset);
        }
        return shift(start);
    };
    const update = (line) => (Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { start: pad(line.tokens.start) }) }));
    return (_a) => {
        // split `source` off the block; remaining fields pass through unchanged
        var { source } = _a, fields = __rest(_a, ["source"]);
        return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
    };
}

View File

@@ -0,0 +1,3 @@
import { Block } from '../primitives.js';
export type Transform = (Block: Block) => Block;
export declare function flow(...transforms: Transform[]): Transform;

3
node_modules/comment-parser/es6/transforms/index.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
export function flow(...transforms) {
return (block) => transforms.reduce((block, t) => t(block), block);
}

21
node_modules/comment-parser/es6/util.d.ts generated vendored Normal file
View File

@@ -0,0 +1,21 @@
import { Block, Tokens, Spec } from './primitives.js';
export declare function isSpace(source: string): boolean;
export declare function hasCR(source: string): boolean;
export declare function splitCR(source: string): [string, string];
export declare function splitSpace(source: string): [string, string];
export declare function splitLines(source: string): string[];
export declare function seedBlock(block?: Partial<Block>): Block;
export declare function seedSpec(spec?: Partial<Spec>): Spec;
export declare function seedTokens(tokens?: Partial<Tokens>): Tokens;
/**
* Assures Block.tags[].source contains references to the Block.source items,
* using Block.source as a source of truth. This is a counterpart of rewireSpecs
 * @param block parsed comment block
*/
export declare function rewireSource(block: Block): Block;
/**
* Assures Block.source contains references to the Block.tags[].source items,
* using Block.tags[].source as a source of truth. This is a counterpart of rewireSource
 * @param block parsed comment block
*/
export declare function rewireSpecs(block: Block): Block;

52
node_modules/comment-parser/es6/util.js generated vendored Normal file
View File

@@ -0,0 +1,52 @@
export function isSpace(source) {
return /^\s+$/.test(source);
}
export function hasCR(source) {
return /\r$/.test(source);
}
export function splitCR(source) {
const matches = source.match(/\r+$/);
return matches == null
? ['', source]
: [source.slice(-matches[0].length), source.slice(0, -matches[0].length)];
}
export function splitSpace(source) {
const matches = source.match(/^\s+/);
return matches == null
? ['', source]
: [source.slice(0, matches[0].length), source.slice(matches[0].length)];
}
export function splitLines(source) {
return source.split(/\n/);
}
export function seedBlock(block = {}) {
return Object.assign({ description: '', tags: [], source: [], problems: [] }, block);
}
export function seedSpec(spec = {}) {
return Object.assign({ tag: '', name: '', type: '', optional: false, description: '', problems: [], source: [] }, spec);
}
export function seedTokens(tokens = {}) {
return Object.assign({ start: '', delimiter: '', postDelimiter: '', tag: '', postTag: '', name: '', postName: '', type: '', postType: '', description: '', end: '', lineEnd: '' }, tokens);
}
/**
* Assures Block.tags[].source contains references to the Block.source items,
* using Block.source as a source of truth. This is a counterpart of rewireSpecs
* @param block parsed coments block
*/
export function rewireSource(block) {
const source = block.source.reduce((acc, line) => acc.set(line.number, line), new Map());
for (const spec of block.tags) {
spec.source = spec.source.map((line) => source.get(line.number));
}
return block;
}
/**
* Assures Block.source contains references to the Block.tags[].source items,
* using Block.tags[].source as a source of truth. This is a counterpart of rewireSource
* @param block parsed coments block
*/
export function rewireSpecs(block) {
const source = block.tags.reduce((acc, spec) => spec.source.reduce((acc, line) => acc.set(line.number, line), acc), new Map());
block.source = block.source.map((line) => source.get(line.number) || line);
return block;
}

209
node_modules/comment-parser/jest.config.cjs generated vendored Normal file
View File

@@ -0,0 +1,209 @@
// For a detailed explanation regarding each configuration property, visit:
// https://jestjs.io/docs/en/configuration.html
const { compilerOptions: tsconfig } = JSON.parse(
require('fs').readFileSync('./tsconfig.node.json')
);
module.exports = {
globals: {
'ts-jest': {
tsconfig,
},
},
// All imported modules in your tests should be mocked automatically
// automock: false,
// Stop running tests after `n` failures
// bail: 0,
// The directory where Jest should store its cached dependency information
// cacheDirectory: "/private/var/folders/_g/g97k3tbx31x08qqy2z18kxq80000gn/T/jest_dx",
// Automatically clear mock calls and instances between every test
// clearMocks: false,
// Indicates whether the coverage information should be collected while executing the test
collectCoverage: true,
// An array of glob patterns indicating a set of files for which coverage information should be collected
// collectCoverageFrom: undefined,
// The directory where Jest should output its coverage files
// coverageDirectory: ".coverage",
// An array of regexp pattern strings used to skip coverage collection
coveragePathIgnorePatterns: ['/node_modules/', '/lib/', '/tests/'],
// Indicates which provider should be used to instrument code for coverage
coverageProvider: 'v8',
// A list of reporter names that Jest uses when writing coverage reports
// coverageReporters: [
// "json",
// "text",
// "lcov",
// "clover"
// ],
// An object that configures minimum threshold enforcement for coverage results
// coverageThreshold: {
// global : {
// branches: 85,
// functions: 85,
// lines: 85,
// statements: 85
// }
// },
// A path to a custom dependency extractor
// dependencyExtractor: undefined,
// Make calling deprecated APIs throw helpful error messages
// errorOnDeprecated: false,
// Force coverage collection from ignored files using an array of glob patterns
// forceCoverageMatch: [],
// A path to a module which exports an async function that is triggered once before all test suites
// globalSetup: undefined,
// A path to a module which exports an async function that is triggered once after all test suites
// globalTeardown: undefined,
// A set of global variables that need to be available in all test environments
// globals: {},
// The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
// maxWorkers: "50%",
// An array of directory names to be searched recursively up from the requiring module's location
// moduleDirectories: [
// "node_modules"
// ],
// An array of file extensions your modules use
// moduleFileExtensions: [
// "js",
// "json",
// "jsx",
// "ts",
// "tsx",
// "node"
// ],
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
moduleNameMapper: {
[/(.+)\.js$/.source]: ['$1.js', '$1.ts']
},
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
// modulePathIgnorePatterns: [],
// Activates notifications for test results
// notify: false,
// An enum that specifies notification mode. Requires { notify: true }
// notifyMode: "failure-change",
// A preset that is used as a base for Jest's configuration
preset: 'ts-jest',
// Run tests from one or more projects
// projects: undefined,
// Use this configuration option to add custom reporters to Jest
// reporters: undefined,
// Automatically reset mock state between every test
// resetMocks: false,
// Reset the module registry before running each individual test
// resetModules: false,
// A path to a custom resolver
// resolver: undefined,
// Automatically restore mock state between every test
// restoreMocks: false,
// The root directory that Jest should scan for tests and modules within
// rootDir: undefined,
// A list of paths to directories that Jest should use to search for files in
roots: ['<rootDir>/tests/'],
// Allows you to use a custom runner instead of Jest's default test runner
// runner: "jest-runner",
// The paths to modules that run some code to configure or set up the testing environment before each test
// setupFiles: [],
// A list of paths to modules that run some code to configure or set up the testing framework before each test
// setupFilesAfterEnv: [],
// The number of seconds after which a test is considered as slow and reported as such in the results.
// slowTestThreshold: 5,
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
// snapshotSerializers: [],
// The test environment that will be used for testing
testEnvironment: 'node',
// Options that will be passed to the testEnvironment
// testEnvironmentOptions: {},
// Adds a location field to test results
// testLocationInResults: false,
// The glob patterns Jest uses to detect test files
// testMatch: [
// "**/__tests__/**/*.[jt]s?(x)",
// "**/?(*.)+(spec|test).[tj]s?(x)"
// ],
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
// testPathIgnorePatterns: [
// "/node_modules/"
// ],
// The regexp pattern or array of patterns that Jest uses to detect test files
// testRegex: [],
// This option allows the use of a custom results processor
// testResultsProcessor: undefined,
// This option allows use of a custom test runner
// testRunner: "jasmine2",
// This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
// testURL: "http://localhost",
// Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
// timers: "real",
// A map from regular expressions to paths to transformers
transform: {
'^.+\\.ts$': 'ts-jest',
},
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
// transformIgnorePatterns: [
// "/node_modules/",
// "\\.pnp\\.[^\\/]+$"
// ],
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,
// Indicates whether each individual test should be reported during the run
// verbose: undefined,
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
// watchPathIgnorePatterns: [],
// Whether to use watchman for file crawling
// watchman: true,
};

88
node_modules/comment-parser/lib/index.cjs generated vendored Normal file
View File

@@ -0,0 +1,88 @@
"use strict";

// CommonJS build of the comment-parser public entry point.
// Wires together the parser, stringifier, transforms, tokenizers, and
// utility helpers, and re-exports everything from ./primitives.cjs.
//
// __createBinding / __exportStar below are standard TypeScript-emitted
// helpers implementing `export * from ...` for CommonJS.
var __createBinding = this && this.__createBinding || (Object.create ? function (o, m, k, k2) {
  if (k2 === undefined) k2 = k;
  var desc = Object.getOwnPropertyDescriptor(m, k);
  if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
    // Re-export through a live getter so later mutations of m[k] are visible.
    desc = {
      enumerable: true,
      get: function () {
        return m[k];
      }
    };
  }
  Object.defineProperty(o, k2, desc);
} : function (o, m, k, k2) {
  if (k2 === undefined) k2 = k;
  o[k2] = m[k];
});
var __exportStar = this && this.__exportStar || function (m, exports) {
  // Copy every named export except "default" and anything already defined.
  for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.util = exports.tokenizers = exports.transforms = exports.inspect = exports.stringify = exports.parse = void 0;
const index_js_1 = require("./parser/index.cjs");
const description_js_1 = require("./parser/tokenizers/description.cjs");
const name_js_1 = require("./parser/tokenizers/name.cjs");
const tag_js_1 = require("./parser/tokenizers/tag.cjs");
const type_js_1 = require("./parser/tokenizers/type.cjs");
const index_js_2 = require("./stringifier/index.cjs");
const align_js_1 = require("./transforms/align.cjs");
const indent_js_1 = require("./transforms/indent.cjs");
const crlf_js_1 = require("./transforms/crlf.cjs");
const index_js_3 = require("./transforms/index.cjs");
const util_js_1 = require("./util.cjs");
// Re-export the primitive types/values (Markers, Block, Spec, ...).
__exportStar(require("./primitives.cjs"), exports);
// Parses a source string into comment blocks using a parser configured
// with the given (partial) options.
function parse(source, options = {}) {
  return (0, index_js_1.default)(options)(source);
}
exports.parse = parse;
// Default stringifier instance; turns parsed blocks back into source text.
exports.stringify = (0, index_js_2.default)();
var inspect_js_1 = require("./stringifier/inspect.cjs");
Object.defineProperty(exports, "inspect", {
  enumerable: true,
  get: function () {
    return inspect_js_1.default;
  }
});
// Built-in source transforms, plus `flow` to compose them.
exports.transforms = {
  flow: index_js_3.flow,
  align: align_js_1.default,
  indent: indent_js_1.default,
  crlf: crlf_js_1.default
};
// Built-in tokenizers used to split a tag line into tag/type/name/description.
exports.tokenizers = {
  tag: tag_js_1.default,
  type: type_js_1.default,
  name: name_js_1.default,
  description: description_js_1.default
};
// Helpers for seeding and rewiring parsed structures.
exports.util = {
  rewireSpecs: util_js_1.rewireSpecs,
  rewireSource: util_js_1.rewireSource,
  seedBlock: util_js_1.seedBlock,
  seedTokens: util_js_1.seedTokens
};
//# sourceMappingURL=index.cjs.map

1
node_modules/comment-parser/lib/index.cjs.map generated vendored Normal file

File diff suppressed because one or more lines are too long

32
node_modules/comment-parser/lib/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,32 @@
import { Options as ParserOptions } from './parser/index.js';
import descriptionTokenizer from './parser/tokenizers/description.js';
import nameTokenizer from './parser/tokenizers/name.js';
import tagTokenizer from './parser/tokenizers/tag.js';
import typeTokenizer from './parser/tokenizers/type.js';
import alignTransform from './transforms/align.js';
import indentTransform from './transforms/indent.js';
import crlfTransform from './transforms/crlf.js';
import { flow as flowTransform } from './transforms/index.js';
import { rewireSpecs, rewireSource, seedBlock, seedTokens } from './util.js';
export * from './primitives.js';
export declare function parse(source: string, options?: Partial<ParserOptions>): import("./primitives.js").Block[];
export declare const stringify: import("./stringifier/index.js").Stringifier;
export { default as inspect } from './stringifier/inspect.js';
export declare const transforms: {
flow: typeof flowTransform;
align: typeof alignTransform;
indent: typeof indentTransform;
crlf: typeof crlfTransform;
};
export declare const tokenizers: {
tag: typeof tagTokenizer;
type: typeof typeTokenizer;
name: typeof nameTokenizer;
description: typeof descriptionTokenizer;
};
export declare const util: {
rewireSpecs: typeof rewireSpecs;
rewireSource: typeof rewireSource;
seedBlock: typeof seedBlock;
seedTokens: typeof seedTokens;
};

View File

@@ -0,0 +1,44 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
const reTag = /^@\S+/;
/**
* Creates configured `Parser`
* @param {Partial<Options>} options
*/
function getParser({
fence = '```'
} = {}) {
const fencer = getFencer(fence);
const toggleFence = (source, isFenced) => fencer(source) ? !isFenced : isFenced;
return function parseBlock(source) {
// start with description section
const sections = [[]];
let isFenced = false;
for (const line of source) {
if (reTag.test(line.tokens.description) && !isFenced) {
sections.push([line]);
} else {
sections[sections.length - 1].push(line);
}
isFenced = toggleFence(line.tokens.description, isFenced);
}
return sections;
};
}
exports.default = getParser;
function getFencer(fence) {
if (typeof fence === 'string') return source => source.split(fence).length % 2 === 0;
return fence;
}
//# sourceMappingURL=block-parser.cjs.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["block-parser.js"],"names":["Object","defineProperty","exports","value","reTag","getParser","fence","fencer","getFencer","toggleFence","source","isFenced","parseBlock","sections","line","test","tokens","description","push","length","default","split"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;AACA,MAAMC,KAAK,GAAG,OAAd;AACA;AACA;AACA;AACA;;AACA,SAASC,SAAT,CAAmB;AAAEC,EAAAA,KAAK,GAAG;AAAV,IAAqB,EAAxC,EAA4C;AACxC,QAAMC,MAAM,GAAGC,SAAS,CAACF,KAAD,CAAxB;;AACA,QAAMG,WAAW,GAAG,CAACC,MAAD,EAASC,QAAT,KAAsBJ,MAAM,CAACG,MAAD,CAAN,GAAiB,CAACC,QAAlB,GAA6BA,QAAvE;;AACA,SAAO,SAASC,UAAT,CAAoBF,MAApB,EAA4B;AAC/B;AACA,UAAMG,QAAQ,GAAG,CAAC,EAAD,CAAjB;AACA,QAAIF,QAAQ,GAAG,KAAf;;AACA,SAAK,MAAMG,IAAX,IAAmBJ,MAAnB,EAA2B;AACvB,UAAIN,KAAK,CAACW,IAAN,CAAWD,IAAI,CAACE,MAAL,CAAYC,WAAvB,KAAuC,CAACN,QAA5C,EAAsD;AAClDE,QAAAA,QAAQ,CAACK,IAAT,CAAc,CAACJ,IAAD,CAAd;AACH,OAFD,MAGK;AACDD,QAAAA,QAAQ,CAACA,QAAQ,CAACM,MAAT,GAAkB,CAAnB,CAAR,CAA8BD,IAA9B,CAAmCJ,IAAnC;AACH;;AACDH,MAAAA,QAAQ,GAAGF,WAAW,CAACK,IAAI,CAACE,MAAL,CAAYC,WAAb,EAA0BN,QAA1B,CAAtB;AACH;;AACD,WAAOE,QAAP;AACH,GAdD;AAeH;;AACDX,OAAO,CAACkB,OAAR,GAAkBf,SAAlB;;AACA,SAASG,SAAT,CAAmBF,KAAnB,EAA0B;AACtB,MAAI,OAAOA,KAAP,KAAiB,QAArB,EACI,OAAQI,MAAD,IAAYA,MAAM,CAACW,KAAP,CAAaf,KAAb,EAAoBa,MAApB,GAA6B,CAA7B,KAAmC,CAAtD;AACJ,SAAOb,KAAP;AACH","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst reTag = /^@\\S+/;\n/**\n * Creates configured `Parser`\n * @param {Partial<Options>} options\n */\nfunction getParser({ fence = '```', } = {}) {\n const fencer = getFencer(fence);\n const toggleFence = (source, isFenced) => fencer(source) ? 
!isFenced : isFenced;\n return function parseBlock(source) {\n // start with description section\n const sections = [[]];\n let isFenced = false;\n for (const line of source) {\n if (reTag.test(line.tokens.description) && !isFenced) {\n sections.push([line]);\n }\n else {\n sections[sections.length - 1].push(line);\n }\n isFenced = toggleFence(line.tokens.description, isFenced);\n }\n return sections;\n };\n}\nexports.default = getParser;\nfunction getFencer(fence) {\n if (typeof fence === 'string')\n return (source) => source.split(fence).length % 2 === 0;\n return fence;\n}\n"],"file":"block-parser.cjs"}

View File

@@ -0,0 +1,24 @@
import { Line } from '../primitives.js';
/**
 * Groups source lines in sections representing tags.
 * First section is a block description if present. Last section captures lines starting with
 * the last tag to the end of the block, including dangling closing marker.
 * @param {Line[]} block source lines making a single comment block
 */
export type Parser = (block: Line[]) => Line[][];
/**
 * Predicate telling if string contains opening/closing escaping sequence
 * @param {string} source raw source line
 */
export type Fencer = (source: string) => boolean;
/**
 * `Parser` configuration options
 */
export interface Options {
    fence: string | Fencer;
}
/**
 * Creates configured `Parser`
 * @param {Partial<Options>} options
 */
export default function getParser({ fence, }?: Partial<Options>): Parser;

65
node_modules/comment-parser/lib/parser/index.cjs generated vendored Normal file
View File

@@ -0,0 +1,65 @@
"use strict";

// Top-level comment parser: splits raw source into comment blocks, each
// block into tag sections, and each section into a tag Spec via the
// configured tokenizer pipeline.
Object.defineProperty(exports, "__esModule", {
  value: true
});
const primitives_js_1 = require("../primitives.cjs");
const util_js_1 = require("../util.cjs");
const block_parser_js_1 = require("./block-parser.cjs");
const source_parser_js_1 = require("./source-parser.cjs");
const spec_parser_js_1 = require("./spec-parser.cjs");
const tag_js_1 = require("./tokenizers/tag.cjs");
const type_js_1 = require("./tokenizers/type.cjs");
const name_js_1 = require("./tokenizers/name.cjs");
const description_js_1 = require("./tokenizers/description.cjs");
/**
 * Creates a configured source-string parser.
 * @param {Partial<Options>} options startLine, fence, spacing, markers,
 *   and the tokenizer pipeline (defaults to tag -> type -> name -> description)
 */
function getParser({
  startLine = 0,
  fence = '```',
  spacing = 'compact',
  markers = primitives_js_1.Markers,
  tokenizers = [(0, tag_js_1.default)(), (0, type_js_1.default)(spacing), (0, name_js_1.default)(), (0, description_js_1.default)(spacing)]
} = {}) {
  // startLine must be a non-negative integer (`% 1 > 0` rejects fractions).
  if (startLine < 0 || startLine % 1 > 0) throw new Error('Invalid startLine');
  const parseSource = (0, source_parser_js_1.default)({
    startLine,
    markers
  });
  const parseBlock = (0, block_parser_js_1.default)({
    fence
  });
  const parseSpec = (0, spec_parser_js_1.default)({
    tokenizers
  });
  const joinDescription = (0, description_js_1.getJoiner)(spacing);
  return function (source) {
    const blocks = [];
    for (const line of (0, util_js_1.splitLines)(source)) {
      // parseSource is stateful: it returns the accumulated Line[] only
      // when a comment block closes, and null otherwise.
      const lines = parseSource(line);
      if (lines === null) continue;
      const sections = parseBlock(lines);
      // Section 0 is the block description; the rest are tag sections.
      const specs = sections.slice(1).map(parseSpec);
      blocks.push({
        description: joinDescription(sections[0], markers),
        tags: specs,
        source: lines,
        // Surface every tokenizer problem at the block level.
        problems: specs.reduce((acc, spec) => acc.concat(spec.problems), [])
      });
    }
    return blocks;
  };
}
exports.default = getParser;
//# sourceMappingURL=index.cjs.map

1
node_modules/comment-parser/lib/parser/index.cjs.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"sources":["index.js"],"names":["Object","defineProperty","exports","value","primitives_js_1","require","util_js_1","block_parser_js_1","source_parser_js_1","spec_parser_js_1","tag_js_1","type_js_1","name_js_1","description_js_1","getParser","startLine","fence","spacing","markers","Markers","tokenizers","default","Error","parseSource","parseBlock","parseSpec","joinDescription","getJoiner","source","blocks","line","splitLines","lines","sections","specs","slice","map","push","description","tags","problems","reduce","acc","spec","concat"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;;AACA,MAAMC,eAAe,GAAGC,OAAH,qBAArB;;AACA,MAAMC,SAAS,GAAGD,OAAH,eAAf;;AACA,MAAME,iBAAiB,GAAGF,OAAH,sBAAvB;;AACA,MAAMG,kBAAkB,GAAGH,OAAH,uBAAxB;;AACA,MAAMI,gBAAgB,GAAGJ,OAAH,qBAAtB;;AACA,MAAMK,QAAQ,GAAGL,OAAH,wBAAd;;AACA,MAAMM,SAAS,GAAGN,OAAH,yBAAf;;AACA,MAAMO,SAAS,GAAGP,OAAH,yBAAf;;AACA,MAAMQ,gBAAgB,GAAGR,OAAH,gCAAtB;;AACA,SAASS,SAAT,CAAmB;AAAEC,EAAAA,SAAS,GAAG,CAAd;AAAiBC,EAAAA,KAAK,GAAG,KAAzB;AAAgCC,EAAAA,OAAO,GAAG,SAA1C;AAAqDC,EAAAA,OAAO,GAAGd,eAAe,CAACe,OAA/E;AAAwFC,EAAAA,UAAU,GAAG,CACpH,CAAC,GAAGV,QAAQ,CAACW,OAAb,GADoH,EAEpH,CAAC,GAAGV,SAAS,CAACU,OAAd,EAAuBJ,OAAvB,CAFoH,EAGpH,CAAC,GAAGL,SAAS,CAACS,OAAd,GAHoH,EAIpH,CAAC,GAAGR,gBAAgB,CAACQ,OAArB,EAA8BJ,OAA9B,CAJoH;AAArG,IAKZ,EALP,EAKW;AACP,MAAIF,SAAS,GAAG,CAAZ,IAAiBA,SAAS,GAAG,CAAZ,GAAgB,CAArC,EACI,MAAM,IAAIO,KAAJ,CAAU,mBAAV,CAAN;AACJ,QAAMC,WAAW,GAAG,CAAC,GAAGf,kBAAkB,CAACa,OAAvB,EAAgC;AAAEN,IAAAA,SAAF;AAAaG,IAAAA;AAAb,GAAhC,CAApB;AACA,QAAMM,UAAU,GAAG,CAAC,GAAGjB,iBAAiB,CAACc,OAAtB,EAA+B;AAAEL,IAAAA;AAAF,GAA/B,CAAnB;AACA,QAAMS,SAAS,GAAG,CAAC,GAAGhB,gBAAgB,CAACY,OAArB,EAA8B;AAAED,IAAAA;AAAF,GAA9B,CAAlB;AACA,QAAMM,eAAe,GAAG,CAAC,GAAGb,gBAAgB,CAACc,SAArB,EAAgCV,OAAhC,CAAxB;AACA,SAAO,UAAUW,MAAV,EAAkB;AACrB,UAAMC,MAAM,GAAG,EAAf;;AACA,SAAK,MAAMC,IAAX,IAAmB,CAAC,GAAGxB,SAAS,CAACyB,UAAd,EAA0BH,MAA1B,CAAnB,EAAsD;AAClD,YAAMI,KAAK,GAAGT,WAAW,CAACO,IAAD,CAAzB;AACA,UAAIE,KAAK,KAAK
,IAAd,EACI;AACJ,YAAMC,QAAQ,GAAGT,UAAU,CAACQ,KAAD,CAA3B;AACA,YAAME,KAAK,GAAGD,QAAQ,CAACE,KAAT,CAAe,CAAf,EAAkBC,GAAlB,CAAsBX,SAAtB,CAAd;AACAI,MAAAA,MAAM,CAACQ,IAAP,CAAY;AACRC,QAAAA,WAAW,EAAEZ,eAAe,CAACO,QAAQ,CAAC,CAAD,CAAT,EAAcf,OAAd,CADpB;AAERqB,QAAAA,IAAI,EAAEL,KAFE;AAGRN,QAAAA,MAAM,EAAEI,KAHA;AAIRQ,QAAAA,QAAQ,EAAEN,KAAK,CAACO,MAAN,CAAa,CAACC,GAAD,EAAMC,IAAN,KAAeD,GAAG,CAACE,MAAJ,CAAWD,IAAI,CAACH,QAAhB,CAA5B,EAAuD,EAAvD;AAJF,OAAZ;AAMH;;AACD,WAAOX,MAAP;AACH,GAhBD;AAiBH;;AACD3B,OAAO,CAACmB,OAAR,GAAkBP,SAAlB","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst primitives_js_1 = require(\"../primitives.js\");\nconst util_js_1 = require(\"../util.js\");\nconst block_parser_js_1 = require(\"./block-parser.js\");\nconst source_parser_js_1 = require(\"./source-parser.js\");\nconst spec_parser_js_1 = require(\"./spec-parser.js\");\nconst tag_js_1 = require(\"./tokenizers/tag.js\");\nconst type_js_1 = require(\"./tokenizers/type.js\");\nconst name_js_1 = require(\"./tokenizers/name.js\");\nconst description_js_1 = require(\"./tokenizers/description.js\");\nfunction getParser({ startLine = 0, fence = '```', spacing = 'compact', markers = primitives_js_1.Markers, tokenizers = [\n (0, tag_js_1.default)(),\n (0, type_js_1.default)(spacing),\n (0, name_js_1.default)(),\n (0, description_js_1.default)(spacing),\n], } = {}) {\n if (startLine < 0 || startLine % 1 > 0)\n throw new Error('Invalid startLine');\n const parseSource = (0, source_parser_js_1.default)({ startLine, markers });\n const parseBlock = (0, block_parser_js_1.default)({ fence });\n const parseSpec = (0, spec_parser_js_1.default)({ tokenizers });\n const joinDescription = (0, description_js_1.getJoiner)(spacing);\n return function (source) {\n const blocks = [];\n for (const line of (0, util_js_1.splitLines)(source)) {\n const lines = parseSource(line);\n if (lines === null)\n continue;\n const sections = parseBlock(lines);\n const specs = 
sections.slice(1).map(parseSpec);\n blocks.push({\n description: joinDescription(sections[0], markers),\n tags: specs,\n source: lines,\n problems: specs.reduce((acc, spec) => acc.concat(spec.problems), []),\n });\n }\n return blocks;\n };\n}\nexports.default = getParser;\n"],"file":"index.cjs"}

11
node_modules/comment-parser/lib/parser/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,11 @@
import { Block, BlockMarkers } from '../primitives.js';
import { Tokenizer } from './tokenizers/index.js';
export interface Options {
startLine: number;
fence: string;
spacing: 'compact' | 'preserve';
markers: BlockMarkers;
tokenizers: Tokenizer[];
}
export type Parser = (source: string) => Block[];
export default function getParser({ startLine, fence, spacing, markers, tokenizers, }?: Partial<Options>): Parser;

View File

@@ -0,0 +1,68 @@
"use strict";

// Stateful line-by-line comment extractor. Feed it one source line at a
// time; it accumulates lines while inside a comment block and returns the
// collected Line[] when the block closes, or null otherwise.
Object.defineProperty(exports, "__esModule", {
  value: true
});
const primitives_js_1 = require("../primitives.cjs");
const util_js_1 = require("../util.cjs");
function getParser({
  startLine = 0,
  markers = primitives_js_1.Markers
} = {}) {
  // `block` is null outside a comment; inside it holds the lines so far.
  let block = null;
  // Running line counter, starting at startLine.
  let num = startLine;
  return function parseSource(source) {
    let rest = source;
    const tokens = (0, util_js_1.seedTokens)();
    // Peel off the CR (if any) and the leading whitespace first.
    [tokens.lineEnd, rest] = (0, util_js_1.splitCR)(rest);
    [tokens.start, rest] = (0, util_js_1.splitSpace)(rest);
    // Opening marker (e.g. "/**") starts a new block unless it matches the
    // "nostart" marker (e.g. "/***") that must be ignored.
    if (block === null && rest.startsWith(markers.start) && !rest.startsWith(markers.nostart)) {
      block = [];
      tokens.delimiter = rest.slice(0, markers.start.length);
      rest = rest.slice(markers.start.length);
      [tokens.postDelimiter, rest] = (0, util_js_1.splitSpace)(rest);
    }
    // Not inside a comment: count the line and move on.
    if (block === null) {
      num++;
      return null;
    }
    // Closing marker (e.g. "*/") may be anywhere before trailing whitespace.
    const isClosed = rest.trimRight().endsWith(markers.end);
    // Continuation delimiter (e.g. "*") — but not when it is actually the
    // start of the closing marker.
    if (tokens.delimiter === '' && rest.startsWith(markers.delim) && !rest.startsWith(markers.end)) {
      tokens.delimiter = markers.delim;
      rest = rest.slice(markers.delim.length);
      [tokens.postDelimiter, rest] = (0, util_js_1.splitSpace)(rest);
    }
    if (isClosed) {
      // Split the closing marker (plus any trailing whitespace) off into
      // tokens.end, leaving only the description text in rest.
      const trimmed = rest.trimRight();
      tokens.end = rest.slice(trimmed.length - markers.end.length);
      rest = trimmed.slice(0, -markers.end.length);
    }
    tokens.description = rest;
    block.push({
      number: num,
      source,
      tokens
    });
    num++;
    if (isClosed) {
      // Block complete: hand back a copy and reset for the next block.
      const result = block.slice();
      block = null;
      return result;
    }
    return null;
  };
}
exports.default = getParser;
//# sourceMappingURL=source-parser.cjs.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["source-parser.js"],"names":["Object","defineProperty","exports","value","primitives_js_1","require","util_js_1","getParser","startLine","markers","Markers","block","num","parseSource","source","rest","tokens","seedTokens","lineEnd","splitCR","start","splitSpace","startsWith","nostart","delimiter","slice","length","postDelimiter","isClosed","trimRight","endsWith","end","delim","trimmed","description","push","number","result","default"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;;AACA,MAAMC,eAAe,GAAGC,OAAH,qBAArB;;AACA,MAAMC,SAAS,GAAGD,OAAH,eAAf;;AACA,SAASE,SAAT,CAAmB;AAAEC,EAAAA,SAAS,GAAG,CAAd;AAAiBC,EAAAA,OAAO,GAAGL,eAAe,CAACM;AAA3C,IAAwD,EAA3E,EAA+E;AAC3E,MAAIC,KAAK,GAAG,IAAZ;AACA,MAAIC,GAAG,GAAGJ,SAAV;AACA,SAAO,SAASK,WAAT,CAAqBC,MAArB,EAA6B;AAChC,QAAIC,IAAI,GAAGD,MAAX;AACA,UAAME,MAAM,GAAG,CAAC,GAAGV,SAAS,CAACW,UAAd,GAAf;AACA,KAACD,MAAM,CAACE,OAAR,EAAiBH,IAAjB,IAAyB,CAAC,GAAGT,SAAS,CAACa,OAAd,EAAuBJ,IAAvB,CAAzB;AACA,KAACC,MAAM,CAACI,KAAR,EAAeL,IAAf,IAAuB,CAAC,GAAGT,SAAS,CAACe,UAAd,EAA0BN,IAA1B,CAAvB;;AACA,QAAIJ,KAAK,KAAK,IAAV,IACAI,IAAI,CAACO,UAAL,CAAgBb,OAAO,CAACW,KAAxB,CADA,IAEA,CAACL,IAAI,CAACO,UAAL,CAAgBb,OAAO,CAACc,OAAxB,CAFL,EAEuC;AACnCZ,MAAAA,KAAK,GAAG,EAAR;AACAK,MAAAA,MAAM,CAACQ,SAAP,GAAmBT,IAAI,CAACU,KAAL,CAAW,CAAX,EAAchB,OAAO,CAACW,KAAR,CAAcM,MAA5B,CAAnB;AACAX,MAAAA,IAAI,GAAGA,IAAI,CAACU,KAAL,CAAWhB,OAAO,CAACW,KAAR,CAAcM,MAAzB,CAAP;AACA,OAACV,MAAM,CAACW,aAAR,EAAuBZ,IAAvB,IAA+B,CAAC,GAAGT,SAAS,CAACe,UAAd,EAA0BN,IAA1B,CAA/B;AACH;;AACD,QAAIJ,KAAK,KAAK,IAAd,EAAoB;AAChBC,MAAAA,GAAG;AACH,aAAO,IAAP;AACH;;AACD,UAAMgB,QAAQ,GAAGb,IAAI,CAACc,SAAL,GAAiBC,QAAjB,CAA0BrB,OAAO,CAACsB,GAAlC,CAAjB;;AACA,QAAIf,MAAM,CAACQ,SAAP,KAAqB,EAArB,IACAT,IAAI,CAACO,UAAL,CAAgBb,OAAO,CAACuB,KAAxB,CADA,IAEA,CAACjB,IAAI,CAACO,UAAL,CAAgBb,OAAO,CAACsB,GAAxB,CAFL,EAEmC;AAC/Bf,MAAAA,MAAM,CAACQ,SAAP,GAAmBf,OAAO,CAACuB,KAA3B;AACAjB,MAAAA,IAAI,GAAGA,IAAI,CAACU,KAAL,CAAWhB,OAAO,CAACuB,KAAR,CAAcN,MAAzB,CAAP;AACA,
OAACV,MAAM,CAACW,aAAR,EAAuBZ,IAAvB,IAA+B,CAAC,GAAGT,SAAS,CAACe,UAAd,EAA0BN,IAA1B,CAA/B;AACH;;AACD,QAAIa,QAAJ,EAAc;AACV,YAAMK,OAAO,GAAGlB,IAAI,CAACc,SAAL,EAAhB;AACAb,MAAAA,MAAM,CAACe,GAAP,GAAahB,IAAI,CAACU,KAAL,CAAWQ,OAAO,CAACP,MAAR,GAAiBjB,OAAO,CAACsB,GAAR,CAAYL,MAAxC,CAAb;AACAX,MAAAA,IAAI,GAAGkB,OAAO,CAACR,KAAR,CAAc,CAAd,EAAiB,CAAChB,OAAO,CAACsB,GAAR,CAAYL,MAA9B,CAAP;AACH;;AACDV,IAAAA,MAAM,CAACkB,WAAP,GAAqBnB,IAArB;AACAJ,IAAAA,KAAK,CAACwB,IAAN,CAAW;AAAEC,MAAAA,MAAM,EAAExB,GAAV;AAAeE,MAAAA,MAAf;AAAuBE,MAAAA;AAAvB,KAAX;AACAJ,IAAAA,GAAG;;AACH,QAAIgB,QAAJ,EAAc;AACV,YAAMS,MAAM,GAAG1B,KAAK,CAACc,KAAN,EAAf;AACAd,MAAAA,KAAK,GAAG,IAAR;AACA,aAAO0B,MAAP;AACH;;AACD,WAAO,IAAP;AACH,GAvCD;AAwCH;;AACDnC,OAAO,CAACoC,OAAR,GAAkB/B,SAAlB","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst primitives_js_1 = require(\"../primitives.js\");\nconst util_js_1 = require(\"../util.js\");\nfunction getParser({ startLine = 0, markers = primitives_js_1.Markers, } = {}) {\n let block = null;\n let num = startLine;\n return function parseSource(source) {\n let rest = source;\n const tokens = (0, util_js_1.seedTokens)();\n [tokens.lineEnd, rest] = (0, util_js_1.splitCR)(rest);\n [tokens.start, rest] = (0, util_js_1.splitSpace)(rest);\n if (block === null &&\n rest.startsWith(markers.start) &&\n !rest.startsWith(markers.nostart)) {\n block = [];\n tokens.delimiter = rest.slice(0, markers.start.length);\n rest = rest.slice(markers.start.length);\n [tokens.postDelimiter, rest] = (0, util_js_1.splitSpace)(rest);\n }\n if (block === null) {\n num++;\n return null;\n }\n const isClosed = rest.trimRight().endsWith(markers.end);\n if (tokens.delimiter === '' &&\n rest.startsWith(markers.delim) &&\n !rest.startsWith(markers.end)) {\n tokens.delimiter = markers.delim;\n rest = rest.slice(markers.delim.length);\n [tokens.postDelimiter, rest] = (0, util_js_1.splitSpace)(rest);\n }\n if (isClosed) {\n const trimmed = rest.trimRight();\n tokens.end = 
rest.slice(trimmed.length - markers.end.length);\n rest = trimmed.slice(0, -markers.end.length);\n }\n tokens.description = rest;\n block.push({ number: num, source, tokens });\n num++;\n if (isClosed) {\n const result = block.slice();\n block = null;\n return result;\n }\n return null;\n };\n}\nexports.default = getParser;\n"],"file":"source-parser.cjs"}

View File

@@ -0,0 +1,7 @@
import { Line, BlockMarkers } from '../primitives.js';
export interface Options {
startLine: number;
markers: BlockMarkers;
}
export type Parser = (source: string) => Line[] | null;
export default function getParser({ startLine, markers, }?: Partial<Options>): Parser;

29
node_modules/comment-parser/lib/parser/spec-parser.cjs generated vendored Normal file
View File

@@ -0,0 +1,29 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
  value: true
});
const util_js_1 = require("../util.cjs");

/**
 * Creates a `Parser` that turns a section of source lines into a tag `Spec`
 * by running it through the configured tokenizer pipeline.
 * Tokenizers are applied in order; the pipeline stops early as soon as a
 * tokenizer reports a critical problem.
 * @param {Options} options must provide the `tokenizers` pipeline
 */
function getParser({
  tokenizers
}) {
  return function parseSpec(source) {
    let spec = (0, util_js_1.seedSpec)({
      source
    });
    for (const tokenize of tokenizers) {
      spec = tokenize(spec);
      const lastProblem = spec.problems[spec.problems.length - 1];
      if (lastProblem?.critical) break;
    }
    return spec;
  };
}
exports.default = getParser;
//# sourceMappingURL=spec-parser.cjs.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["spec-parser.js"],"names":["Object","defineProperty","exports","value","util_js_1","require","getParser","tokenizers","parseSpec","source","_a","spec","seedSpec","tokenize","problems","length","critical","default"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;;AACA,MAAMC,SAAS,GAAGC,OAAH,eAAf;;AACA,SAASC,SAAT,CAAmB;AAAEC,EAAAA;AAAF,CAAnB,EAAmC;AAC/B,SAAO,SAASC,SAAT,CAAmBC,MAAnB,EAA2B;AAC9B,QAAIC,EAAJ;;AACA,QAAIC,IAAI,GAAG,CAAC,GAAGP,SAAS,CAACQ,QAAd,EAAwB;AAAEH,MAAAA;AAAF,KAAxB,CAAX;;AACA,SAAK,MAAMI,QAAX,IAAuBN,UAAvB,EAAmC;AAC/BI,MAAAA,IAAI,GAAGE,QAAQ,CAACF,IAAD,CAAf;AACA,UAAI,CAACD,EAAE,GAAGC,IAAI,CAACG,QAAL,CAAcH,IAAI,CAACG,QAAL,CAAcC,MAAd,GAAuB,CAArC,CAAN,MAAmD,IAAnD,IAA2DL,EAAE,KAAK,KAAK,CAAvE,GAA2E,KAAK,CAAhF,GAAoFA,EAAE,CAACM,QAA3F,EACI;AACP;;AACD,WAAOL,IAAP;AACH,GATD;AAUH;;AACDT,OAAO,CAACe,OAAR,GAAkBX,SAAlB","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_js_1 = require(\"../util.js\");\nfunction getParser({ tokenizers }) {\n return function parseSpec(source) {\n var _a;\n let spec = (0, util_js_1.seedSpec)({ source });\n for (const tokenize of tokenizers) {\n spec = tokenize(spec);\n if ((_a = spec.problems[spec.problems.length - 1]) === null || _a === void 0 ? void 0 : _a.critical)\n break;\n }\n return spec;\n };\n}\nexports.default = getParser;\n"],"file":"spec-parser.cjs"}

View File

@@ -0,0 +1,7 @@
import { Line, Spec } from '../primitives.js';
import { Tokenizer } from './tokenizers/index.js';
/** Turns one tag section (a run of source lines) into a tag `Spec`. */
export type Parser = (source: Line[]) => Spec;
/** `Parser` configuration options. */
export interface Options {
    /** Pipeline applied in order; stops early on a critical problem. */
    tokenizers: Tokenizer[];
}
export default function getParser({ tokenizers }: Options): Parser;

View File

@@ -0,0 +1,62 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.getJoiner = void 0;
const primitives_js_1 = require("../../primitives.cjs");
/**
 * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
 * following given spacing strategy
 * @param {Spacing} spacing tells how to handle the whitespace
 * @param {BlockMarkers} markers tells how to handle comment block delimitation
 */
function descriptionTokenizer(spacing = 'compact', markers = primitives_js_1.Markers) {
  const join = getJoiner(spacing);
  return spec => {
    spec.description = join(spec.source, markers);
    return spec;
  };
}
exports.default = descriptionTokenizer;
// Resolves a spacing strategy to a joiner function; a custom function is
// passed through unchanged.
function getJoiner(spacing) {
  if (spacing === 'compact') return compactJoiner;
  if (spacing === 'preserve') return preserveJoiner;
  return spacing;
}
exports.getJoiner = getJoiner;
// 'compact': trim every line's description, drop empties, join with spaces.
function compactJoiner(lines, markers = primitives_js_1.Markers) {
  return lines.map(({
    tokens: {
      description
    }
  }) => description.trim()).filter(description => description !== '').join(' ');
}
// Reducer: index of the first line after a multiline {type} definition —
// keeps the last index whose line still carries a type token.
const lineNo = (num, {
  tokens
}, i) => tokens.type === '' ? num : i;
// Reconstructs one line's description, keeping the original leading
// whitespace (minus one space after the delimiter, if present).
const getDescription = ({
  tokens
}) => (tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1)) + tokens.description;
// 'preserve': keep line breaks and indentation of the original description.
function preserveJoiner(lines, markers = primitives_js_1.Markers) {
  if (lines.length === 0) return '';
  // skip the opening line with no description
  if (lines[0].tokens.description === '' && lines[0].tokens.delimiter === markers.start) lines = lines.slice(1);
  // skip the closing line with no description
  const lastLine = lines[lines.length - 1];
  if (lastLine !== undefined && lastLine.tokens.description === '' && lastLine.tokens.end.endsWith(markers.end)) lines = lines.slice(0, -1);
  // description starts at the last line of type definition
  lines = lines.slice(lines.reduce(lineNo, 0));
  return lines.map(getDescription).join('\n');
}
//# sourceMappingURL=description.cjs.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["description.js"],"names":["Object","defineProperty","exports","value","getJoiner","primitives_js_1","require","descriptionTokenizer","spacing","markers","Markers","join","spec","description","source","default","compactJoiner","preserveJoiner","lines","map","tokens","trim","filter","lineNo","num","i","type","getDescription","delimiter","start","postDelimiter","slice","length","lastLine","undefined","end","endsWith","reduce"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;AACAD,OAAO,CAACE,SAAR,GAAoB,KAAK,CAAzB;;AACA,MAAMC,eAAe,GAAGC,OAAH,wBAArB;AACA;AACA;AACA;AACA;AACA;AACA;;;AACA,SAASC,oBAAT,CAA8BC,OAAO,GAAG,SAAxC,EAAmDC,OAAO,GAAGJ,eAAe,CAACK,OAA7E,EAAsF;AAClF,QAAMC,IAAI,GAAGP,SAAS,CAACI,OAAD,CAAtB;AACA,SAAQI,IAAD,IAAU;AACbA,IAAAA,IAAI,CAACC,WAAL,GAAmBF,IAAI,CAACC,IAAI,CAACE,MAAN,EAAcL,OAAd,CAAvB;AACA,WAAOG,IAAP;AACH,GAHD;AAIH;;AACDV,OAAO,CAACa,OAAR,GAAkBR,oBAAlB;;AACA,SAASH,SAAT,CAAmBI,OAAnB,EAA4B;AACxB,MAAIA,OAAO,KAAK,SAAhB,EACI,OAAOQ,aAAP;AACJ,MAAIR,OAAO,KAAK,UAAhB,EACI,OAAOS,cAAP;AACJ,SAAOT,OAAP;AACH;;AACDN,OAAO,CAACE,SAAR,GAAoBA,SAApB;;AACA,SAASY,aAAT,CAAuBE,KAAvB,EAA8BT,OAAO,GAAGJ,eAAe,CAACK,OAAxD,EAAiE;AAC7D,SAAOQ,KAAK,CACPC,GADE,CACE,CAAC;AAAEC,IAAAA,MAAM,EAAE;AAAEP,MAAAA;AAAF;AAAV,GAAD,KAAiCA,WAAW,CAACQ,IAAZ,EADnC,EAEFC,MAFE,CAEMT,WAAD,IAAiBA,WAAW,KAAK,EAFtC,EAGFF,IAHE,CAGG,GAHH,CAAP;AAIH;;AACD,MAAMY,MAAM,GAAG,CAACC,GAAD,EAAM;AAAEJ,EAAAA;AAAF,CAAN,EAAkBK,CAAlB,KAAwBL,MAAM,CAACM,IAAP,KAAgB,EAAhB,GAAqBF,GAArB,GAA2BC,CAAlE;;AACA,MAAME,cAAc,GAAG,CAAC;AAAEP,EAAAA;AAAF,CAAD,KAAgB,CAACA,MAAM,CAACQ,SAAP,KAAqB,EAArB,GAA0BR,MAAM,CAACS,KAAjC,GAAyCT,MAAM,CAACU,aAAP,CAAqBC,KAArB,CAA2B,CAA3B,CAA1C,IACnCX,MAAM,CAACP,WADX;;AAEA,SAASI,cAAT,CAAwBC,KAAxB,EAA+BT,OAAO,GAAGJ,eAAe,CAACK,OAAzD,EAAkE;AAC9D,MAAIQ,KAAK,CAACc,MAAN,KAAiB,CAArB,EACI,OAAO,EAAP,CAF0D,CAG9D;;AACA,MAAId,KAAK,CAAC,CAAD,CAAL,CAASE,MAAT,CAAgBP,WAAhB,KAAgC,EAAhC,IACAK,KAAK,CAAC,CAAD,CAAL,CAASE,MAAT,CAAgBQ,SAAhB,KAA8BnB,OAAO,CAACoB,
KAD1C,EAEIX,KAAK,GAAGA,KAAK,CAACa,KAAN,CAAY,CAAZ,CAAR,CAN0D,CAO9D;;AACA,QAAME,QAAQ,GAAGf,KAAK,CAACA,KAAK,CAACc,MAAN,GAAe,CAAhB,CAAtB;AACA,MAAIC,QAAQ,KAAKC,SAAb,IACAD,QAAQ,CAACb,MAAT,CAAgBP,WAAhB,KAAgC,EADhC,IAEAoB,QAAQ,CAACb,MAAT,CAAgBe,GAAhB,CAAoBC,QAApB,CAA6B3B,OAAO,CAAC0B,GAArC,CAFJ,EAGIjB,KAAK,GAAGA,KAAK,CAACa,KAAN,CAAY,CAAZ,EAAe,CAAC,CAAhB,CAAR,CAZ0D,CAa9D;;AACAb,EAAAA,KAAK,GAAGA,KAAK,CAACa,KAAN,CAAYb,KAAK,CAACmB,MAAN,CAAad,MAAb,EAAqB,CAArB,CAAZ,CAAR;AACA,SAAOL,KAAK,CAACC,GAAN,CAAUQ,cAAV,EAA0BhB,IAA1B,CAA+B,IAA/B,CAAP;AACH","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getJoiner = void 0;\nconst primitives_js_1 = require(\"../../primitives.js\");\n/**\n * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`\n * following given spacing srtategy\n * @param {Spacing} spacing tells how to handle the whitespace\n * @param {BlockMarkers} markers tells how to handle comment block delimitation\n */\nfunction descriptionTokenizer(spacing = 'compact', markers = primitives_js_1.Markers) {\n const join = getJoiner(spacing);\n return (spec) => {\n spec.description = join(spec.source, markers);\n return spec;\n };\n}\nexports.default = descriptionTokenizer;\nfunction getJoiner(spacing) {\n if (spacing === 'compact')\n return compactJoiner;\n if (spacing === 'preserve')\n return preserveJoiner;\n return spacing;\n}\nexports.getJoiner = getJoiner;\nfunction compactJoiner(lines, markers = primitives_js_1.Markers) {\n return lines\n .map(({ tokens: { description } }) => description.trim())\n .filter((description) => description !== '')\n .join(' ');\n}\nconst lineNo = (num, { tokens }, i) => tokens.type === '' ? num : i;\nconst getDescription = ({ tokens }) => (tokens.delimiter === '' ? 
tokens.start : tokens.postDelimiter.slice(1)) +\n tokens.description;\nfunction preserveJoiner(lines, markers = primitives_js_1.Markers) {\n if (lines.length === 0)\n return '';\n // skip the opening line with no description\n if (lines[0].tokens.description === '' &&\n lines[0].tokens.delimiter === markers.start)\n lines = lines.slice(1);\n // skip the closing line with no description\n const lastLine = lines[lines.length - 1];\n if (lastLine !== undefined &&\n lastLine.tokens.description === '' &&\n lastLine.tokens.end.endsWith(markers.end))\n lines = lines.slice(0, -1);\n // description starts at the last line of type definition\n lines = lines.slice(lines.reduce(lineNo, 0));\n return lines.map(getDescription).join('\\n');\n}\n"],"file":"description.cjs"}

View File

@@ -0,0 +1,20 @@
import { Line, BlockMarkers, Markers } from '../../primitives.js';
import { Tokenizer } from './index.js';
/**
 * Walks over provided lines joining description tokens into a single string.
 * */
export type Joiner = (lines: Line[], markers?: BlockMarkers) => string;
/**
 * Shortcut for standard Joiners
 * compact - strip surrounding whitespace and concat lines using a single space
 * preserve - preserves original whitespace and line breaks as is
 */
export type Spacing = 'compact' | 'preserve' | Joiner;
/**
 * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
 * following the given spacing strategy
 * @param {Spacing} spacing tells how to handle the whitespace
 * @param {BlockMarkers} markers tells how to handle comment block delimitation
 */
export default function descriptionTokenizer(spacing?: Spacing, markers?: typeof Markers): Tokenizer;
export declare function getJoiner(spacing: Spacing): Joiner;

View File

@@ -0,0 +1,6 @@
"use strict";
// ES-module interop marker only — the corresponding index.js holds type
// declarations exclusively, so nothing else is emitted here.
Object.defineProperty(exports, "__esModule", {
  value: true
});
//# sourceMappingURL=index.cjs.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["index.js"],"names":["Object","defineProperty","exports","value"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n"],"file":"index.cjs"}

View File

@@ -0,0 +1,7 @@
import { Spec } from '../../primitives.js';
/**
 * Splits `spec.lines[].tokens.description` into other tokens,
 * and populates the spec.{tag, name, type, description}. Invoked in a chain
 * with other tokenizers; operations listed above can be moved to separate tokenizers
 */
export type Tokenizer = (spec: Spec) => Spec;

View File

@@ -0,0 +1,109 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
  value: true
});
const util_js_1 = require("../../util.cjs");
// True when `s` is wrapped in double quotes, e.g. `"some name"`.
const isQuoted = s => s && s.startsWith('"') && s.endsWith('"');
/**
 * Splits remaining `spec.lines[].tokens.description` into `name` and `description` tokens,
 * and populates `spec.name`, `spec.optional`, and (for `[name=value]`) `spec.default`.
 * Pushes a critical problem and bails out on malformed names.
 */
function nameTokenizer() {
  // Reducer: index of the line where the {type} section ends.
  const typeEnd = (num, {
    tokens
  }, i) => tokens.type === '' ? num : i;
  return spec => {
    // look for the name in the line where {type} ends
    const {
      tokens
    } = spec.source[spec.source.reduce(typeEnd, 0)];
    // trimStart() replaces the deprecated trimLeft() alias — identical behavior
    const source = tokens.description.trimStart();
    const quotedGroups = source.split('"'); // if it starts with quoted group, assume it is a literal
    if (quotedGroups.length > 1 && quotedGroups[0] === '' && quotedGroups.length % 2 === 1) {
      spec.name = quotedGroups[1];
      tokens.name = `"${quotedGroups[1]}"`;
      [tokens.postName, tokens.description] = (0, util_js_1.splitSpace)(source.slice(tokens.name.length));
      return spec;
    }
    let brackets = 0;
    let name = '';
    let optional = false;
    let defaultValue; // assume name is non-space string or anything wrapped into brackets
    for (const ch of source) {
      if (brackets === 0 && (0, util_js_1.isSpace)(ch)) break;
      if (ch === '[') brackets++;
      if (ch === ']') brackets--;
      name += ch;
    }
    // unbalanced [ ] — report and bail out
    if (brackets !== 0) {
      spec.problems.push({
        code: 'spec:name:unpaired-brackets',
        message: 'unpaired brackets',
        line: spec.source[0].number,
        critical: true
      });
      return spec;
    }
    const nameToken = name;
    // [name] / [name=default] — optional parameter, possibly with a default value
    if (name[0] === '[' && name[name.length - 1] === ']') {
      optional = true;
      name = name.slice(1, -1);
      const parts = name.split('=');
      name = parts[0].trim();
      if (parts[1] !== undefined) defaultValue = parts.slice(1).join('=').trim();
      if (name === '') {
        spec.problems.push({
          code: 'spec:name:empty-name',
          message: 'empty name',
          line: spec.source[0].number,
          critical: true
        });
        return spec;
      }
      if (defaultValue === '') {
        spec.problems.push({
          code: 'spec:name:empty-default',
          message: 'empty default value',
          line: spec.source[0].number,
          critical: true
        });
        return spec;
      } // has "=" and is not a string, except for "=>"
      if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
        spec.problems.push({
          code: 'spec:name:invalid-default',
          message: 'invalid default value syntax',
          line: spec.source[0].number,
          critical: true
        });
        return spec;
      }
    }
    spec.optional = optional;
    spec.name = name;
    tokens.name = nameToken;
    if (defaultValue !== undefined) spec.default = defaultValue;
    [tokens.postName, tokens.description] = (0, util_js_1.splitSpace)(source.slice(tokens.name.length));
    return spec;
  };
}
exports.default = nameTokenizer;
//# sourceMappingURL=name.cjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,6 @@
import { Tokenizer } from './index.js';
/**
 * Splits remaining `spec.lines[].tokens.description` into `name` and `description` tokens,
 * and populates `spec.name` (plus `spec.optional`/`spec.default` for `[name=value]` forms)
 */
export default function nameTokenizer(): Tokenizer;

View File

@@ -0,0 +1,37 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
  value: true
});
/**
 * Splits the `@prefix` from remaining `Spec.lines[].tokens.description` into the `tag` token,
 * and populates `spec.tag`. Reports a critical problem when no `@tag` is found.
 */
function tagTokenizer() {
  return spec => {
    const firstLine = spec.source[0];
    const tokens = firstLine.tokens;
    // leading whitespace, the full "@tag" text, the bare tag name, trailing whitespace
    const found = /\s*(@(\S+))(\s*)/.exec(tokens.description);
    if (found === null) {
      spec.problems.push({
        code: 'spec:tag:prefix',
        message: 'tag should start with "@" symbol',
        line: firstLine.number,
        critical: true
      });
      return spec;
    }
    const [matched, rawTag, tagName, trailing] = found;
    tokens.tag = rawTag;
    tokens.postTag = trailing;
    tokens.description = tokens.description.slice(matched.length);
    spec.tag = tagName;
    return spec;
  };
}
exports.default = tagTokenizer;
//# sourceMappingURL=tag.cjs.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["tag.js"],"names":["Object","defineProperty","exports","value","tagTokenizer","spec","tokens","source","match","description","problems","push","code","message","line","number","critical","tag","postTag","slice","length","default"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;AACA;AACA;AACA;AACA;;AACA,SAASC,YAAT,GAAwB;AACpB,SAAQC,IAAD,IAAU;AACb,UAAM;AAAEC,MAAAA;AAAF,QAAaD,IAAI,CAACE,MAAL,CAAY,CAAZ,CAAnB;AACA,UAAMC,KAAK,GAAGF,MAAM,CAACG,WAAP,CAAmBD,KAAnB,CAAyB,kBAAzB,CAAd;;AACA,QAAIA,KAAK,KAAK,IAAd,EAAoB;AAChBH,MAAAA,IAAI,CAACK,QAAL,CAAcC,IAAd,CAAmB;AACfC,QAAAA,IAAI,EAAE,iBADS;AAEfC,QAAAA,OAAO,EAAE,kCAFM;AAGfC,QAAAA,IAAI,EAAET,IAAI,CAACE,MAAL,CAAY,CAAZ,EAAeQ,MAHN;AAIfC,QAAAA,QAAQ,EAAE;AAJK,OAAnB;AAMA,aAAOX,IAAP;AACH;;AACDC,IAAAA,MAAM,CAACW,GAAP,GAAaT,KAAK,CAAC,CAAD,CAAlB;AACAF,IAAAA,MAAM,CAACY,OAAP,GAAiBV,KAAK,CAAC,CAAD,CAAtB;AACAF,IAAAA,MAAM,CAACG,WAAP,GAAqBH,MAAM,CAACG,WAAP,CAAmBU,KAAnB,CAAyBX,KAAK,CAAC,CAAD,CAAL,CAASY,MAAlC,CAArB;AACAf,IAAAA,IAAI,CAACY,GAAL,GAAWT,KAAK,CAAC,CAAD,CAAhB;AACA,WAAOH,IAAP;AACH,GAjBD;AAkBH;;AACDH,OAAO,CAACmB,OAAR,GAAkBjB,YAAlB","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/**\n * Splits the `@prefix` from remaining `Spec.lines[].token.description` into the `tag` token,\n * and populates `spec.tag`\n */\nfunction tagTokenizer() {\n return (spec) => {\n const { tokens } = spec.source[0];\n const match = tokens.description.match(/\\s*(@(\\S+))(\\s*)/);\n if (match === null) {\n spec.problems.push({\n code: 'spec:tag:prefix',\n message: 'tag should start with \"@\" symbol',\n line: spec.source[0].number,\n critical: true,\n });\n return spec;\n }\n tokens.tag = match[1];\n tokens.postTag = match[3];\n tokens.description = tokens.description.slice(match[0].length);\n spec.tag = match[2];\n return spec;\n };\n}\nexports.default = tagTokenizer;\n"],"file":"tag.cjs"}

View File

@@ -0,0 +1,6 @@
import { Tokenizer } from './index.js';
/**
 * Splits the `@prefix` from remaining `Spec.lines[].tokens.description` into the `tag` token,
 * and populates `spec.tag`
 */
export default function tagTokenizer(): Tokenizer;

View File

@@ -0,0 +1,79 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
  value: true
});
const util_js_1 = require("../../util.cjs");
/**
 * Splits remaining `Spec.lines[].tokens.description` into `type` and `description`
 * tokens and populates `Spec.type`
 *
 * @param {Spacing} spacing tells how to deal with a whitespace
 * for type values going over multiple lines
 */
function typeTokenizer(spacing = 'compact') {
  const join = getJoiner(spacing);
  return spec => {
    let curlies = 0; // current {…} nesting depth
    let lines = []; // [tokens, type-fragment] pairs, one per consumed line
    for (const [i, {
      tokens
    }] of spec.source.entries()) {
      let type = '';
      // no type present unless the description opens with '{' on the first line
      if (i === 0 && tokens.description[0] !== '{') return spec;
      // consume characters until the curly braces balance out
      for (const ch of tokens.description) {
        if (ch === '{') curlies++;
        if (ch === '}') curlies--;
        type += ch;
        if (curlies === 0) break;
      }
      lines.push([tokens, type]);
      if (curlies === 0) break;
    }
    // braces never balanced — report a critical problem and bail out
    if (curlies !== 0) {
      spec.problems.push({
        code: 'spec:type:unpaired-curlies',
        message: 'unpaired curlies',
        line: spec.source[0].number,
        critical: true
      });
      return spec;
    }
    const parts = [];
    const offset = lines[0][0].postDelimiter.length;
    for (const [i, [tokens, type]] of lines.entries()) {
      tokens.type = type;
      if (i > 0) {
        // keep continuation-line indentation beyond the first line's offset
        // as part of the type; trim postDelimiter back to that offset
        tokens.type = tokens.postDelimiter.slice(offset) + type;
        tokens.postDelimiter = tokens.postDelimiter.slice(0, offset);
      }
      [tokens.postType, tokens.description] = (0, util_js_1.splitSpace)(tokens.description.slice(type.length));
      parts.push(tokens.type);
    }
    // strip the outer '{' and '}' before joining the collected parts
    parts[0] = parts[0].slice(1);
    parts[parts.length - 1] = parts[parts.length - 1].slice(0, -1);
    spec.type = join(parts);
    return spec;
  };
}
exports.default = typeTokenizer;
const trim = x => x.trim();
// Maps a Spacing shortcut to its joiner; custom joiner functions pass through.
function getJoiner(spacing) {
  if (spacing === 'compact') return t => t.map(trim).join('');else if (spacing === 'preserve') return t => t.join('\n');else return spacing;
}
//# sourceMappingURL=type.cjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,27 @@
import { Tokenizer } from './index.js';
/**
 * Joiner is a function taking collected type token string parts,
 * and joining them together. In most of the cases this will be
 * a single piece like {type-name}, but a type may go over multiple lines
 * ```
 * @tag {function(
 *   number,
 *   string
 * )}
 * ```
 */
export type Joiner = (parts: string[]) => string;
/**
 * Shortcut for standard Joiners
 * compact - trim surrounding space, replace line breaks with a single space
 * preserve - concat as is
 */
export type Spacing = 'compact' | 'preserve' | Joiner;
/**
 * Splits remaining `Spec.lines[].tokens.description` into `type` and `description`
 * tokens and populates `Spec.type`
 *
 * @param {Spacing} spacing tells how to deal with a whitespace
 * for type values going over multiple lines
 */
export default function typeTokenizer(spacing?: Spacing): Tokenizer;

17
node_modules/comment-parser/lib/primitives.cjs generated vendored Normal file
View File

@@ -0,0 +1,17 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.Markers = void 0;
/** @deprecated */
// Compiled TypeScript string enum: the IIFE assigns each member onto the
// exported Markers object (no reverse numeric mapping for string enums).
var Markers;
(function (Markers) {
  Markers["start"] = "/**";
  Markers["nostart"] = "/***";
  Markers["delim"] = "*";
  Markers["end"] = "*/";
})(Markers = exports.Markers || (exports.Markers = {}));
//# sourceMappingURL=primitives.cjs.map

1
node_modules/comment-parser/lib/primitives.cjs.map generated vendored Normal file
View File

@@ -0,0 +1 @@
{"version":3,"sources":["primitives.js"],"names":["Object","defineProperty","exports","value","Markers"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;AACAD,OAAO,CAACE,OAAR,GAAkB,KAAK,CAAvB;AACA;;AACA,IAAIA,OAAJ;;AACA,CAAC,UAAUA,OAAV,EAAmB;AAChBA,EAAAA,OAAO,CAAC,OAAD,CAAP,GAAmB,KAAnB;AACAA,EAAAA,OAAO,CAAC,SAAD,CAAP,GAAqB,MAArB;AACAA,EAAAA,OAAO,CAAC,OAAD,CAAP,GAAmB,GAAnB;AACAA,EAAAA,OAAO,CAAC,KAAD,CAAP,GAAiB,IAAjB;AACH,CALD,EAKGA,OAAO,GAAGF,OAAO,CAACE,OAAR,KAAoBF,OAAO,CAACE,OAAR,GAAkB,EAAtC,CALb","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Markers = void 0;\n/** @deprecated */\nvar Markers;\n(function (Markers) {\n Markers[\"start\"] = \"/**\";\n Markers[\"nostart\"] = \"/***\";\n Markers[\"delim\"] = \"*\";\n Markers[\"end\"] = \"*/\";\n})(Markers = exports.Markers || (exports.Markers = {}));\n"],"file":"primitives.cjs"}

54
node_modules/comment-parser/lib/primitives.d.ts generated vendored Normal file
View File

@@ -0,0 +1,54 @@
/** @deprecated */
export declare enum Markers {
  start = "/**",
  nostart = "/***",
  delim = "*",
  end = "*/"
}
/** Comment-block delimiter strings used by the tokenizers and transforms. */
export interface BlockMarkers {
  start: string;
  nostart: string;
  delim: string;
  end: string;
}
/** A parsed comment block: its description, tags, source lines, and problems. */
export interface Block {
  description: string;
  tags: Spec[];
  source: Line[];
  problems: Problem[];
}
/** A parsed tag (`@tag {type} name description`) with its source lines and problems. */
export interface Spec {
  tag: string;
  name: string;
  default?: string;
  type: string;
  optional: boolean;
  description: string;
  problems: Problem[];
  source: Line[];
}
/** One physical source line: its number, original text, and tokenized parts. */
export interface Line {
  number: number;
  source: string;
  tokens: Tokens;
}
/**
 * Token fragments of one comment line, listed in physical order;
 * concatenating all fields reproduces the line (see the stringifier).
 */
export interface Tokens {
  start: string;
  delimiter: string;
  postDelimiter: string;
  tag: string;
  postTag: string;
  name: string;
  postName: string;
  type: string;
  postType: string;
  description: string;
  end: string;
  lineEnd: string;
}
/** A parsing problem: machine-readable code, message, line number, severity. */
export interface Problem {
  code: 'unhandled' | 'custom' | 'source:startline' | 'spec:tag:prefix' | 'spec:type:unpaired-curlies' | 'spec:name:unpaired-brackets' | 'spec:name:empty-name' | 'spec:name:invalid-default' | 'spec:name:empty-default';
  message: string;
  line: number;
  critical: boolean;
}

18
node_modules/comment-parser/lib/stringifier/index.cjs generated vendored Normal file
View File

@@ -0,0 +1,18 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
  value: true
});
/**
 * Concatenates all token fragments of a line back into the original line text.
 * The field order matches the physical layout of a comment line.
 */
function join(tokens) {
  const fragments = [
    tokens.start,
    tokens.delimiter,
    tokens.postDelimiter,
    tokens.tag,
    tokens.postTag,
    tokens.type,
    tokens.postType,
    tokens.name,
    tokens.postName,
    tokens.description,
    tokens.end,
    tokens.lineEnd
  ];
  return fragments.join('');
}
/** Builds a stringifier turning a parsed Block back into comment source text. */
function getStringifier() {
  return block => {
    const rendered = block.source.map(line => join(line.tokens));
    return rendered.join('\n');
  };
}
exports.default = getStringifier;
//# sourceMappingURL=index.cjs.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["index.js"],"names":["Object","defineProperty","exports","value","join","tokens","start","delimiter","postDelimiter","tag","postTag","type","postType","name","postName","description","end","lineEnd","getStringifier","block","source","map","default"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;;AACA,SAASC,IAAT,CAAcC,MAAd,EAAsB;AAClB,SAAQA,MAAM,CAACC,KAAP,GACJD,MAAM,CAACE,SADH,GAEJF,MAAM,CAACG,aAFH,GAGJH,MAAM,CAACI,GAHH,GAIJJ,MAAM,CAACK,OAJH,GAKJL,MAAM,CAACM,IALH,GAMJN,MAAM,CAACO,QANH,GAOJP,MAAM,CAACQ,IAPH,GAQJR,MAAM,CAACS,QARH,GASJT,MAAM,CAACU,WATH,GAUJV,MAAM,CAACW,GAVH,GAWJX,MAAM,CAACY,OAXX;AAYH;;AACD,SAASC,cAAT,GAA0B;AACtB,SAAQC,KAAD,IAAWA,KAAK,CAACC,MAAN,CAAaC,GAAb,CAAiB,CAAC;AAAEhB,IAAAA;AAAF,GAAD,KAAgBD,IAAI,CAACC,MAAD,CAArC,EAA+CD,IAA/C,CAAoD,IAApD,CAAlB;AACH;;AACDF,OAAO,CAACoB,OAAR,GAAkBJ,cAAlB","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction join(tokens) {\n return (tokens.start +\n tokens.delimiter +\n tokens.postDelimiter +\n tokens.tag +\n tokens.postTag +\n tokens.type +\n tokens.postType +\n tokens.name +\n tokens.postName +\n tokens.description +\n tokens.end +\n tokens.lineEnd);\n}\nfunction getStringifier() {\n return (block) => block.source.map(({ tokens }) => join(tokens)).join('\\n');\n}\nexports.default = getStringifier;\n"],"file":"index.cjs"}

View File

@@ -0,0 +1,3 @@
import { Block } from '../primitives.js';
/** Turns a parsed Block back into its comment source text. */
export type Stringifier = (block: Block) => string;
export default function getStringifier(): Stringifier;

View File

@@ -0,0 +1,72 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
  value: true
});
const util_js_1 = require("../util.cjs");
// Minimum width for every table column (one per token field, plus line number).
const zeroWidth = {
  line: 0,
  start: 0,
  delimiter: 0,
  postDelimiter: 0,
  tag: 0,
  postTag: 0,
  name: 0,
  postName: 0,
  type: 0,
  postType: 0,
  description: 0,
  end: 0,
  lineEnd: 0
};
// Header-title overrides; columns without an entry are titled by field name.
const headers = {
  lineEnd: 'CR'
};
const fields = Object.keys(zeroWidth);
// Renders a whitespace-only value as its length, e.g. '   ' -> '{3}'.
const repr = x => (0, util_js_1.isSpace)(x) ? `{${x.length}}` : x;
// Wraps a row's cells in '|' separators.
const frame = line => '|' + line.join('|') + '|';
// Pads every token value to its column width.
const align = (width, tokens) => Object.keys(tokens).map(k => repr(tokens[k]).padEnd(width[k]));
/**
 * Renders a parsed block's source lines as an ASCII table of token values —
 * a debugging aid. Returns '' for a block with no source lines.
 */
function inspect({
  source
}) {
  var _a, _b; // compiled temporaries for the `headers[f] ?? f` fallbacks below
  if (source.length === 0) return '';
  // start from header-title widths, then widen to fit the longest cell per column
  const width = Object.assign({}, zeroWidth);
  for (const f of fields) width[f] = ((_a = headers[f]) !== null && _a !== void 0 ? _a : f).length;
  for (const {
    number,
    tokens
  } of source) {
    width.line = Math.max(width.line, number.toString().length);
    for (const k in tokens) width[k] = Math.max(width[k], repr(tokens[k]).length);
  }
  // lines[0] is the header row, lines[1] the '---' separator row
  const lines = [[], []];
  for (const f of fields) lines[0].push(((_b = headers[f]) !== null && _b !== void 0 ? _b : f).padEnd(width[f]));
  for (const f of fields) lines[1].push('-'.padEnd(width[f], '-'));
  for (const {
    number,
    tokens
  } of source) {
    const line = number.toString().padStart(width.line);
    lines.push([line, ...align(width, tokens)]);
  }
  return lines.map(frame).join('\n');
}
exports.default = inspect;
//# sourceMappingURL=inspect.cjs.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["inspect.js"],"names":["Object","defineProperty","exports","value","util_js_1","require","zeroWidth","line","start","delimiter","postDelimiter","tag","postTag","name","postName","type","postType","description","end","lineEnd","headers","fields","keys","repr","x","isSpace","length","frame","join","align","width","tokens","map","k","padEnd","inspect","source","_a","_b","assign","f","number","Math","max","toString","lines","push","padStart","default"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;;AACA,MAAMC,SAAS,GAAGC,OAAH,eAAf;;AACA,MAAMC,SAAS,GAAG;AACdC,EAAAA,IAAI,EAAE,CADQ;AAEdC,EAAAA,KAAK,EAAE,CAFO;AAGdC,EAAAA,SAAS,EAAE,CAHG;AAIdC,EAAAA,aAAa,EAAE,CAJD;AAKdC,EAAAA,GAAG,EAAE,CALS;AAMdC,EAAAA,OAAO,EAAE,CANK;AAOdC,EAAAA,IAAI,EAAE,CAPQ;AAQdC,EAAAA,QAAQ,EAAE,CARI;AASdC,EAAAA,IAAI,EAAE,CATQ;AAUdC,EAAAA,QAAQ,EAAE,CAVI;AAWdC,EAAAA,WAAW,EAAE,CAXC;AAYdC,EAAAA,GAAG,EAAE,CAZS;AAadC,EAAAA,OAAO,EAAE;AAbK,CAAlB;AAeA,MAAMC,OAAO,GAAG;AAAED,EAAAA,OAAO,EAAE;AAAX,CAAhB;AACA,MAAME,MAAM,GAAGrB,MAAM,CAACsB,IAAP,CAAYhB,SAAZ,CAAf;;AACA,MAAMiB,IAAI,GAAIC,CAAD,IAAQ,CAAC,GAAGpB,SAAS,CAACqB,OAAd,EAAuBD,CAAvB,IAA6B,IAAGA,CAAC,CAACE,MAAO,GAAzC,GAA8CF,CAAnE;;AACA,MAAMG,KAAK,GAAIpB,IAAD,IAAU,MAAMA,IAAI,CAACqB,IAAL,CAAU,GAAV,CAAN,GAAuB,GAA/C;;AACA,MAAMC,KAAK,GAAG,CAACC,KAAD,EAAQC,MAAR,KAAmB/B,MAAM,CAACsB,IAAP,CAAYS,MAAZ,EAAoBC,GAApB,CAAyBC,CAAD,IAAOV,IAAI,CAACQ,MAAM,CAACE,CAAD,CAAP,CAAJ,CAAgBC,MAAhB,CAAuBJ,KAAK,CAACG,CAAD,CAA5B,CAA/B,CAAjC;;AACA,SAASE,OAAT,CAAiB;AAAEC,EAAAA;AAAF,CAAjB,EAA6B;AACzB,MAAIC,EAAJ,EAAQC,EAAR;;AACA,MAAIF,MAAM,CAACV,MAAP,KAAkB,CAAtB,EACI,OAAO,EAAP;AACJ,QAAMI,KAAK,GAAG9B,MAAM,CAACuC,MAAP,CAAc,EAAd,EAAkBjC,SAAlB,CAAd;;AACA,OAAK,MAAMkC,CAAX,IAAgBnB,MAAhB,EACIS,KAAK,CAACU,CAAD,CAAL,GAAW,CAAC,CAACH,EAAE,GAAGjB,OAAO,CAACoB,CAAD,CAAb,MAAsB,IAAtB,IAA8BH,EAAE,KAAK,KAAK,CAA1C,GAA8CA,EAA9C,GAAmDG,CAApD,EAAuDd,MAAlE;;AACJ,OAAK,MAAM;AAAEe,IAAAA,MAAF;AAAUV,IAAAA;AAAV,GAAX,IAAiCK,MAAjC,EAAyC;AACrCN,IAAAA,KAA
K,CAACvB,IAAN,GAAamC,IAAI,CAACC,GAAL,CAASb,KAAK,CAACvB,IAAf,EAAqBkC,MAAM,CAACG,QAAP,GAAkBlB,MAAvC,CAAb;;AACA,SAAK,MAAMO,CAAX,IAAgBF,MAAhB,EACID,KAAK,CAACG,CAAD,CAAL,GAAWS,IAAI,CAACC,GAAL,CAASb,KAAK,CAACG,CAAD,CAAd,EAAmBV,IAAI,CAACQ,MAAM,CAACE,CAAD,CAAP,CAAJ,CAAgBP,MAAnC,CAAX;AACP;;AACD,QAAMmB,KAAK,GAAG,CAAC,EAAD,EAAK,EAAL,CAAd;;AACA,OAAK,MAAML,CAAX,IAAgBnB,MAAhB,EACIwB,KAAK,CAAC,CAAD,CAAL,CAASC,IAAT,CAAc,CAAC,CAACR,EAAE,GAAGlB,OAAO,CAACoB,CAAD,CAAb,MAAsB,IAAtB,IAA8BF,EAAE,KAAK,KAAK,CAA1C,GAA8CA,EAA9C,GAAmDE,CAApD,EAAuDN,MAAvD,CAA8DJ,KAAK,CAACU,CAAD,CAAnE,CAAd;;AACJ,OAAK,MAAMA,CAAX,IAAgBnB,MAAhB,EACIwB,KAAK,CAAC,CAAD,CAAL,CAASC,IAAT,CAAc,IAAIZ,MAAJ,CAAWJ,KAAK,CAACU,CAAD,CAAhB,EAAqB,GAArB,CAAd;;AACJ,OAAK,MAAM;AAAEC,IAAAA,MAAF;AAAUV,IAAAA;AAAV,GAAX,IAAiCK,MAAjC,EAAyC;AACrC,UAAM7B,IAAI,GAAGkC,MAAM,CAACG,QAAP,GAAkBG,QAAlB,CAA2BjB,KAAK,CAACvB,IAAjC,CAAb;AACAsC,IAAAA,KAAK,CAACC,IAAN,CAAW,CAACvC,IAAD,EAAO,GAAGsB,KAAK,CAACC,KAAD,EAAQC,MAAR,CAAf,CAAX;AACH;;AACD,SAAOc,KAAK,CAACb,GAAN,CAAUL,KAAV,EAAiBC,IAAjB,CAAsB,IAAtB,CAAP;AACH;;AACD1B,OAAO,CAAC8C,OAAR,GAAkBb,OAAlB","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_js_1 = require(\"../util.js\");\nconst zeroWidth = {\n line: 0,\n start: 0,\n delimiter: 0,\n postDelimiter: 0,\n tag: 0,\n postTag: 0,\n name: 0,\n postName: 0,\n type: 0,\n postType: 0,\n description: 0,\n end: 0,\n lineEnd: 0,\n};\nconst headers = { lineEnd: 'CR' };\nconst fields = Object.keys(zeroWidth);\nconst repr = (x) => ((0, util_js_1.isSpace)(x) ? `{${x.length}}` : x);\nconst frame = (line) => '|' + line.join('|') + '|';\nconst align = (width, tokens) => Object.keys(tokens).map((k) => repr(tokens[k]).padEnd(width[k]));\nfunction inspect({ source }) {\n var _a, _b;\n if (source.length === 0)\n return '';\n const width = Object.assign({}, zeroWidth);\n for (const f of fields)\n width[f] = ((_a = headers[f]) !== null && _a !== void 0 ? 
_a : f).length;\n for (const { number, tokens } of source) {\n width.line = Math.max(width.line, number.toString().length);\n for (const k in tokens)\n width[k] = Math.max(width[k], repr(tokens[k]).length);\n }\n const lines = [[], []];\n for (const f of fields)\n lines[0].push(((_b = headers[f]) !== null && _b !== void 0 ? _b : f).padEnd(width[f]));\n for (const f of fields)\n lines[1].push('-'.padEnd(width[f], '-'));\n for (const { number, tokens } of source) {\n const line = number.toString().padStart(width.line);\n lines.push([line, ...align(width, tokens)]);\n }\n return lines.map(frame).join('\\n');\n}\nexports.default = inspect;\n"],"file":"inspect.cjs"}

View File

@@ -0,0 +1,2 @@
import { Block } from '../primitives.js';
/** Renders a block's source lines as an ASCII table of token values — a debugging aid. */
export default function inspect({ source }: Block): string;

127
node_modules/comment-parser/lib/transforms/align.cjs generated vendored Normal file
View File

@@ -0,0 +1,127 @@
"use strict";
// Compiled TypeScript __rest helper: copies own enumerable props of `s`
// except the keys listed in `e` (symbol keys included).
var __rest = this && this.__rest || function (s, e) {
  var t = {};
  for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) t[p] = s[p];
  if (s != null && typeof Object.getOwnPropertySymbols === "function") for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
    if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) t[p[i]] = s[p[i]];
  }
  return t;
};
Object.defineProperty(exports, "__esModule", {
  value: true
});
const primitives_js_1 = require("../primitives.cjs");
const util_js_1 = require("../util.cjs");
// Widths of the columns being aligned across a block.
const zeroWidth = {
  start: 0,
  tag: 0,
  type: 0,
  name: 0
};
// Reducer computing per-column max widths; `start` is taken from the line
// carrying the opening delimiter rather than maximized.
const getWidth = (markers = primitives_js_1.Markers) => (w, {
  tokens: t
}) => ({
  start: t.delimiter === markers.start ? t.start.length : w.start,
  tag: Math.max(w.tag, t.tag.length),
  type: Math.max(w.type, t.type.length),
  name: Math.max(w.name, t.name.length)
});
// Produces a string of `len` spaces.
const space = len => ''.padStart(len, ' ');
/**
 * Transform that vertically aligns delimiters, tags, types, and names
 * across all lines of a comment block by rewriting whitespace tokens.
 * @param {BlockMarkers} markers comment delimiters to align against
 */
function align(markers = primitives_js_1.Markers) {
  let intoTags = false; // flips once the first tagged line is seen
  let w; // column widths, recomputed per block in the returned transform
  // Returns a copy of `line` with its whitespace tokens adjusted to the widths.
  function update(line) {
    const tokens = Object.assign({}, line.tokens);
    if (tokens.tag !== '') intoTags = true;
    const isEmpty = tokens.tag === '' && tokens.name === '' && tokens.type === '' && tokens.description === ''; // dangling '*/'
    if (tokens.end === markers.end && isEmpty) {
      tokens.start = space(w.start + 1);
      return Object.assign(Object.assign({}, line), {
        tokens
      });
    }
    switch (tokens.delimiter) {
      case markers.start:
        tokens.start = space(w.start);
        break;
      case markers.delim:
        tokens.start = space(w.start + 1);
        break;
      default:
        tokens.delimiter = '';
        tokens.start = space(w.start + 2);
      // compensate delimiter
    }
    // before the first tag: only normalize the delimiter spacing
    if (!intoTags) {
      tokens.postDelimiter = tokens.description === '' ? '' : ' ';
      return Object.assign(Object.assign({}, line), {
        tokens
      });
    }
    // track trailing-empty sections so no padding is emitted after the
    // last non-empty token on the line
    const nothingAfter = {
      delim: false,
      tag: false,
      type: false,
      name: false
    };
    if (tokens.description === '') {
      nothingAfter.name = true;
      tokens.postName = '';
      if (tokens.name === '') {
        nothingAfter.type = true;
        tokens.postType = '';
        if (tokens.type === '') {
          nothingAfter.tag = true;
          tokens.postTag = '';
          if (tokens.tag === '') {
            nothingAfter.delim = true;
          }
        }
      }
    }
    tokens.postDelimiter = nothingAfter.delim ? '' : ' ';
    if (!nothingAfter.tag) tokens.postTag = space(w.tag - tokens.tag.length + 1);
    if (!nothingAfter.type) tokens.postType = space(w.type - tokens.type.length + 1);
    if (!nothingAfter.name) tokens.postName = space(w.name - tokens.name.length + 1);
    return Object.assign(Object.assign({}, line), {
      tokens
    });
  }
  return _a => {
    var {
      source
    } = _a,
    fields = __rest(_a, ["source"]);
    w = source.reduce(getWidth(markers), Object.assign({}, zeroWidth));
    return (0, util_js_1.rewireSource)(Object.assign(Object.assign({}, fields), {
      source: source.map(update)
    }));
  };
}
exports.default = align;
//# sourceMappingURL=align.cjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,3 @@
import { Transform } from './index.js';
import { Markers } from '../primitives.js';
/** Transform vertically aligning delimiters, tags, types, and names across a block's lines. */
export default function align(markers?: typeof Markers): Transform;

44
node_modules/comment-parser/lib/transforms/crlf.cjs generated vendored Normal file
View File

@@ -0,0 +1,44 @@
"use strict";
// Compiled TypeScript __rest helper: copies own enumerable props of `s`
// except the keys listed in `e` (symbol keys included).
var __rest = this && this.__rest || function (s, e) {
  var t = {};
  for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) t[p] = s[p];
  if (s != null && typeof Object.getOwnPropertySymbols === "function") for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
    if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) t[p[i]] = s[p[i]];
  }
  return t;
};
Object.defineProperty(exports, "__esModule", {
  value: true
});
const util_js_1 = require("../util.cjs");
// NOTE(review): the original build also emitted an `order` list of token
// names here; it was never read anywhere in this module (dead code) and
// has been removed.
/**
 * Transform normalizing line endings across a parsed block by rewriting
 * every line's `lineEnd` token.
 * @param ending 'LF' clears the token; any other value sets it to '\r'.
 */
function crlf(ending) {
  // Returns a copy of `line` with its `lineEnd` token set per `ending`.
  function update(line) {
    return Object.assign(Object.assign({}, line), {
      tokens: Object.assign(Object.assign({}, line.tokens), {
        lineEnd: ending === 'LF' ? '' : '\r'
      })
    });
  }
  return _a => {
    var {
      source
    } = _a,
    fields = __rest(_a, ["source"]);
    return (0, util_js_1.rewireSource)(Object.assign(Object.assign({}, fields), {
      source: source.map(update)
    }));
  };
}
exports.default = crlf;
//# sourceMappingURL=crlf.cjs.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["crlf.js"],"names":["__rest","s","e","t","p","Object","prototype","hasOwnProperty","call","indexOf","getOwnPropertySymbols","i","length","propertyIsEnumerable","defineProperty","exports","value","util_js_1","require","order","crlf","ending","update","line","assign","tokens","lineEnd","_a","source","fields","rewireSource","map","default"],"mappings":"AAAA;;AACA,IAAIA,MAAM,GAAI,QAAQ,KAAKA,MAAd,IAAyB,UAAUC,CAAV,EAAaC,CAAb,EAAgB;AAClD,MAAIC,CAAC,GAAG,EAAR;;AACA,OAAK,IAAIC,CAAT,IAAcH,CAAd,EAAiB,IAAII,MAAM,CAACC,SAAP,CAAiBC,cAAjB,CAAgCC,IAAhC,CAAqCP,CAArC,EAAwCG,CAAxC,KAA8CF,CAAC,CAACO,OAAF,CAAUL,CAAV,IAAe,CAAjE,EACbD,CAAC,CAACC,CAAD,CAAD,GAAOH,CAAC,CAACG,CAAD,CAAR;;AACJ,MAAIH,CAAC,IAAI,IAAL,IAAa,OAAOI,MAAM,CAACK,qBAAd,KAAwC,UAAzD,EACI,KAAK,IAAIC,CAAC,GAAG,CAAR,EAAWP,CAAC,GAAGC,MAAM,CAACK,qBAAP,CAA6BT,CAA7B,CAApB,EAAqDU,CAAC,GAAGP,CAAC,CAACQ,MAA3D,EAAmED,CAAC,EAApE,EAAwE;AACpE,QAAIT,CAAC,CAACO,OAAF,CAAUL,CAAC,CAACO,CAAD,CAAX,IAAkB,CAAlB,IAAuBN,MAAM,CAACC,SAAP,CAAiBO,oBAAjB,CAAsCL,IAAtC,CAA2CP,CAA3C,EAA8CG,CAAC,CAACO,CAAD,CAA/C,CAA3B,EACIR,CAAC,CAACC,CAAC,CAACO,CAAD,CAAF,CAAD,GAAUV,CAAC,CAACG,CAAC,CAACO,CAAD,CAAF,CAAX;AACP;AACL,SAAOR,CAAP;AACH,CAVD;;AAWAE,MAAM,CAACS,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;;AACA,MAAMC,SAAS,GAAGC,OAAH,eAAf;;AACA,MAAMC,KAAK,GAAG,CACV,KADU,EAEV,aAFU,EAGV,UAHU,EAIV,MAJU,EAKV,UALU,EAMV,MANU,EAOV,SAPU,EAQV,KARU,EASV,eATU,EAUV,WAVU,EAWV,OAXU,CAAd;;AAaA,SAASC,IAAT,CAAcC,MAAd,EAAsB;AAClB,WAASC,MAAT,CAAgBC,IAAhB,EAAsB;AAClB,WAAOlB,MAAM,CAACmB,MAAP,CAAcnB,MAAM,CAACmB,MAAP,CAAc,EAAd,EAAkBD,IAAlB,CAAd,EAAuC;AAAEE,MAAAA,MAAM,EAAEpB,MAAM,CAACmB,MAAP,CAAcnB,MAAM,CAACmB,MAAP,CAAc,EAAd,EAAkBD,IAAI,CAACE,MAAvB,CAAd,EAA8C;AAAEC,QAAAA,OAAO,EAAEL,MAAM,KAAK,IAAX,GAAkB,EAAlB,GAAuB;AAAlC,OAA9C;AAAV,KAAvC,CAAP;AACH;;AACD,SAAQM,EAAD,IAAQ;AACX,QAAI;AAAEC,MAAAA;AAAF,QAAaD,EAAjB;AAAA,QAAqBE,MAAM,GAAG7B,MAAM,CAAC2B,EAAD,EAAK,CAAC,QAAD,CAAL,CAApC;;AACA,WAAO,CAAC,GAAGV,SAAS,CAACa,YAAd,EAA4BzB,MAAM,CAACmB,MAAP,CAAcnB,MAAM
,CAACmB,MAAP,CAAc,EAAd,EAAkBK,MAAlB,CAAd,EAAyC;AAAED,MAAAA,MAAM,EAAEA,MAAM,CAACG,GAAP,CAAWT,MAAX;AAAV,KAAzC,CAA5B,CAAP;AACH,GAHD;AAIH;;AACDP,OAAO,CAACiB,OAAR,GAAkBZ,IAAlB","sourcesContent":["\"use strict\";\nvar __rest = (this && this.__rest) || function (s, e) {\n var t = {};\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\n t[p] = s[p];\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\n t[p[i]] = s[p[i]];\n }\n return t;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_js_1 = require(\"../util.js\");\nconst order = [\n 'end',\n 'description',\n 'postType',\n 'type',\n 'postName',\n 'name',\n 'postTag',\n 'tag',\n 'postDelimiter',\n 'delimiter',\n 'start',\n];\nfunction crlf(ending) {\n function update(line) {\n return Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { lineEnd: ending === 'LF' ? '' : '\\r' }) });\n }\n return (_a) => {\n var { source } = _a, fields = __rest(_a, [\"source\"]);\n return (0, util_js_1.rewireSource)(Object.assign(Object.assign({}, fields), { source: source.map(update) }));\n };\n}\nexports.default = crlf;\n"],"file":"crlf.cjs"}

3
node_modules/comment-parser/lib/transforms/crlf.d.ts generated vendored Normal file
View File

@@ -0,0 +1,3 @@
import { Transform } from './index.js';
export type Ending = 'LF' | 'CRLF';
export default function crlf(ending: Ending): Transform;

58
node_modules/comment-parser/lib/transforms/indent.cjs generated vendored Normal file
View File

@@ -0,0 +1,58 @@
"use strict";
// TypeScript-emitted `__rest` helper: builds a shallow copy of `s` containing
// every own enumerable property whose key is NOT listed in `e` (enumerable
// symbol keys included). Implements object rest destructuring (`...fields`)
// for pre-ES2018 targets. Reuses an existing `this.__rest` if one is present.
var __rest = this && this.__rest || function (s, e) {
  var t = {};
  // own string-keyed properties not excluded by `e`
  for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) t[p] = s[p];
  // own enumerable symbol-keyed properties not excluded by `e`
  if (s != null && typeof Object.getOwnPropertySymbols === "function") for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
    if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) t[p[i]] = s[p[i]];
  }
  return t;
};
Object.defineProperty(exports, "__esModule", {
value: true
});
const util_js_1 = require("../util.cjs");
// Curried trimmer: drop `offset` leading characters from a string.
const pull = offset => {
  return str => str.slice(offset);
};
// Curried padder: append `offset` spaces to a string (pad string built once,
// reused by every call of the returned function).
const push = offset => {
  const padding = ''.padStart(offset, ' ');
  return str => str + padding;
};
// Transform factory: re-indents every source line so the first line's `start`
// token becomes `pos` characters wide; all following lines get the same shift.
function indent(pos) {
  // Lazily derived from the first line's start token, then reused for all lines.
  let adjust;
  const pad = start => {
    if (adjust === undefined) {
      const delta = pos - start.length;
      adjust = delta > 0 ? push(delta) : pull(-delta);
    }
    return adjust(start);
  };
  // Rebuild one source line with its start token re-padded.
  const update = line => {
    const tokens = Object.assign({}, line.tokens, { start: pad(line.tokens.start) });
    return Object.assign({}, line, { tokens: tokens });
  };
  return _a => {
    var {
      source
    } = _a,
      fields = __rest(_a, ["source"]);
    // Re-point tag sources at the rewritten block source lines.
    return (0, util_js_1.rewireSource)(Object.assign({}, fields, { source: source.map(update) }));
  };
}
exports.default = indent;
//# sourceMappingURL=indent.cjs.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["indent.js"],"names":["__rest","s","e","t","p","Object","prototype","hasOwnProperty","call","indexOf","getOwnPropertySymbols","i","length","propertyIsEnumerable","defineProperty","exports","value","util_js_1","require","pull","offset","str","slice","push","space","padStart","indent","pos","shift","pad","start","undefined","update","line","assign","tokens","_a","source","fields","rewireSource","map","default"],"mappings":"AAAA;;AACA,IAAIA,MAAM,GAAI,QAAQ,KAAKA,MAAd,IAAyB,UAAUC,CAAV,EAAaC,CAAb,EAAgB;AAClD,MAAIC,CAAC,GAAG,EAAR;;AACA,OAAK,IAAIC,CAAT,IAAcH,CAAd,EAAiB,IAAII,MAAM,CAACC,SAAP,CAAiBC,cAAjB,CAAgCC,IAAhC,CAAqCP,CAArC,EAAwCG,CAAxC,KAA8CF,CAAC,CAACO,OAAF,CAAUL,CAAV,IAAe,CAAjE,EACbD,CAAC,CAACC,CAAD,CAAD,GAAOH,CAAC,CAACG,CAAD,CAAR;;AACJ,MAAIH,CAAC,IAAI,IAAL,IAAa,OAAOI,MAAM,CAACK,qBAAd,KAAwC,UAAzD,EACI,KAAK,IAAIC,CAAC,GAAG,CAAR,EAAWP,CAAC,GAAGC,MAAM,CAACK,qBAAP,CAA6BT,CAA7B,CAApB,EAAqDU,CAAC,GAAGP,CAAC,CAACQ,MAA3D,EAAmED,CAAC,EAApE,EAAwE;AACpE,QAAIT,CAAC,CAACO,OAAF,CAAUL,CAAC,CAACO,CAAD,CAAX,IAAkB,CAAlB,IAAuBN,MAAM,CAACC,SAAP,CAAiBO,oBAAjB,CAAsCL,IAAtC,CAA2CP,CAA3C,EAA8CG,CAAC,CAACO,CAAD,CAA/C,CAA3B,EACIR,CAAC,CAACC,CAAC,CAACO,CAAD,CAAF,CAAD,GAAUV,CAAC,CAACG,CAAC,CAACO,CAAD,CAAF,CAAX;AACP;AACL,SAAOR,CAAP;AACH,CAVD;;AAWAE,MAAM,CAACS,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;;AACA,MAAMC,SAAS,GAAGC,OAAH,eAAf;;AACA,MAAMC,IAAI,GAAIC,MAAD,IAAaC,GAAD,IAASA,GAAG,CAACC,KAAJ,CAAUF,MAAV,CAAlC;;AACA,MAAMG,IAAI,GAAIH,MAAD,IAAY;AACrB,QAAMI,KAAK,GAAG,GAAGC,QAAH,CAAYL,MAAZ,EAAoB,GAApB,CAAd;AACA,SAAQC,GAAD,IAASA,GAAG,GAAGG,KAAtB;AACH,CAHD;;AAIA,SAASE,MAAT,CAAgBC,GAAhB,EAAqB;AACjB,MAAIC,KAAJ;;AACA,QAAMC,GAAG,GAAIC,KAAD,IAAW;AACnB,QAAIF,KAAK,KAAKG,SAAd,EAAyB;AACrB,YAAMX,MAAM,GAAGO,GAAG,GAAGG,KAAK,CAAClB,MAA3B;AACAgB,MAAAA,KAAK,GAAGR,MAAM,GAAG,CAAT,GAAaG,IAAI,CAACH,MAAD,CAAjB,GAA4BD,IAAI,CAAC,CAACC,MAAF,CAAxC;AACH;;AACD,WAAOQ,KAAK,CAACE,KAAD,CAAZ;AACH,GAND;;AAOA,QAAME,MAAM,GAAIC,IAAD,IAAW5B,MAAM,CAAC6B,MAAP,CAAc7B,MAAM,CAAC6B,MAAP,CAAc,
EAAd,EAAkBD,IAAlB,CAAd,EAAuC;AAAEE,IAAAA,MAAM,EAAE9B,MAAM,CAAC6B,MAAP,CAAc7B,MAAM,CAAC6B,MAAP,CAAc,EAAd,EAAkBD,IAAI,CAACE,MAAvB,CAAd,EAA8C;AAAEL,MAAAA,KAAK,EAAED,GAAG,CAACI,IAAI,CAACE,MAAL,CAAYL,KAAb;AAAZ,KAA9C;AAAV,GAAvC,CAA1B;;AACA,SAAQM,EAAD,IAAQ;AACX,QAAI;AAAEC,MAAAA;AAAF,QAAaD,EAAjB;AAAA,QAAqBE,MAAM,GAAGtC,MAAM,CAACoC,EAAD,EAAK,CAAC,QAAD,CAAL,CAApC;;AACA,WAAO,CAAC,GAAGnB,SAAS,CAACsB,YAAd,EAA4BlC,MAAM,CAAC6B,MAAP,CAAc7B,MAAM,CAAC6B,MAAP,CAAc,EAAd,EAAkBI,MAAlB,CAAd,EAAyC;AAAED,MAAAA,MAAM,EAAEA,MAAM,CAACG,GAAP,CAAWR,MAAX;AAAV,KAAzC,CAA5B,CAAP;AACH,GAHD;AAIH;;AACDjB,OAAO,CAAC0B,OAAR,GAAkBf,MAAlB","sourcesContent":["\"use strict\";\nvar __rest = (this && this.__rest) || function (s, e) {\n var t = {};\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\n t[p] = s[p];\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\n t[p[i]] = s[p[i]];\n }\n return t;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_js_1 = require(\"../util.js\");\nconst pull = (offset) => (str) => str.slice(offset);\nconst push = (offset) => {\n const space = ''.padStart(offset, ' ');\n return (str) => str + space;\n};\nfunction indent(pos) {\n let shift;\n const pad = (start) => {\n if (shift === undefined) {\n const offset = pos - start.length;\n shift = offset > 0 ? push(offset) : pull(-offset);\n }\n return shift(start);\n };\n const update = (line) => (Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { start: pad(line.tokens.start) }) }));\n return (_a) => {\n var { source } = _a, fields = __rest(_a, [\"source\"]);\n return (0, util_js_1.rewireSource)(Object.assign(Object.assign({}, fields), { source: source.map(update) }));\n };\n}\nexports.default = indent;\n"],"file":"indent.cjs"}

View File

@@ -0,0 +1,2 @@
import { Transform } from './index.js';
export default function indent(pos: number): Transform;

13
node_modules/comment-parser/lib/transforms/index.cjs generated vendored Normal file
View File

@@ -0,0 +1,13 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.flow = void 0;
function flow(...transforms) {
return block => transforms.reduce((block, t) => t(block), block);
}
exports.flow = flow;
//# sourceMappingURL=index.cjs.map

View File

@@ -0,0 +1 @@
{"version":3,"sources":["index.js"],"names":["Object","defineProperty","exports","value","flow","transforms","block","reduce","t"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;AACAD,OAAO,CAACE,IAAR,GAAe,KAAK,CAApB;;AACA,SAASA,IAAT,CAAc,GAAGC,UAAjB,EAA6B;AACzB,SAAQC,KAAD,IAAWD,UAAU,CAACE,MAAX,CAAkB,CAACD,KAAD,EAAQE,CAAR,KAAcA,CAAC,CAACF,KAAD,CAAjC,EAA0CA,KAA1C,CAAlB;AACH;;AACDJ,OAAO,CAACE,IAAR,GAAeA,IAAf","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.flow = void 0;\nfunction flow(...transforms) {\n return (block) => transforms.reduce((block, t) => t(block), block);\n}\nexports.flow = flow;\n"],"file":"index.cjs"}

View File

@@ -0,0 +1,3 @@
import { Block } from '../primitives.js';
export type Transform = (Block: Block) => Block;
export declare function flow(...transforms: Transform[]): Transform;

113
node_modules/comment-parser/lib/util.cjs generated vendored Normal file
View File

@@ -0,0 +1,113 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.rewireSpecs = exports.rewireSource = exports.seedTokens = exports.seedSpec = exports.seedBlock = exports.splitLines = exports.splitSpace = exports.splitCR = exports.hasCR = exports.isSpace = void 0;
// True when the string is non-empty and consists of whitespace only.
function isSpace(source) {
  const whitespaceOnly = /^\s+$/;
  return whitespaceOnly.test(source);
}
exports.isSpace = isSpace;
// True when the string ends with a carriage return character.
function hasCR(source) {
  return source.endsWith('\r');
}
exports.hasCR = hasCR;
// Split a trailing run of '\r' off a string.
// Returns [trailing CR run, remainder]; no trailing CR yields ['', source].
function splitCR(source) {
  const match = source.match(/\r+$/);
  if (match === null) return ['', source];
  const cut = source.length - match[0].length;
  return [source.slice(cut), source.slice(0, cut)];
}
exports.splitCR = splitCR;
// Split leading whitespace off a string.
// Returns [leading whitespace, remainder]; no leading space yields ['', source].
function splitSpace(source) {
  const match = source.match(/^\s+/);
  if (match === null) return ['', source];
  const boundary = match[0].length;
  return [source.slice(0, boundary), source.slice(boundary)];
}
exports.splitSpace = splitSpace;
// Split source text into lines on '\n' (any '\r' stays attached to its line).
function splitLines(source) {
  return source.split('\n');
}
exports.splitLines = splitLines;
function seedBlock(block = {}) {
return Object.assign({
description: '',
tags: [],
source: [],
problems: []
}, block);
}
exports.seedBlock = seedBlock;
function seedSpec(spec = {}) {
return Object.assign({
tag: '',
name: '',
type: '',
optional: false,
description: '',
problems: [],
source: []
}, spec);
}
exports.seedSpec = seedSpec;
function seedTokens(tokens = {}) {
return Object.assign({
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '',
lineEnd: ''
}, tokens);
}
exports.seedTokens = seedTokens;
/**
* Assures Block.tags[].source contains references to the Block.source items,
* using Block.source as a source of truth. This is a counterpart of rewireSpecs
* @param block parsed coments block
*/
function rewireSource(block) {
const source = block.source.reduce((acc, line) => acc.set(line.number, line), new Map());
for (const spec of block.tags) {
spec.source = spec.source.map(line => source.get(line.number));
}
return block;
}
exports.rewireSource = rewireSource;
/**
* Assures Block.source contains references to the Block.tags[].source items,
* using Block.tags[].source as a source of truth. This is a counterpart of rewireSource
* @param block parsed coments block
*/
function rewireSpecs(block) {
const source = block.tags.reduce((acc, spec) => spec.source.reduce((acc, line) => acc.set(line.number, line), acc), new Map());
block.source = block.source.map(line => source.get(line.number) || line);
return block;
}
exports.rewireSpecs = rewireSpecs;
//# sourceMappingURL=util.cjs.map

1
node_modules/comment-parser/lib/util.cjs.map generated vendored Normal file

File diff suppressed because one or more lines are too long

21
node_modules/comment-parser/lib/util.d.ts generated vendored Normal file
View File

@@ -0,0 +1,21 @@
import { Block, Tokens, Spec } from './primitives.js';
export declare function isSpace(source: string): boolean;
export declare function hasCR(source: string): boolean;
export declare function splitCR(source: string): [string, string];
export declare function splitSpace(source: string): [string, string];
export declare function splitLines(source: string): string[];
export declare function seedBlock(block?: Partial<Block>): Block;
export declare function seedSpec(spec?: Partial<Spec>): Spec;
export declare function seedTokens(tokens?: Partial<Tokens>): Tokens;
/**
* Assures Block.tags[].source contains references to the Block.source items,
* using Block.source as a source of truth. This is a counterpart of rewireSpecs
* @param block parsed coments block
*/
export declare function rewireSource(block: Block): Block;
/**
* Assures Block.source contains references to the Block.tags[].source items,
* using Block.tags[].source as a source of truth. This is a counterpart of rewireSource
* @param block parsed coments block
*/
export declare function rewireSpecs(block: Block): Block;

105
node_modules/comment-parser/migrate-1.0.md generated vendored Normal file
View File

@@ -0,0 +1,105 @@
# Migrating 0.x to 1.x
## Parser
0.x can be mostly translated into 1.x one way or another. The idea behind the new config structure is to handle only the most common cases, and provide the fallback for alternative implementation.
### `dotted_names: boolean`
> By default dotted names like `name.subname.subsubname` will be expanded into nested sections, this can be prevented by passing opts.dotted_names = false.
**Removed** This feature is removed but still can be done on top of the `parse()` output. Please post a request or contribute a PR if you need it.
### `trim: boolean`
> Set this to false to avoid the default of trimming whitespace at the start and end of each line.
In the new parser all original spacing is kept along with comment lines in `.source`. Description lines are joined together depending on `spacing` option
**New option:**
- `spacing: "compact"` lines concatenated with a single space and no line breaks
- `spacing: "preserve"` keeps line breaks and space around as is. Indentation space counts from `*` delimiter or from the start of the line if the delimiter is omitted
- `spacing: (lines: Line[]) => string` completely freeform joining strategy, since all original spacing can be accessed, there is no limit to how this can be implemented. See [primitives.ts](./src/primitives.ts) and [spacer.ts](./src/parser/spacer.ts)
### `join: string | number | boolean`
> If the following lines of a multiline comment do not start with a star, `join` will have the following effect on tag source (and description) when joining the lines together:
>
> - If a string, use that string in place of the leading whitespace (and avoid newlines).
> - If a non-zero number (e.g., 1), do no trimming and avoid newlines.
> - If undefined, false, or 0, use the default behavior of not trimming but adding a newline.
> - Otherwise (e.g., if join is true), replace any leading whitespace with a single space and avoid newlines.
>
> Note that if a multi-line comment has lines that start with a star, these will be appended with initial whitespace as is and with newlines regardless of the join setting.
See the `spacing` option above; all the variations can be fine-tuned with `spacing: (lines: Line[]) => string`
### `fence: string | RegExp | ((source: string) => boolean)`
> Set to a string or regular expression to toggle state upon finding an odd number of matches within a line. Defaults to ```.
>
> If set to a function, it should return true to toggle fenced state; upon returning true the first time, this will prevent subsequent lines from being interpreted as starting a new jsdoc tag until such time as the function returns true again to indicate that the state has toggled back.
This is mostly kept the same
**New options:**
- ```` fence: '```' ```` same as 0.x
- `fencer: (source: string) => boolean` same as 0.x, see [parser/block-parser.ts](./src/parser/block-parser.ts)
### `parsers: Parser[]`
> In case you need to parse tags in different way you can pass opts.parsers = [parser1, ..., parserN], where each parser is function name(str:String, data:Object):{source:String, data:Object}.
> ...
**New options:**
- `tokenizers: Tokenizer[]` is a list of functions extracting the `tag`, `type`, `name` and `description` tokens from this string. See [parser/spec-parser.ts](./src/parser/spec-parser.ts) and [primitives.ts](./src/primitives.ts)
Default tokenizers chain is
```js
[
tagTokenizer(),
typeTokenizer(),
nameTokenizer(),
descriptionTokenizer(getSpacer(spacing)),
]
```
where
```ts
type Tokenizer = (spec: Spec) => Spec
interface Spec {
tag: string;
name: string;
default?: string;
type: string;
optional: boolean;
description: string;
problems: Problem[];
source: Line[];
}
```
chain starts with blank `Spec` and each tokenizer fulfills a piece using `.source` input
## Stringifier
> One may also convert comment-parser JSON structures back into strings using the stringify method (stringify(o: (object|Array) [, opts: object]): string).
> ...
Stringifier config follows the same strategy: a couple of common cases, and a freeform formatter as a fallback
**New Options:**
- `format: "none"` re-assembles the source with original spacing and delimiters preserved
- `format: "align"` aligns tag, name, type, and descriptions into fixed-width columns
- `format: (tokens: Tokens) => string[]` do what you like, resulting lines will be concatenated into the output. Despite the simple interface, this can be turned into a complex stateful formatter, see `"align"` implementation in [transforms/align.ts](./src/transforms/align.ts)
## Stream
Work in progress

84
node_modules/comment-parser/package.json generated vendored Normal file
View File

@@ -0,0 +1,84 @@
{
"name": "comment-parser",
"version": "1.4.1",
"description": "Generic JSDoc-like comment parser",
"type": "module",
"main": "lib/index.cjs",
"exports": {
".": {
"import": "./es6/index.js",
"require": "./lib/index.cjs"
},
"./primitives": {
"import": "./es6/primitives.js",
"require": "./lib/primitives.cjs"
},
"./util": {
"import": "./es6/util.js",
"require": "./lib/util.cjs"
},
"./parser/*": {
"import": "./es6/parser/*.js",
"require": "./lib/parser/*.cjs"
},
"./stringifier/*": {
"import": "./es6/stringifier/*.js",
"require": "./lib/stringifier/*.cjs"
},
"./transforms/*": {
"import": "./es6/transforms/*.js",
"require": "./lib/transforms/*.cjs"
}
},
"types": "lib/index.d.ts",
"directories": {
"test": "tests"
},
"devDependencies": {
"@types/jest": "^26.0.23",
"convert-extension": "^0.3.0",
"jest": "^27.0.5",
"prettier": "2.3.1",
"rimraf": "^3.0.2",
"rollup": "^2.52.2",
"ts-jest": "^27.0.3",
"typescript": "^4.9.5"
},
"engines": {
"node": ">= 12.0.0"
},
"scripts": {
"build": "rimraf lib es6 browser; tsc -p tsconfig.json && tsc -p tsconfig.node.json && rollup -o browser/index.js -f iife --context window -n CommentParser es6/index.js && convert-extension cjs lib/",
"format": "prettier --write src tests",
"pretest": "rimraf coverage; npm run build",
"test": "prettier --check src tests && jest --verbose",
"preversion": "npm run build"
},
"repository": {
"type": "git",
"url": "git@github.com:yavorskiy/comment-parser.git"
},
"keywords": [
"jsdoc",
"comments",
"parser"
],
"author": "Sergiy Yavorsky <sergiy@yavorsky.me> (https://github.com/syavorsky)",
"contributors": [
"Alex Grozav (https://github.com/alexgrozav)",
"Alexej Yaroshevich (https://github.com/zxqfox)",
"Andre Wachsmuth (https://github.com/blutorange)",
"Brett Zamir (https://github.com/brettz9)",
"Dieter Oberkofler (https://github.com/doberkofler)",
"Evgeny Reznichenko (https://github.com/zxcabs)",
"Javier \"Ciberma\" Mora (https://github.com/jhm-ciberman)",
"Jayden Seric (https://github.com/jaydenseric)",
"Jordan Harband (https://github.com/ljharb)",
"tengattack (https://github.com/tengattack)"
],
"license": "MIT",
"bugs": {
"url": "https://github.com/syavorsky/comment-parser/issues"
},
"homepage": "https://github.com/syavorsky/comment-parser"
}

36
node_modules/comment-parser/src/index.ts generated vendored Normal file
View File

@@ -0,0 +1,36 @@
import getParser, { Options as ParserOptions } from './parser/index.js';
import descriptionTokenizer from './parser/tokenizers/description.js';
import nameTokenizer from './parser/tokenizers/name.js';
import tagTokenizer from './parser/tokenizers/tag.js';
import typeTokenizer from './parser/tokenizers/type.js';
import getStringifier from './stringifier/index.js';
import alignTransform from './transforms/align.js';
import indentTransform from './transforms/indent.js';
import crlfTransform from './transforms/crlf.js';
import { flow as flowTransform } from './transforms/index.js';
import { rewireSpecs, rewireSource, seedBlock, seedTokens } from './util.js';
export * from './primitives.js';
export function parse(source: string, options: Partial<ParserOptions> = {}) {
return getParser(options)(source);
}
// Ready-to-use stringifier created with getStringifier() defaults.
export const stringify = getStringifier();
export { default as inspect } from './stringifier/inspect.js';
// Built-in block transforms, exposed by name for external composition via `flow`.
export const transforms = {
  flow: flowTransform,
  align: alignTransform,
  indent: indentTransform,
  crlf: crlfTransform,
};
// Tokenizer factories used by the default parser chain (tag, type, name, description).
export const tokenizers = {
  tag: tagTokenizer,
  type: typeTokenizer,
  name: nameTokenizer,
  description: descriptionTokenizer,
};
// Helper utilities for seeding and rewiring parsed block structures.
export const util = { rewireSpecs, rewireSource, seedBlock, seedTokens };

60
node_modules/comment-parser/src/parser/block-parser.ts generated vendored Normal file
View File

@@ -0,0 +1,60 @@
import { Line } from '../primitives.js';
const reTag = /^@\S+/;
/**
* Groups source lines in sections representing tags.
* First section is a block description if present. Last section captures lines starting with
* the last tag to the end of the block, including dangling closing marker.
 * @param {Line[]} block source lines making a single comment block
*/
export type Parser = (block: Line[]) => Line[][];
/**
* Predicate telling if string contains opening/closing escaping sequence
* @param {string} source raw source line
*/
export type Fencer = (source: string) => boolean;
/**
* `Parser` configuration options
*/
export interface Options {
// escaping sequence or predicate
fence: string | Fencer;
}
/**
 * Creates a configured `Parser` that groups a block's source lines into
 * sections: the leading description section, then one section per tag line.
 * Lines inside a fenced region are never treated as tag starts.
 * @param {Partial<Options>} options `fence` is either the escaping marker
 * string or a predicate toggling fenced state
 */
export default function getParser({
  fence = '```',
}: Partial<Options> = {}): Parser {
  const tagLine = /^@\S+/;
  // A string marker becomes a counter: an odd number of markers in a line
  // (even split-segment count) toggles the fenced state.
  const fencer =
    typeof fence === 'string'
      ? (source: string) => source.split(fence).length % 2 === 0
      : fence;
  return function parseBlock(source: Line[]): Line[][] {
    // sections[0] collects the block description.
    const sections: Line[][] = [[]];
    let inFence = false;
    for (const line of source) {
      const text = line.tokens.description;
      if (tagLine.test(text) && !inFence) sections.push([line]);
      else sections[sections.length - 1].push(line);
      if (fencer(text)) inFence = !inFence;
    }
    return sections;
  };
}
// Normalizes the fence option into a predicate. A predicate is returned
// as-is; a string marker is wrapped in a counter: splitting on the marker
// yields an even segment count exactly when the line holds an odd number
// of markers (i.e. the line toggles fenced state).
function getFencer(fence: string | Fencer): Fencer {
  if (typeof fence !== 'string') return fence;
  return (source: string) => source.split(fence).length % 2 === 0;
}

70
node_modules/comment-parser/src/parser/index.ts generated vendored Normal file
View File

@@ -0,0 +1,70 @@
import { Block, Line, Problem, BlockMarkers, Markers } from '../primitives.js';
import { splitLines } from '../util.js';
import blockParser from './block-parser.js';
import sourceParser from './source-parser.js';
import specParser from './spec-parser.js';
import { Tokenizer } from './tokenizers/index.js';
import tokenizeTag from './tokenizers/tag.js';
import tokenizeType from './tokenizers/type.js';
import tokenizeName from './tokenizers/name.js';
import tokenizeDescription, {
getJoiner as getDescriptionJoiner,
} from './tokenizers/description.js';
export interface Options {
// start count for source line numbers
startLine: number;
// escaping chars sequence marking wrapped content literal for the parser
fence: string;
// block and comment description compaction strategy
spacing: 'compact' | 'preserve';
// comment description markers
markers: BlockMarkers;
// tokenizer functions extracting name, type, and description out of tag, see Tokenizer
tokenizers: Tokenizer[];
}
export type Parser = (source: string) => Block[];
/**
 * Creates a configured comment `Parser` that turns raw source text into
 * parsed `Block`s: lines are split, grouped into comment blocks, sectioned
 * by tag, and each tag section is run through the tokenizer chain.
 * @param options see `Options`; defaults match the original parser
 * @throws {Error} when `startLine` is negative or not an integer
 */
export default function getParser({
  startLine = 0,
  fence = '```',
  spacing = 'compact',
  markers = Markers,
  tokenizers = [
    tokenizeTag(),
    tokenizeType(spacing),
    tokenizeName(),
    tokenizeDescription(spacing),
  ],
}: Partial<Options> = {}): Parser {
  if (startLine < 0 || startLine % 1 > 0) throw new Error('Invalid startLine');
  const parseSource = sourceParser({ startLine, markers });
  const parseBlock = blockParser({ fence });
  const parseSpec = specParser({ tokenizers });
  const joinDescription = getDescriptionJoiner(spacing);
  return function (source: string): Block[] {
    const blocks: Block[] = [];
    for (const line of splitLines(source)) {
      const lines = parseSource(line);
      if (lines === null) continue;
      const sections = parseBlock(lines);
      const specs = sections.slice(1).map(parseSpec);
      // Block-level problems are the concatenation of all spec problems.
      const problems: Problem[] = [];
      for (const spec of specs) problems.push(...spec.problems);
      blocks.push({
        description: joinDescription(sections[0], markers),
        tags: specs,
        source: lines,
        problems,
      });
    }
    return blocks;
  };
}

Some files were not shown because too many files have changed in this diff Show More