forked from public/fvtt-cthulhu-eternal
Initial import with skill sheet working
88
node_modules/comment-parser/lib/index.cjs
generated
vendored
Normal file
@ -0,0 +1,88 @@
"use strict";

var __createBinding = this && this.__createBinding || (Object.create ? function (o, m, k, k2) {
  if (k2 === undefined) k2 = k;
  var desc = Object.getOwnPropertyDescriptor(m, k);

  if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
    desc = {
      enumerable: true,
      get: function () {
        return m[k];
      }
    };
  }

  Object.defineProperty(o, k2, desc);
} : function (o, m, k, k2) {
  if (k2 === undefined) k2 = k;
  o[k2] = m[k];
});

var __exportStar = this && this.__exportStar || function (m, exports) {
  for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.util = exports.tokenizers = exports.transforms = exports.inspect = exports.stringify = exports.parse = void 0;

const index_js_1 = require("./parser/index.cjs");

const description_js_1 = require("./parser/tokenizers/description.cjs");

const name_js_1 = require("./parser/tokenizers/name.cjs");

const tag_js_1 = require("./parser/tokenizers/tag.cjs");

const type_js_1 = require("./parser/tokenizers/type.cjs");

const index_js_2 = require("./stringifier/index.cjs");

const align_js_1 = require("./transforms/align.cjs");

const indent_js_1 = require("./transforms/indent.cjs");

const crlf_js_1 = require("./transforms/crlf.cjs");

const index_js_3 = require("./transforms/index.cjs");

const util_js_1 = require("./util.cjs");

__exportStar(require("./primitives.cjs"), exports);

function parse(source, options = {}) {
  return (0, index_js_1.default)(options)(source);
}

exports.parse = parse;
exports.stringify = (0, index_js_2.default)();

var inspect_js_1 = require("./stringifier/inspect.cjs");

Object.defineProperty(exports, "inspect", {
  enumerable: true,
  get: function () {
    return inspect_js_1.default;
  }
});
exports.transforms = {
  flow: index_js_3.flow,
  align: align_js_1.default,
  indent: indent_js_1.default,
  crlf: crlf_js_1.default
};
exports.tokenizers = {
  tag: tag_js_1.default,
  type: type_js_1.default,
  name: name_js_1.default,
  description: description_js_1.default
};
exports.util = {
  rewireSpecs: util_js_1.rewireSpecs,
  rewireSource: util_js_1.rewireSource,
  seedBlock: util_js_1.seedBlock,
  seedTokens: util_js_1.seedTokens
};
//# sourceMappingURL=index.cjs.map
1
node_modules/comment-parser/lib/index.cjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
32
node_modules/comment-parser/lib/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,32 @@
import { Options as ParserOptions } from './parser/index.js';
import descriptionTokenizer from './parser/tokenizers/description.js';
import nameTokenizer from './parser/tokenizers/name.js';
import tagTokenizer from './parser/tokenizers/tag.js';
import typeTokenizer from './parser/tokenizers/type.js';
import alignTransform from './transforms/align.js';
import indentTransform from './transforms/indent.js';
import crlfTransform from './transforms/crlf.js';
import { flow as flowTransform } from './transforms/index.js';
import { rewireSpecs, rewireSource, seedBlock, seedTokens } from './util.js';
export * from './primitives.js';
export declare function parse(source: string, options?: Partial<ParserOptions>): import("./primitives.js").Block[];
export declare const stringify: import("./stringifier/index.js").Stringifier;
export { default as inspect } from './stringifier/inspect.js';
export declare const transforms: {
    flow: typeof flowTransform;
    align: typeof alignTransform;
    indent: typeof indentTransform;
    crlf: typeof crlfTransform;
};
export declare const tokenizers: {
    tag: typeof tagTokenizer;
    type: typeof typeTokenizer;
    name: typeof nameTokenizer;
    description: typeof descriptionTokenizer;
};
export declare const util: {
    rewireSpecs: typeof rewireSpecs;
    rewireSource: typeof rewireSource;
    seedBlock: typeof seedBlock;
    seedTokens: typeof seedTokens;
};
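The declaration file above spells out the vendored package's public surface: parse, stringify, inspect, plus the tokenizers, transforms, and util bags. As a quick orientation, a minimal usage sketch, assuming the package is required under its published name (comment-parser) rather than by file path:

const { parse, stringify } = require('comment-parser');

// A small JSDoc block to run through the parser.
const [block] = parse(`
/**
 * Rolls a skill check.
 * @param {string} skill name of the skill rolled
 */
`);

console.log(block.description);                 // "Rolls a skill check."
console.log(block.tags[0].tag);                 // "param"
console.log(block.tags[0].type);                // "string"
console.log(block.tags[0].name);                // "skill"
console.log(stringify(block));                  // reassembles the comment from its tokens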
44
node_modules/comment-parser/lib/parser/block-parser.cjs
generated
vendored
Normal file
@ -0,0 +1,44 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
const reTag = /^@\S+/;
/**
 * Creates configured `Parser`
 * @param {Partial<Options>} options
 */

function getParser({
  fence = '```'
} = {}) {
  const fencer = getFencer(fence);

  const toggleFence = (source, isFenced) => fencer(source) ? !isFenced : isFenced;

  return function parseBlock(source) {
    // start with description section
    const sections = [[]];
    let isFenced = false;

    for (const line of source) {
      if (reTag.test(line.tokens.description) && !isFenced) {
        sections.push([line]);
      } else {
        sections[sections.length - 1].push(line);
      }

      isFenced = toggleFence(line.tokens.description, isFenced);
    }

    return sections;
  };
}

exports.default = getParser;

function getFencer(fence) {
  if (typeof fence === 'string') return source => source.split(fence).length % 2 === 0;
  return fence;
}
//# sourceMappingURL=block-parser.cjs.map
1
node_modules/comment-parser/lib/parser/block-parser.cjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"sources":["block-parser.js"],"names":["Object","defineProperty","exports","value","reTag","getParser","fence","fencer","getFencer","toggleFence","source","isFenced","parseBlock","sections","line","test","tokens","description","push","length","default","split"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;AACA,MAAMC,KAAK,GAAG,OAAd;AACA;AACA;AACA;AACA;;AACA,SAASC,SAAT,CAAmB;AAAEC,EAAAA,KAAK,GAAG;AAAV,IAAqB,EAAxC,EAA4C;AACxC,QAAMC,MAAM,GAAGC,SAAS,CAACF,KAAD,CAAxB;;AACA,QAAMG,WAAW,GAAG,CAACC,MAAD,EAASC,QAAT,KAAsBJ,MAAM,CAACG,MAAD,CAAN,GAAiB,CAACC,QAAlB,GAA6BA,QAAvE;;AACA,SAAO,SAASC,UAAT,CAAoBF,MAApB,EAA4B;AAC/B;AACA,UAAMG,QAAQ,GAAG,CAAC,EAAD,CAAjB;AACA,QAAIF,QAAQ,GAAG,KAAf;;AACA,SAAK,MAAMG,IAAX,IAAmBJ,MAAnB,EAA2B;AACvB,UAAIN,KAAK,CAACW,IAAN,CAAWD,IAAI,CAACE,MAAL,CAAYC,WAAvB,KAAuC,CAACN,QAA5C,EAAsD;AAClDE,QAAAA,QAAQ,CAACK,IAAT,CAAc,CAACJ,IAAD,CAAd;AACH,OAFD,MAGK;AACDD,QAAAA,QAAQ,CAACA,QAAQ,CAACM,MAAT,GAAkB,CAAnB,CAAR,CAA8BD,IAA9B,CAAmCJ,IAAnC;AACH;;AACDH,MAAAA,QAAQ,GAAGF,WAAW,CAACK,IAAI,CAACE,MAAL,CAAYC,WAAb,EAA0BN,QAA1B,CAAtB;AACH;;AACD,WAAOE,QAAP;AACH,GAdD;AAeH;;AACDX,OAAO,CAACkB,OAAR,GAAkBf,SAAlB;;AACA,SAASG,SAAT,CAAmBF,KAAnB,EAA0B;AACtB,MAAI,OAAOA,KAAP,KAAiB,QAArB,EACI,OAAQI,MAAD,IAAYA,MAAM,CAACW,KAAP,CAAaf,KAAb,EAAoBa,MAApB,GAA6B,CAA7B,KAAmC,CAAtD;AACJ,SAAOb,KAAP;AACH","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst reTag = /^@\\S+/;\n/**\n * Creates configured `Parser`\n * @param {Partial<Options>} options\n */\nfunction getParser({ fence = '```', } = {}) {\n const fencer = getFencer(fence);\n const toggleFence = (source, isFenced) => fencer(source) ? !isFenced : isFenced;\n return function parseBlock(source) {\n // start with description section\n const sections = [[]];\n let isFenced = false;\n for (const line of source) {\n if (reTag.test(line.tokens.description) && !isFenced) {\n sections.push([line]);\n }\n else {\n sections[sections.length - 1].push(line);\n }\n isFenced = toggleFence(line.tokens.description, isFenced);\n }\n return sections;\n };\n}\nexports.default = getParser;\nfunction getFencer(fence) {\n if (typeof fence === 'string')\n return (source) => source.split(fence).length % 2 === 0;\n return fence;\n}\n"],"file":"block-parser.cjs"}
24
node_modules/comment-parser/lib/parser/block-parser.d.ts
generated
vendored
Normal file
@ -0,0 +1,24 @@
import { Line } from '../primitives.js';
/**
 * Groups source lines in sections representing tags.
 * First section is a block description if present. Last section captures lines starting with
 * the last tag to the end of the block, including dangling closing marker.
 * @param {Line[]} block souce lines making a single comment block
 */
export type Parser = (block: Line[]) => Line[][];
/**
 * Predicate telling if string contains opening/closing escaping sequence
 * @param {string} source raw source line
 */
export type Fencer = (source: string) => boolean;
/**
 * `Parser` configuration options
 */
export interface Options {
    fence: string | Fencer;
}
/**
 * Creates configured `Parser`
 * @param {Partial<Options>} options
 */
export default function getParser({ fence, }?: Partial<Options>): Parser;
65
node_modules/comment-parser/lib/parser/index.cjs
generated
vendored
Normal file
@ -0,0 +1,65 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

const primitives_js_1 = require("../primitives.cjs");

const util_js_1 = require("../util.cjs");

const block_parser_js_1 = require("./block-parser.cjs");

const source_parser_js_1 = require("./source-parser.cjs");

const spec_parser_js_1 = require("./spec-parser.cjs");

const tag_js_1 = require("./tokenizers/tag.cjs");

const type_js_1 = require("./tokenizers/type.cjs");

const name_js_1 = require("./tokenizers/name.cjs");

const description_js_1 = require("./tokenizers/description.cjs");

function getParser({
  startLine = 0,
  fence = '```',
  spacing = 'compact',
  markers = primitives_js_1.Markers,
  tokenizers = [(0, tag_js_1.default)(), (0, type_js_1.default)(spacing), (0, name_js_1.default)(), (0, description_js_1.default)(spacing)]
} = {}) {
  if (startLine < 0 || startLine % 1 > 0) throw new Error('Invalid startLine');
  const parseSource = (0, source_parser_js_1.default)({
    startLine,
    markers
  });
  const parseBlock = (0, block_parser_js_1.default)({
    fence
  });
  const parseSpec = (0, spec_parser_js_1.default)({
    tokenizers
  });
  const joinDescription = (0, description_js_1.getJoiner)(spacing);
  return function (source) {
    const blocks = [];

    for (const line of (0, util_js_1.splitLines)(source)) {
      const lines = parseSource(line);
      if (lines === null) continue;
      const sections = parseBlock(lines);
      const specs = sections.slice(1).map(parseSpec);
      blocks.push({
        description: joinDescription(sections[0], markers),
        tags: specs,
        source: lines,
        problems: specs.reduce((acc, spec) => acc.concat(spec.problems), [])
      });
    }

    return blocks;
  };
}

exports.default = getParser;
//# sourceMappingURL=index.cjs.map
1
node_modules/comment-parser/lib/parser/index.cjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"sources":["index.js"],"names":["Object","defineProperty","exports","value","primitives_js_1","require","util_js_1","block_parser_js_1","source_parser_js_1","spec_parser_js_1","tag_js_1","type_js_1","name_js_1","description_js_1","getParser","startLine","fence","spacing","markers","Markers","tokenizers","default","Error","parseSource","parseBlock","parseSpec","joinDescription","getJoiner","source","blocks","line","splitLines","lines","sections","specs","slice","map","push","description","tags","problems","reduce","acc","spec","concat"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;;AACA,MAAMC,eAAe,GAAGC,OAAH,qBAArB;;AACA,MAAMC,SAAS,GAAGD,OAAH,eAAf;;AACA,MAAME,iBAAiB,GAAGF,OAAH,sBAAvB;;AACA,MAAMG,kBAAkB,GAAGH,OAAH,uBAAxB;;AACA,MAAMI,gBAAgB,GAAGJ,OAAH,qBAAtB;;AACA,MAAMK,QAAQ,GAAGL,OAAH,wBAAd;;AACA,MAAMM,SAAS,GAAGN,OAAH,yBAAf;;AACA,MAAMO,SAAS,GAAGP,OAAH,yBAAf;;AACA,MAAMQ,gBAAgB,GAAGR,OAAH,gCAAtB;;AACA,SAASS,SAAT,CAAmB;AAAEC,EAAAA,SAAS,GAAG,CAAd;AAAiBC,EAAAA,KAAK,GAAG,KAAzB;AAAgCC,EAAAA,OAAO,GAAG,SAA1C;AAAqDC,EAAAA,OAAO,GAAGd,eAAe,CAACe,OAA/E;AAAwFC,EAAAA,UAAU,GAAG,CACpH,CAAC,GAAGV,QAAQ,CAACW,OAAb,GADoH,EAEpH,CAAC,GAAGV,SAAS,CAACU,OAAd,EAAuBJ,OAAvB,CAFoH,EAGpH,CAAC,GAAGL,SAAS,CAACS,OAAd,GAHoH,EAIpH,CAAC,GAAGR,gBAAgB,CAACQ,OAArB,EAA8BJ,OAA9B,CAJoH;AAArG,IAKZ,EALP,EAKW;AACP,MAAIF,SAAS,GAAG,CAAZ,IAAiBA,SAAS,GAAG,CAAZ,GAAgB,CAArC,EACI,MAAM,IAAIO,KAAJ,CAAU,mBAAV,CAAN;AACJ,QAAMC,WAAW,GAAG,CAAC,GAAGf,kBAAkB,CAACa,OAAvB,EAAgC;AAAEN,IAAAA,SAAF;AAAaG,IAAAA;AAAb,GAAhC,CAApB;AACA,QAAMM,UAAU,GAAG,CAAC,GAAGjB,iBAAiB,CAACc,OAAtB,EAA+B;AAAEL,IAAAA;AAAF,GAA/B,CAAnB;AACA,QAAMS,SAAS,GAAG,CAAC,GAAGhB,gBAAgB,CAACY,OAArB,EAA8B;AAAED,IAAAA;AAAF,GAA9B,CAAlB;AACA,QAAMM,eAAe,GAAG,CAAC,GAAGb,gBAAgB,CAACc,SAArB,EAAgCV,OAAhC,CAAxB;AACA,SAAO,UAAUW,MAAV,EAAkB;AACrB,UAAMC,MAAM,GAAG,EAAf;;AACA,SAAK,MAAMC,IAAX,IAAmB,CAAC,GAAGxB,SAAS,CAACyB,UAAd,EAA0BH,MAA1B,CAAnB,EAAsD;AAClD,YAAMI,KAAK,GAAGT,WAAW,CAACO,IAAD,CAAzB;AACA,UAAIE,KAAK,KAAK,IAAd,EACI;AACJ,YAAMC,QAAQ,GAAGT,UAAU,CAACQ,KAAD,CAA3B;AACA,YAAME,KAAK,GAAGD,QAAQ,CAACE,KAAT,CAAe,CAAf,EAAkBC,GAAlB,CAAsBX,SAAtB,CAAd;AACAI,MAAAA,MAAM,CAACQ,IAAP,CAAY;AACRC,QAAAA,WAAW,EAAEZ,eAAe,CAACO,QAAQ,CAAC,CAAD,CAAT,EAAcf,OAAd,CADpB;AAERqB,QAAAA,IAAI,EAAEL,KAFE;AAGRN,QAAAA,MAAM,EAAEI,KAHA;AAIRQ,QAAAA,QAAQ,EAAEN,KAAK,CAACO,MAAN,CAAa,CAACC,GAAD,EAAMC,IAAN,KAAeD,GAAG,CAACE,MAAJ,CAAWD,IAAI,CAACH,QAAhB,CAA5B,EAAuD,EAAvD;AAJF,OAAZ;AAMH;;AACD,WAAOX,MAAP;AACH,GAhBD;AAiBH;;AACD3B,OAAO,CAACmB,OAAR,GAAkBP,SAAlB","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst primitives_js_1 = require(\"../primitives.js\");\nconst util_js_1 = require(\"../util.js\");\nconst block_parser_js_1 = require(\"./block-parser.js\");\nconst source_parser_js_1 = require(\"./source-parser.js\");\nconst spec_parser_js_1 = require(\"./spec-parser.js\");\nconst tag_js_1 = require(\"./tokenizers/tag.js\");\nconst type_js_1 = require(\"./tokenizers/type.js\");\nconst name_js_1 = require(\"./tokenizers/name.js\");\nconst description_js_1 = require(\"./tokenizers/description.js\");\nfunction getParser({ startLine = 0, fence = '```', spacing = 'compact', markers = primitives_js_1.Markers, tokenizers = [\n (0, tag_js_1.default)(),\n (0, type_js_1.default)(spacing),\n (0, name_js_1.default)(),\n (0, description_js_1.default)(spacing),\n], } = {}) {\n if (startLine < 0 || startLine % 1 > 0)\n throw new Error('Invalid startLine');\n const parseSource = (0, source_parser_js_1.default)({ startLine, markers });\n 
const parseBlock = (0, block_parser_js_1.default)({ fence });\n const parseSpec = (0, spec_parser_js_1.default)({ tokenizers });\n const joinDescription = (0, description_js_1.getJoiner)(spacing);\n return function (source) {\n const blocks = [];\n for (const line of (0, util_js_1.splitLines)(source)) {\n const lines = parseSource(line);\n if (lines === null)\n continue;\n const sections = parseBlock(lines);\n const specs = sections.slice(1).map(parseSpec);\n blocks.push({\n description: joinDescription(sections[0], markers),\n tags: specs,\n source: lines,\n problems: specs.reduce((acc, spec) => acc.concat(spec.problems), []),\n });\n }\n return blocks;\n };\n}\nexports.default = getParser;\n"],"file":"index.cjs"}
11
node_modules/comment-parser/lib/parser/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,11 @@
import { Block, BlockMarkers } from '../primitives.js';
import { Tokenizer } from './tokenizers/index.js';
export interface Options {
    startLine: number;
    fence: string;
    spacing: 'compact' | 'preserve';
    markers: BlockMarkers;
    tokenizers: Tokenizer[];
}
export type Parser = (source: string) => Block[];
export default function getParser({ startLine, fence, spacing, markers, tokenizers, }?: Partial<Options>): Parser;
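The Options interface above lists everything the parser factory accepts (startLine, fence, spacing, markers, tokenizers). A small sketch of passing one of these through the top-level parse entry point, again assuming the published package name:

const { parse } = require('comment-parser');

const source = [
  '/**',
  ' * First line of the description.',
  ' * Second line, kept on its own line.',
  ' */'
].join('\n');

// 'preserve' keeps the original line breaks; the default 'compact' joins lines with spaces.
const [block] = parse(source, { spacing: 'preserve' });
console.log(block.description);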
68
node_modules/comment-parser/lib/parser/source-parser.cjs
generated
vendored
Normal file
@ -0,0 +1,68 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

const primitives_js_1 = require("../primitives.cjs");

const util_js_1 = require("../util.cjs");

function getParser({
  startLine = 0,
  markers = primitives_js_1.Markers
} = {}) {
  let block = null;
  let num = startLine;
  return function parseSource(source) {
    let rest = source;
    const tokens = (0, util_js_1.seedTokens)();
    [tokens.lineEnd, rest] = (0, util_js_1.splitCR)(rest);
    [tokens.start, rest] = (0, util_js_1.splitSpace)(rest);

    if (block === null && rest.startsWith(markers.start) && !rest.startsWith(markers.nostart)) {
      block = [];
      tokens.delimiter = rest.slice(0, markers.start.length);
      rest = rest.slice(markers.start.length);
      [tokens.postDelimiter, rest] = (0, util_js_1.splitSpace)(rest);
    }

    if (block === null) {
      num++;
      return null;
    }

    const isClosed = rest.trimRight().endsWith(markers.end);

    if (tokens.delimiter === '' && rest.startsWith(markers.delim) && !rest.startsWith(markers.end)) {
      tokens.delimiter = markers.delim;
      rest = rest.slice(markers.delim.length);
      [tokens.postDelimiter, rest] = (0, util_js_1.splitSpace)(rest);
    }

    if (isClosed) {
      const trimmed = rest.trimRight();
      tokens.end = rest.slice(trimmed.length - markers.end.length);
      rest = trimmed.slice(0, -markers.end.length);
    }

    tokens.description = rest;
    block.push({
      number: num,
      source,
      tokens
    });
    num++;

    if (isClosed) {
      const result = block.slice();
      block = null;
      return result;
    }

    return null;
  };
}

exports.default = getParser;
//# sourceMappingURL=source-parser.cjs.map
1
node_modules/comment-parser/lib/parser/source-parser.cjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"sources":["source-parser.js"],"names":["Object","defineProperty","exports","value","primitives_js_1","require","util_js_1","getParser","startLine","markers","Markers","block","num","parseSource","source","rest","tokens","seedTokens","lineEnd","splitCR","start","splitSpace","startsWith","nostart","delimiter","slice","length","postDelimiter","isClosed","trimRight","endsWith","end","delim","trimmed","description","push","number","result","default"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;;AACA,MAAMC,eAAe,GAAGC,OAAH,qBAArB;;AACA,MAAMC,SAAS,GAAGD,OAAH,eAAf;;AACA,SAASE,SAAT,CAAmB;AAAEC,EAAAA,SAAS,GAAG,CAAd;AAAiBC,EAAAA,OAAO,GAAGL,eAAe,CAACM;AAA3C,IAAwD,EAA3E,EAA+E;AAC3E,MAAIC,KAAK,GAAG,IAAZ;AACA,MAAIC,GAAG,GAAGJ,SAAV;AACA,SAAO,SAASK,WAAT,CAAqBC,MAArB,EAA6B;AAChC,QAAIC,IAAI,GAAGD,MAAX;AACA,UAAME,MAAM,GAAG,CAAC,GAAGV,SAAS,CAACW,UAAd,GAAf;AACA,KAACD,MAAM,CAACE,OAAR,EAAiBH,IAAjB,IAAyB,CAAC,GAAGT,SAAS,CAACa,OAAd,EAAuBJ,IAAvB,CAAzB;AACA,KAACC,MAAM,CAACI,KAAR,EAAeL,IAAf,IAAuB,CAAC,GAAGT,SAAS,CAACe,UAAd,EAA0BN,IAA1B,CAAvB;;AACA,QAAIJ,KAAK,KAAK,IAAV,IACAI,IAAI,CAACO,UAAL,CAAgBb,OAAO,CAACW,KAAxB,CADA,IAEA,CAACL,IAAI,CAACO,UAAL,CAAgBb,OAAO,CAACc,OAAxB,CAFL,EAEuC;AACnCZ,MAAAA,KAAK,GAAG,EAAR;AACAK,MAAAA,MAAM,CAACQ,SAAP,GAAmBT,IAAI,CAACU,KAAL,CAAW,CAAX,EAAchB,OAAO,CAACW,KAAR,CAAcM,MAA5B,CAAnB;AACAX,MAAAA,IAAI,GAAGA,IAAI,CAACU,KAAL,CAAWhB,OAAO,CAACW,KAAR,CAAcM,MAAzB,CAAP;AACA,OAACV,MAAM,CAACW,aAAR,EAAuBZ,IAAvB,IAA+B,CAAC,GAAGT,SAAS,CAACe,UAAd,EAA0BN,IAA1B,CAA/B;AACH;;AACD,QAAIJ,KAAK,KAAK,IAAd,EAAoB;AAChBC,MAAAA,GAAG;AACH,aAAO,IAAP;AACH;;AACD,UAAMgB,QAAQ,GAAGb,IAAI,CAACc,SAAL,GAAiBC,QAAjB,CAA0BrB,OAAO,CAACsB,GAAlC,CAAjB;;AACA,QAAIf,MAAM,CAACQ,SAAP,KAAqB,EAArB,IACAT,IAAI,CAACO,UAAL,CAAgBb,OAAO,CAACuB,KAAxB,CADA,IAEA,CAACjB,IAAI,CAACO,UAAL,CAAgBb,OAAO,CAACsB,GAAxB,CAFL,EAEmC;AAC/Bf,MAAAA,MAAM,CAACQ,SAAP,GAAmBf,OAAO,CAACuB,KAA3B;AACAjB,MAAAA,IAAI,GAAGA,IAAI,CAACU,KAAL,CAAWhB,OAAO,CAACuB,KAAR,CAAcN,MAAzB,CAAP;AACA,OAACV,MAAM,CAACW,aAAR,EAAuBZ,IAAvB,IAA+B,CAAC,GAAGT,SAAS,CAACe,UAAd,EAA0BN,IAA1B,CAA/B;AACH;;AACD,QAAIa,QAAJ,EAAc;AACV,YAAMK,OAAO,GAAGlB,IAAI,CAACc,SAAL,EAAhB;AACAb,MAAAA,MAAM,CAACe,GAAP,GAAahB,IAAI,CAACU,KAAL,CAAWQ,OAAO,CAACP,MAAR,GAAiBjB,OAAO,CAACsB,GAAR,CAAYL,MAAxC,CAAb;AACAX,MAAAA,IAAI,GAAGkB,OAAO,CAACR,KAAR,CAAc,CAAd,EAAiB,CAAChB,OAAO,CAACsB,GAAR,CAAYL,MAA9B,CAAP;AACH;;AACDV,IAAAA,MAAM,CAACkB,WAAP,GAAqBnB,IAArB;AACAJ,IAAAA,KAAK,CAACwB,IAAN,CAAW;AAAEC,MAAAA,MAAM,EAAExB,GAAV;AAAeE,MAAAA,MAAf;AAAuBE,MAAAA;AAAvB,KAAX;AACAJ,IAAAA,GAAG;;AACH,QAAIgB,QAAJ,EAAc;AACV,YAAMS,MAAM,GAAG1B,KAAK,CAACc,KAAN,EAAf;AACAd,MAAAA,KAAK,GAAG,IAAR;AACA,aAAO0B,MAAP;AACH;;AACD,WAAO,IAAP;AACH,GAvCD;AAwCH;;AACDnC,OAAO,CAACoC,OAAR,GAAkB/B,SAAlB","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst primitives_js_1 = require(\"../primitives.js\");\nconst util_js_1 = require(\"../util.js\");\nfunction getParser({ startLine = 0, markers = primitives_js_1.Markers, } = {}) {\n let block = null;\n let num = startLine;\n return function parseSource(source) {\n let rest = source;\n const tokens = (0, util_js_1.seedTokens)();\n [tokens.lineEnd, rest] = (0, util_js_1.splitCR)(rest);\n [tokens.start, rest] = (0, util_js_1.splitSpace)(rest);\n if (block === null &&\n rest.startsWith(markers.start) &&\n !rest.startsWith(markers.nostart)) {\n block = [];\n tokens.delimiter = rest.slice(0, markers.start.length);\n rest = rest.slice(markers.start.length);\n [tokens.postDelimiter, rest] = (0, 
util_js_1.splitSpace)(rest);\n }\n if (block === null) {\n num++;\n return null;\n }\n const isClosed = rest.trimRight().endsWith(markers.end);\n if (tokens.delimiter === '' &&\n rest.startsWith(markers.delim) &&\n !rest.startsWith(markers.end)) {\n tokens.delimiter = markers.delim;\n rest = rest.slice(markers.delim.length);\n [tokens.postDelimiter, rest] = (0, util_js_1.splitSpace)(rest);\n }\n if (isClosed) {\n const trimmed = rest.trimRight();\n tokens.end = rest.slice(trimmed.length - markers.end.length);\n rest = trimmed.slice(0, -markers.end.length);\n }\n tokens.description = rest;\n block.push({ number: num, source, tokens });\n num++;\n if (isClosed) {\n const result = block.slice();\n block = null;\n return result;\n }\n return null;\n };\n}\nexports.default = getParser;\n"],"file":"source-parser.cjs"}
7
node_modules/comment-parser/lib/parser/source-parser.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
import { Line, BlockMarkers } from '../primitives.js';
export interface Options {
    startLine: number;
    markers: BlockMarkers;
}
export type Parser = (source: string) => Line[] | null;
export default function getParser({ startLine, markers, }?: Partial<Options>): Parser;
29
node_modules/comment-parser/lib/parser/spec-parser.cjs
generated
vendored
Normal file
@ -0,0 +1,29 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

const util_js_1 = require("../util.cjs");

function getParser({
  tokenizers
}) {
  return function parseSpec(source) {
    var _a;

    let spec = (0, util_js_1.seedSpec)({
      source
    });

    for (const tokenize of tokenizers) {
      spec = tokenize(spec);
      if ((_a = spec.problems[spec.problems.length - 1]) === null || _a === void 0 ? void 0 : _a.critical) break;
    }

    return spec;
  };
}

exports.default = getParser;
//# sourceMappingURL=spec-parser.cjs.map
1
node_modules/comment-parser/lib/parser/spec-parser.cjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"sources":["spec-parser.js"],"names":["Object","defineProperty","exports","value","util_js_1","require","getParser","tokenizers","parseSpec","source","_a","spec","seedSpec","tokenize","problems","length","critical","default"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;;AACA,MAAMC,SAAS,GAAGC,OAAH,eAAf;;AACA,SAASC,SAAT,CAAmB;AAAEC,EAAAA;AAAF,CAAnB,EAAmC;AAC/B,SAAO,SAASC,SAAT,CAAmBC,MAAnB,EAA2B;AAC9B,QAAIC,EAAJ;;AACA,QAAIC,IAAI,GAAG,CAAC,GAAGP,SAAS,CAACQ,QAAd,EAAwB;AAAEH,MAAAA;AAAF,KAAxB,CAAX;;AACA,SAAK,MAAMI,QAAX,IAAuBN,UAAvB,EAAmC;AAC/BI,MAAAA,IAAI,GAAGE,QAAQ,CAACF,IAAD,CAAf;AACA,UAAI,CAACD,EAAE,GAAGC,IAAI,CAACG,QAAL,CAAcH,IAAI,CAACG,QAAL,CAAcC,MAAd,GAAuB,CAArC,CAAN,MAAmD,IAAnD,IAA2DL,EAAE,KAAK,KAAK,CAAvE,GAA2E,KAAK,CAAhF,GAAoFA,EAAE,CAACM,QAA3F,EACI;AACP;;AACD,WAAOL,IAAP;AACH,GATD;AAUH;;AACDT,OAAO,CAACe,OAAR,GAAkBX,SAAlB","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_js_1 = require(\"../util.js\");\nfunction getParser({ tokenizers }) {\n return function parseSpec(source) {\n var _a;\n let spec = (0, util_js_1.seedSpec)({ source });\n for (const tokenize of tokenizers) {\n spec = tokenize(spec);\n if ((_a = spec.problems[spec.problems.length - 1]) === null || _a === void 0 ? void 0 : _a.critical)\n break;\n }\n return spec;\n };\n}\nexports.default = getParser;\n"],"file":"spec-parser.cjs"}
7
node_modules/comment-parser/lib/parser/spec-parser.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
import { Line, Spec } from '../primitives.js';
import { Tokenizer } from './tokenizers/index.js';
export type Parser = (source: Line[]) => Spec;
export interface Options {
    tokenizers: Tokenizer[];
}
export default function getParser({ tokenizers }: Options): Parser;
62
node_modules/comment-parser/lib/parser/tokenizers/description.cjs
generated
vendored
Normal file
@ -0,0 +1,62 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.getJoiner = void 0;

const primitives_js_1 = require("../../primitives.cjs");
/**
 * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
 * following given spacing srtategy
 * @param {Spacing} spacing tells how to handle the whitespace
 * @param {BlockMarkers} markers tells how to handle comment block delimitation
 */


function descriptionTokenizer(spacing = 'compact', markers = primitives_js_1.Markers) {
  const join = getJoiner(spacing);
  return spec => {
    spec.description = join(spec.source, markers);
    return spec;
  };
}

exports.default = descriptionTokenizer;

function getJoiner(spacing) {
  if (spacing === 'compact') return compactJoiner;
  if (spacing === 'preserve') return preserveJoiner;
  return spacing;
}

exports.getJoiner = getJoiner;

function compactJoiner(lines, markers = primitives_js_1.Markers) {
  return lines.map(({
    tokens: {
      description
    }
  }) => description.trim()).filter(description => description !== '').join(' ');
}

const lineNo = (num, {
  tokens
}, i) => tokens.type === '' ? num : i;

const getDescription = ({
  tokens
}) => (tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1)) + tokens.description;

function preserveJoiner(lines, markers = primitives_js_1.Markers) {
  if (lines.length === 0) return ''; // skip the opening line with no description

  if (lines[0].tokens.description === '' && lines[0].tokens.delimiter === markers.start) lines = lines.slice(1); // skip the closing line with no description

  const lastLine = lines[lines.length - 1];
  if (lastLine !== undefined && lastLine.tokens.description === '' && lastLine.tokens.end.endsWith(markers.end)) lines = lines.slice(0, -1); // description starts at the last line of type definition

  lines = lines.slice(lines.reduce(lineNo, 0));
  return lines.map(getDescription).join('\n');
}
//# sourceMappingURL=description.cjs.map
1
node_modules/comment-parser/lib/parser/tokenizers/description.cjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"sources":["description.js"],"names":["Object","defineProperty","exports","value","getJoiner","primitives_js_1","require","descriptionTokenizer","spacing","markers","Markers","join","spec","description","source","default","compactJoiner","preserveJoiner","lines","map","tokens","trim","filter","lineNo","num","i","type","getDescription","delimiter","start","postDelimiter","slice","length","lastLine","undefined","end","endsWith","reduce"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;AACAD,OAAO,CAACE,SAAR,GAAoB,KAAK,CAAzB;;AACA,MAAMC,eAAe,GAAGC,OAAH,wBAArB;AACA;AACA;AACA;AACA;AACA;AACA;;;AACA,SAASC,oBAAT,CAA8BC,OAAO,GAAG,SAAxC,EAAmDC,OAAO,GAAGJ,eAAe,CAACK,OAA7E,EAAsF;AAClF,QAAMC,IAAI,GAAGP,SAAS,CAACI,OAAD,CAAtB;AACA,SAAQI,IAAD,IAAU;AACbA,IAAAA,IAAI,CAACC,WAAL,GAAmBF,IAAI,CAACC,IAAI,CAACE,MAAN,EAAcL,OAAd,CAAvB;AACA,WAAOG,IAAP;AACH,GAHD;AAIH;;AACDV,OAAO,CAACa,OAAR,GAAkBR,oBAAlB;;AACA,SAASH,SAAT,CAAmBI,OAAnB,EAA4B;AACxB,MAAIA,OAAO,KAAK,SAAhB,EACI,OAAOQ,aAAP;AACJ,MAAIR,OAAO,KAAK,UAAhB,EACI,OAAOS,cAAP;AACJ,SAAOT,OAAP;AACH;;AACDN,OAAO,CAACE,SAAR,GAAoBA,SAApB;;AACA,SAASY,aAAT,CAAuBE,KAAvB,EAA8BT,OAAO,GAAGJ,eAAe,CAACK,OAAxD,EAAiE;AAC7D,SAAOQ,KAAK,CACPC,GADE,CACE,CAAC;AAAEC,IAAAA,MAAM,EAAE;AAAEP,MAAAA;AAAF;AAAV,GAAD,KAAiCA,WAAW,CAACQ,IAAZ,EADnC,EAEFC,MAFE,CAEMT,WAAD,IAAiBA,WAAW,KAAK,EAFtC,EAGFF,IAHE,CAGG,GAHH,CAAP;AAIH;;AACD,MAAMY,MAAM,GAAG,CAACC,GAAD,EAAM;AAAEJ,EAAAA;AAAF,CAAN,EAAkBK,CAAlB,KAAwBL,MAAM,CAACM,IAAP,KAAgB,EAAhB,GAAqBF,GAArB,GAA2BC,CAAlE;;AACA,MAAME,cAAc,GAAG,CAAC;AAAEP,EAAAA;AAAF,CAAD,KAAgB,CAACA,MAAM,CAACQ,SAAP,KAAqB,EAArB,GAA0BR,MAAM,CAACS,KAAjC,GAAyCT,MAAM,CAACU,aAAP,CAAqBC,KAArB,CAA2B,CAA3B,CAA1C,IACnCX,MAAM,CAACP,WADX;;AAEA,SAASI,cAAT,CAAwBC,KAAxB,EAA+BT,OAAO,GAAGJ,eAAe,CAACK,OAAzD,EAAkE;AAC9D,MAAIQ,KAAK,CAACc,MAAN,KAAiB,CAArB,EACI,OAAO,EAAP,CAF0D,CAG9D;;AACA,MAAId,KAAK,CAAC,CAAD,CAAL,CAASE,MAAT,CAAgBP,WAAhB,KAAgC,EAAhC,IACAK,KAAK,CAAC,CAAD,CAAL,CAASE,MAAT,CAAgBQ,SAAhB,KAA8BnB,OAAO,CAACoB,KAD1C,EAEIX,KAAK,GAAGA,KAAK,CAACa,KAAN,CAAY,CAAZ,CAAR,CAN0D,CAO9D;;AACA,QAAME,QAAQ,GAAGf,KAAK,CAACA,KAAK,CAACc,MAAN,GAAe,CAAhB,CAAtB;AACA,MAAIC,QAAQ,KAAKC,SAAb,IACAD,QAAQ,CAACb,MAAT,CAAgBP,WAAhB,KAAgC,EADhC,IAEAoB,QAAQ,CAACb,MAAT,CAAgBe,GAAhB,CAAoBC,QAApB,CAA6B3B,OAAO,CAAC0B,GAArC,CAFJ,EAGIjB,KAAK,GAAGA,KAAK,CAACa,KAAN,CAAY,CAAZ,EAAe,CAAC,CAAhB,CAAR,CAZ0D,CAa9D;;AACAb,EAAAA,KAAK,GAAGA,KAAK,CAACa,KAAN,CAAYb,KAAK,CAACmB,MAAN,CAAad,MAAb,EAAqB,CAArB,CAAZ,CAAR;AACA,SAAOL,KAAK,CAACC,GAAN,CAAUQ,cAAV,EAA0BhB,IAA1B,CAA+B,IAA/B,CAAP;AACH","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getJoiner = void 0;\nconst primitives_js_1 = require(\"../../primitives.js\");\n/**\n * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`\n * following given spacing srtategy\n * @param {Spacing} spacing tells how to handle the whitespace\n * @param {BlockMarkers} markers tells how to handle comment block delimitation\n */\nfunction descriptionTokenizer(spacing = 'compact', markers = primitives_js_1.Markers) {\n const join = getJoiner(spacing);\n return (spec) => {\n spec.description = join(spec.source, markers);\n return spec;\n };\n}\nexports.default = descriptionTokenizer;\nfunction getJoiner(spacing) {\n if (spacing === 'compact')\n return compactJoiner;\n if (spacing === 'preserve')\n return preserveJoiner;\n return spacing;\n}\nexports.getJoiner = getJoiner;\nfunction compactJoiner(lines, markers = primitives_js_1.Markers) {\n return lines\n .map(({ tokens: { 
description } }) => description.trim())\n .filter((description) => description !== '')\n .join(' ');\n}\nconst lineNo = (num, { tokens }, i) => tokens.type === '' ? num : i;\nconst getDescription = ({ tokens }) => (tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1)) +\n tokens.description;\nfunction preserveJoiner(lines, markers = primitives_js_1.Markers) {\n if (lines.length === 0)\n return '';\n // skip the opening line with no description\n if (lines[0].tokens.description === '' &&\n lines[0].tokens.delimiter === markers.start)\n lines = lines.slice(1);\n // skip the closing line with no description\n const lastLine = lines[lines.length - 1];\n if (lastLine !== undefined &&\n lastLine.tokens.description === '' &&\n lastLine.tokens.end.endsWith(markers.end))\n lines = lines.slice(0, -1);\n // description starts at the last line of type definition\n lines = lines.slice(lines.reduce(lineNo, 0));\n return lines.map(getDescription).join('\\n');\n}\n"],"file":"description.cjs"}
20
node_modules/comment-parser/lib/parser/tokenizers/description.d.ts
generated
vendored
Normal file
@ -0,0 +1,20 @@
import { Line, BlockMarkers, Markers } from '../../primitives.js';
import { Tokenizer } from './index.js';
/**
 * Walks over provided lines joining description token into a single string.
 * */
export type Joiner = (lines: Line[], markers?: BlockMarkers) => string;
/**
 * Shortcut for standard Joiners
 * compact - strip surrounding whitespace and concat lines using a single string
 * preserve - preserves original whitespace and line breaks as is
 */
export type Spacing = 'compact' | 'preserve' | Joiner;
/**
 * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
 * following given spacing srtategy
 * @param {Spacing} spacing tells how to handle the whitespace
 * @param {BlockMarkers} markers tells how to handle comment block delimitation
 */
export default function descriptionTokenizer(spacing?: Spacing, markers?: typeof Markers): Tokenizer;
export declare function getJoiner(spacing: Spacing): Joiner;
6
node_modules/comment-parser/lib/parser/tokenizers/index.cjs
generated
vendored
Normal file
@ -0,0 +1,6 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
//# sourceMappingURL=index.cjs.map
1
node_modules/comment-parser/lib/parser/tokenizers/index.cjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"sources":["index.js"],"names":["Object","defineProperty","exports","value"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n"],"file":"index.cjs"}
7
node_modules/comment-parser/lib/parser/tokenizers/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
import { Spec } from '../../primitives.js';
/**
 * Splits `spect.lines[].token.description` into other tokens,
 * and populates the spec.{tag, name, type, description}. Invoked in a chaing
 * with other tokens, operations listed above can be moved to separate tokenizers
 */
export type Tokenizer = (spec: Spec) => Spec;
109
node_modules/comment-parser/lib/parser/tokenizers/name.cjs
generated
vendored
Normal file
@ -0,0 +1,109 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

const util_js_1 = require("../../util.cjs");

const isQuoted = s => s && s.startsWith('"') && s.endsWith('"');
/**
 * Splits remaining `spec.lines[].tokens.description` into `name` and `descriptions` tokens,
 * and populates the `spec.name`
 */


function nameTokenizer() {
  const typeEnd = (num, {
    tokens
  }, i) => tokens.type === '' ? num : i;

  return spec => {
    // look for the name in the line where {type} ends
    const {
      tokens
    } = spec.source[spec.source.reduce(typeEnd, 0)];
    const source = tokens.description.trimLeft();
    const quotedGroups = source.split('"'); // if it starts with quoted group, assume it is a literal

    if (quotedGroups.length > 1 && quotedGroups[0] === '' && quotedGroups.length % 2 === 1) {
      spec.name = quotedGroups[1];
      tokens.name = `"${quotedGroups[1]}"`;
      [tokens.postName, tokens.description] = (0, util_js_1.splitSpace)(source.slice(tokens.name.length));
      return spec;
    }

    let brackets = 0;
    let name = '';
    let optional = false;
    let defaultValue; // assume name is non-space string or anything wrapped into brackets

    for (const ch of source) {
      if (brackets === 0 && (0, util_js_1.isSpace)(ch)) break;
      if (ch === '[') brackets++;
      if (ch === ']') brackets--;
      name += ch;
    }

    if (brackets !== 0) {
      spec.problems.push({
        code: 'spec:name:unpaired-brackets',
        message: 'unpaired brackets',
        line: spec.source[0].number,
        critical: true
      });
      return spec;
    }

    const nameToken = name;

    if (name[0] === '[' && name[name.length - 1] === ']') {
      optional = true;
      name = name.slice(1, -1);
      const parts = name.split('=');
      name = parts[0].trim();
      if (parts[1] !== undefined) defaultValue = parts.slice(1).join('=').trim();

      if (name === '') {
        spec.problems.push({
          code: 'spec:name:empty-name',
          message: 'empty name',
          line: spec.source[0].number,
          critical: true
        });
        return spec;
      }

      if (defaultValue === '') {
        spec.problems.push({
          code: 'spec:name:empty-default',
          message: 'empty default value',
          line: spec.source[0].number,
          critical: true
        });
        return spec;
      } // has "=" and is not a string, except for "=>"


      if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
        spec.problems.push({
          code: 'spec:name:invalid-default',
          message: 'invalid default value syntax',
          line: spec.source[0].number,
          critical: true
        });
        return spec;
      }
    }

    spec.optional = optional;
    spec.name = name;
    tokens.name = nameToken;
    if (defaultValue !== undefined) spec.default = defaultValue;
    [tokens.postName, tokens.description] = (0, util_js_1.splitSpace)(source.slice(tokens.name.length));
    return spec;
  };
}

exports.default = nameTokenizer;
//# sourceMappingURL=name.cjs.map
1
node_modules/comment-parser/lib/parser/tokenizers/name.cjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
6
node_modules/comment-parser/lib/parser/tokenizers/name.d.ts
generated
vendored
Normal file
@ -0,0 +1,6 @@
import { Tokenizer } from './index.js';
/**
 * Splits remaining `spec.lines[].tokens.description` into `name` and `descriptions` tokens,
 * and populates the `spec.name`
 */
export default function nameTokenizer(): Tokenizer;
37
node_modules/comment-parser/lib/parser/tokenizers/tag.cjs
generated
vendored
Normal file
@ -0,0 +1,37 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
/**
 * Splits the `@prefix` from remaining `Spec.lines[].token.description` into the `tag` token,
 * and populates `spec.tag`
 */

function tagTokenizer() {
  return spec => {
    const {
      tokens
    } = spec.source[0];
    const match = tokens.description.match(/\s*(@(\S+))(\s*)/);

    if (match === null) {
      spec.problems.push({
        code: 'spec:tag:prefix',
        message: 'tag should start with "@" symbol',
        line: spec.source[0].number,
        critical: true
      });
      return spec;
    }

    tokens.tag = match[1];
    tokens.postTag = match[3];
    tokens.description = tokens.description.slice(match[0].length);
    spec.tag = match[2];
    return spec;
  };
}

exports.default = tagTokenizer;
//# sourceMappingURL=tag.cjs.map
1
node_modules/comment-parser/lib/parser/tokenizers/tag.cjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"sources":["tag.js"],"names":["Object","defineProperty","exports","value","tagTokenizer","spec","tokens","source","match","description","problems","push","code","message","line","number","critical","tag","postTag","slice","length","default"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;AACA;AACA;AACA;AACA;;AACA,SAASC,YAAT,GAAwB;AACpB,SAAQC,IAAD,IAAU;AACb,UAAM;AAAEC,MAAAA;AAAF,QAAaD,IAAI,CAACE,MAAL,CAAY,CAAZ,CAAnB;AACA,UAAMC,KAAK,GAAGF,MAAM,CAACG,WAAP,CAAmBD,KAAnB,CAAyB,kBAAzB,CAAd;;AACA,QAAIA,KAAK,KAAK,IAAd,EAAoB;AAChBH,MAAAA,IAAI,CAACK,QAAL,CAAcC,IAAd,CAAmB;AACfC,QAAAA,IAAI,EAAE,iBADS;AAEfC,QAAAA,OAAO,EAAE,kCAFM;AAGfC,QAAAA,IAAI,EAAET,IAAI,CAACE,MAAL,CAAY,CAAZ,EAAeQ,MAHN;AAIfC,QAAAA,QAAQ,EAAE;AAJK,OAAnB;AAMA,aAAOX,IAAP;AACH;;AACDC,IAAAA,MAAM,CAACW,GAAP,GAAaT,KAAK,CAAC,CAAD,CAAlB;AACAF,IAAAA,MAAM,CAACY,OAAP,GAAiBV,KAAK,CAAC,CAAD,CAAtB;AACAF,IAAAA,MAAM,CAACG,WAAP,GAAqBH,MAAM,CAACG,WAAP,CAAmBU,KAAnB,CAAyBX,KAAK,CAAC,CAAD,CAAL,CAASY,MAAlC,CAArB;AACAf,IAAAA,IAAI,CAACY,GAAL,GAAWT,KAAK,CAAC,CAAD,CAAhB;AACA,WAAOH,IAAP;AACH,GAjBD;AAkBH;;AACDH,OAAO,CAACmB,OAAR,GAAkBjB,YAAlB","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/**\n * Splits the `@prefix` from remaining `Spec.lines[].token.description` into the `tag` token,\n * and populates `spec.tag`\n */\nfunction tagTokenizer() {\n return (spec) => {\n const { tokens } = spec.source[0];\n const match = tokens.description.match(/\\s*(@(\\S+))(\\s*)/);\n if (match === null) {\n spec.problems.push({\n code: 'spec:tag:prefix',\n message: 'tag should start with \"@\" symbol',\n line: spec.source[0].number,\n critical: true,\n });\n return spec;\n }\n tokens.tag = match[1];\n tokens.postTag = match[3];\n tokens.description = tokens.description.slice(match[0].length);\n spec.tag = match[2];\n return spec;\n };\n}\nexports.default = tagTokenizer;\n"],"file":"tag.cjs"}
6
node_modules/comment-parser/lib/parser/tokenizers/tag.d.ts
generated
vendored
Normal file
@ -0,0 +1,6 @@
import { Tokenizer } from './index.js';
/**
 * Splits the `@prefix` from remaining `Spec.lines[].token.description` into the `tag` token,
 * and populates `spec.tag`
 */
export default function tagTokenizer(): Tokenizer;
79
node_modules/comment-parser/lib/parser/tokenizers/type.cjs
generated
vendored
Normal file
@ -0,0 +1,79 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

const util_js_1 = require("../../util.cjs");
/**
 * Sets splits remaining `Spec.lines[].tokes.description` into `type` and `description`
 * tokens and populates Spec.type`
 *
 * @param {Spacing} spacing tells how to deal with a whitespace
 * for type values going over multiple lines
 */


function typeTokenizer(spacing = 'compact') {
  const join = getJoiner(spacing);
  return spec => {
    let curlies = 0;
    let lines = [];

    for (const [i, {
      tokens
    }] of spec.source.entries()) {
      let type = '';
      if (i === 0 && tokens.description[0] !== '{') return spec;

      for (const ch of tokens.description) {
        if (ch === '{') curlies++;
        if (ch === '}') curlies--;
        type += ch;
        if (curlies === 0) break;
      }

      lines.push([tokens, type]);
      if (curlies === 0) break;
    }

    if (curlies !== 0) {
      spec.problems.push({
        code: 'spec:type:unpaired-curlies',
        message: 'unpaired curlies',
        line: spec.source[0].number,
        critical: true
      });
      return spec;
    }

    const parts = [];
    const offset = lines[0][0].postDelimiter.length;

    for (const [i, [tokens, type]] of lines.entries()) {
      tokens.type = type;

      if (i > 0) {
        tokens.type = tokens.postDelimiter.slice(offset) + type;
        tokens.postDelimiter = tokens.postDelimiter.slice(0, offset);
      }

      [tokens.postType, tokens.description] = (0, util_js_1.splitSpace)(tokens.description.slice(type.length));
      parts.push(tokens.type);
    }

    parts[0] = parts[0].slice(1);
    parts[parts.length - 1] = parts[parts.length - 1].slice(0, -1);
    spec.type = join(parts);
    return spec;
  };
}

exports.default = typeTokenizer;

const trim = x => x.trim();

function getJoiner(spacing) {
  if (spacing === 'compact') return t => t.map(trim).join('');else if (spacing === 'preserve') return t => t.join('\n');else return spacing;
}
//# sourceMappingURL=type.cjs.map
1
node_modules/comment-parser/lib/parser/tokenizers/type.cjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
27
node_modules/comment-parser/lib/parser/tokenizers/type.d.ts
generated
vendored
Normal file
@ -0,0 +1,27 @@
import { Tokenizer } from './index.js';
/**
 * Joiner is a function taking collected type token string parts,
 * and joining them together. In most of the cases this will be
 * a single piece like {type-name}, but type may go over multipe line
 * ```
 * @tag {function(
 * number,
 * string
 * )}
 * ```
 */
export type Joiner = (parts: string[]) => string;
/**
 * Shortcut for standard Joiners
 * compact - trim surrounding space, replace line breaks with a single space
 * preserve - concat as is
 */
export type Spacing = 'compact' | 'preserve' | Joiner;
/**
 * Sets splits remaining `Spec.lines[].tokes.description` into `type` and `description`
 * tokens and populates Spec.type`
 *
 * @param {Spacing} spacing tells how to deal with a whitespace
 * for type values going over multiple lines
 */
export default function typeTokenizer(spacing?: Spacing): Tokenizer;
17
node_modules/comment-parser/lib/primitives.cjs
generated
vendored
Normal file
@ -0,0 +1,17 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.Markers = void 0;
/** @deprecated */

var Markers;

(function (Markers) {
  Markers["start"] = "/**";
  Markers["nostart"] = "/***";
  Markers["delim"] = "*";
  Markers["end"] = "*/";
})(Markers = exports.Markers || (exports.Markers = {}));
//# sourceMappingURL=primitives.cjs.map
1
node_modules/comment-parser/lib/primitives.cjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"sources":["primitives.js"],"names":["Object","defineProperty","exports","value","Markers"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;AACAD,OAAO,CAACE,OAAR,GAAkB,KAAK,CAAvB;AACA;;AACA,IAAIA,OAAJ;;AACA,CAAC,UAAUA,OAAV,EAAmB;AAChBA,EAAAA,OAAO,CAAC,OAAD,CAAP,GAAmB,KAAnB;AACAA,EAAAA,OAAO,CAAC,SAAD,CAAP,GAAqB,MAArB;AACAA,EAAAA,OAAO,CAAC,OAAD,CAAP,GAAmB,GAAnB;AACAA,EAAAA,OAAO,CAAC,KAAD,CAAP,GAAiB,IAAjB;AACH,CALD,EAKGA,OAAO,GAAGF,OAAO,CAACE,OAAR,KAAoBF,OAAO,CAACE,OAAR,GAAkB,EAAtC,CALb","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Markers = void 0;\n/** @deprecated */\nvar Markers;\n(function (Markers) {\n Markers[\"start\"] = \"/**\";\n Markers[\"nostart\"] = \"/***\";\n Markers[\"delim\"] = \"*\";\n Markers[\"end\"] = \"*/\";\n})(Markers = exports.Markers || (exports.Markers = {}));\n"],"file":"primitives.cjs"}
54
node_modules/comment-parser/lib/primitives.d.ts
generated
vendored
Normal file
@ -0,0 +1,54 @@
/** @deprecated */
export declare enum Markers {
    start = "/**",
    nostart = "/***",
    delim = "*",
    end = "*/"
}
export interface BlockMarkers {
    start: string;
    nostart: string;
    delim: string;
    end: string;
}
export interface Block {
    description: string;
    tags: Spec[];
    source: Line[];
    problems: Problem[];
}
export interface Spec {
    tag: string;
    name: string;
    default?: string;
    type: string;
    optional: boolean;
    description: string;
    problems: Problem[];
    source: Line[];
}
export interface Line {
    number: number;
    source: string;
    tokens: Tokens;
}
export interface Tokens {
    start: string;
    delimiter: string;
    postDelimiter: string;
    tag: string;
    postTag: string;
    name: string;
    postName: string;
    type: string;
    postType: string;
    description: string;
    end: string;
    lineEnd: string;
}
export interface Problem {
    code: 'unhandled' | 'custom' | 'source:startline' | 'spec:tag:prefix' | 'spec:type:unpaired-curlies' | 'spec:name:unpaired-brackets' | 'spec:name:empty-name' | 'spec:name:invalid-default' | 'spec:name:empty-default';
    message: string;
    line: number;
    critical: boolean;
}
18
node_modules/comment-parser/lib/stringifier/index.cjs
generated
vendored
Normal file
@ -0,0 +1,18 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

function join(tokens) {
  return tokens.start + tokens.delimiter + tokens.postDelimiter + tokens.tag + tokens.postTag + tokens.type + tokens.postType + tokens.name + tokens.postName + tokens.description + tokens.end + tokens.lineEnd;
}

function getStringifier() {
  return block => block.source.map(({
    tokens
  }) => join(tokens)).join('\n');
}

exports.default = getStringifier;
//# sourceMappingURL=index.cjs.map
1
node_modules/comment-parser/lib/stringifier/index.cjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"sources":["index.js"],"names":["Object","defineProperty","exports","value","join","tokens","start","delimiter","postDelimiter","tag","postTag","type","postType","name","postName","description","end","lineEnd","getStringifier","block","source","map","default"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;;AACA,SAASC,IAAT,CAAcC,MAAd,EAAsB;AAClB,SAAQA,MAAM,CAACC,KAAP,GACJD,MAAM,CAACE,SADH,GAEJF,MAAM,CAACG,aAFH,GAGJH,MAAM,CAACI,GAHH,GAIJJ,MAAM,CAACK,OAJH,GAKJL,MAAM,CAACM,IALH,GAMJN,MAAM,CAACO,QANH,GAOJP,MAAM,CAACQ,IAPH,GAQJR,MAAM,CAACS,QARH,GASJT,MAAM,CAACU,WATH,GAUJV,MAAM,CAACW,GAVH,GAWJX,MAAM,CAACY,OAXX;AAYH;;AACD,SAASC,cAAT,GAA0B;AACtB,SAAQC,KAAD,IAAWA,KAAK,CAACC,MAAN,CAAaC,GAAb,CAAiB,CAAC;AAAEhB,IAAAA;AAAF,GAAD,KAAgBD,IAAI,CAACC,MAAD,CAArC,EAA+CD,IAA/C,CAAoD,IAApD,CAAlB;AACH;;AACDF,OAAO,CAACoB,OAAR,GAAkBJ,cAAlB","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction join(tokens) {\n return (tokens.start +\n tokens.delimiter +\n tokens.postDelimiter +\n tokens.tag +\n tokens.postTag +\n tokens.type +\n tokens.postType +\n tokens.name +\n tokens.postName +\n tokens.description +\n tokens.end +\n tokens.lineEnd);\n}\nfunction getStringifier() {\n return (block) => block.source.map(({ tokens }) => join(tokens)).join('\\n');\n}\nexports.default = getStringifier;\n"],"file":"index.cjs"}
3
node_modules/comment-parser/lib/stringifier/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
import { Block } from '../primitives.js';
export type Stringifier = (block: Block) => string;
export default function getStringifier(): Stringifier;
72
node_modules/comment-parser/lib/stringifier/inspect.cjs
generated
vendored
Normal file
72
node_modules/comment-parser/lib/stringifier/inspect.cjs
generated
vendored
Normal file
@ -0,0 +1,72 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
const util_js_1 = require("../util.cjs");
|
||||
|
||||
const zeroWidth = {
|
||||
line: 0,
|
||||
start: 0,
|
||||
delimiter: 0,
|
||||
postDelimiter: 0,
|
||||
tag: 0,
|
||||
postTag: 0,
|
||||
name: 0,
|
||||
postName: 0,
|
||||
type: 0,
|
||||
postType: 0,
|
||||
description: 0,
|
||||
end: 0,
|
||||
lineEnd: 0
|
||||
};
|
||||
const headers = {
|
||||
lineEnd: 'CR'
|
||||
};
|
||||
const fields = Object.keys(zeroWidth);
|
||||
|
||||
const repr = x => (0, util_js_1.isSpace)(x) ? `{${x.length}}` : x;
|
||||
|
||||
const frame = line => '|' + line.join('|') + '|';
|
||||
|
||||
const align = (width, tokens) => Object.keys(tokens).map(k => repr(tokens[k]).padEnd(width[k]));
|
||||
|
||||
function inspect({
|
||||
source
|
||||
}) {
|
||||
var _a, _b;
|
||||
|
||||
if (source.length === 0) return '';
|
||||
const width = Object.assign({}, zeroWidth);
|
||||
|
||||
for (const f of fields) width[f] = ((_a = headers[f]) !== null && _a !== void 0 ? _a : f).length;
|
||||
|
||||
for (const {
|
||||
number,
|
||||
tokens
|
||||
} of source) {
|
||||
width.line = Math.max(width.line, number.toString().length);
|
||||
|
||||
for (const k in tokens) width[k] = Math.max(width[k], repr(tokens[k]).length);
|
||||
}
|
||||
|
||||
const lines = [[], []];
|
||||
|
||||
for (const f of fields) lines[0].push(((_b = headers[f]) !== null && _b !== void 0 ? _b : f).padEnd(width[f]));
|
||||
|
||||
for (const f of fields) lines[1].push('-'.padEnd(width[f], '-'));
|
||||
|
||||
for (const {
|
||||
number,
|
||||
tokens
|
||||
} of source) {
|
||||
const line = number.toString().padStart(width.line);
|
||||
lines.push([line, ...align(width, tokens)]);
|
||||
}
|
||||
|
||||
return lines.map(frame).join('\n');
|
||||
}
|
||||
|
||||
exports.default = inspect;
|
||||
//# sourceMappingURL=inspect.cjs.map
|
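inspect above is a debugging helper: it renders every source line of a block as one row of a pipe-delimited table, one column per token, showing whitespace-only tokens as their length in braces (e.g. {1}) and labelling the lineEnd column 'CR'. A small sketch, assuming the function is available as the package's inspect export:

const { parse, inspect } = require('comment-parser');

const [block] = parse(`
/**
 * @param {string} name the name to greet
 */`);

// Prints a bordered table: line | start | delimiter | postDelimiter | tag | ... | CR
console.log(inspect(block));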
1
node_modules/comment-parser/lib/stringifier/inspect.cjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["inspect.js"],"names":["Object","defineProperty","exports","value","util_js_1","require","zeroWidth","line","start","delimiter","postDelimiter","tag","postTag","name","postName","type","postType","description","end","lineEnd","headers","fields","keys","repr","x","isSpace","length","frame","join","align","width","tokens","map","k","padEnd","inspect","source","_a","_b","assign","f","number","Math","max","toString","lines","push","padStart","default"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;;AACA,MAAMC,SAAS,GAAGC,OAAH,eAAf;;AACA,MAAMC,SAAS,GAAG;AACdC,EAAAA,IAAI,EAAE,CADQ;AAEdC,EAAAA,KAAK,EAAE,CAFO;AAGdC,EAAAA,SAAS,EAAE,CAHG;AAIdC,EAAAA,aAAa,EAAE,CAJD;AAKdC,EAAAA,GAAG,EAAE,CALS;AAMdC,EAAAA,OAAO,EAAE,CANK;AAOdC,EAAAA,IAAI,EAAE,CAPQ;AAQdC,EAAAA,QAAQ,EAAE,CARI;AASdC,EAAAA,IAAI,EAAE,CATQ;AAUdC,EAAAA,QAAQ,EAAE,CAVI;AAWdC,EAAAA,WAAW,EAAE,CAXC;AAYdC,EAAAA,GAAG,EAAE,CAZS;AAadC,EAAAA,OAAO,EAAE;AAbK,CAAlB;AAeA,MAAMC,OAAO,GAAG;AAAED,EAAAA,OAAO,EAAE;AAAX,CAAhB;AACA,MAAME,MAAM,GAAGrB,MAAM,CAACsB,IAAP,CAAYhB,SAAZ,CAAf;;AACA,MAAMiB,IAAI,GAAIC,CAAD,IAAQ,CAAC,GAAGpB,SAAS,CAACqB,OAAd,EAAuBD,CAAvB,IAA6B,IAAGA,CAAC,CAACE,MAAO,GAAzC,GAA8CF,CAAnE;;AACA,MAAMG,KAAK,GAAIpB,IAAD,IAAU,MAAMA,IAAI,CAACqB,IAAL,CAAU,GAAV,CAAN,GAAuB,GAA/C;;AACA,MAAMC,KAAK,GAAG,CAACC,KAAD,EAAQC,MAAR,KAAmB/B,MAAM,CAACsB,IAAP,CAAYS,MAAZ,EAAoBC,GAApB,CAAyBC,CAAD,IAAOV,IAAI,CAACQ,MAAM,CAACE,CAAD,CAAP,CAAJ,CAAgBC,MAAhB,CAAuBJ,KAAK,CAACG,CAAD,CAA5B,CAA/B,CAAjC;;AACA,SAASE,OAAT,CAAiB;AAAEC,EAAAA;AAAF,CAAjB,EAA6B;AACzB,MAAIC,EAAJ,EAAQC,EAAR;;AACA,MAAIF,MAAM,CAACV,MAAP,KAAkB,CAAtB,EACI,OAAO,EAAP;AACJ,QAAMI,KAAK,GAAG9B,MAAM,CAACuC,MAAP,CAAc,EAAd,EAAkBjC,SAAlB,CAAd;;AACA,OAAK,MAAMkC,CAAX,IAAgBnB,MAAhB,EACIS,KAAK,CAACU,CAAD,CAAL,GAAW,CAAC,CAACH,EAAE,GAAGjB,OAAO,CAACoB,CAAD,CAAb,MAAsB,IAAtB,IAA8BH,EAAE,KAAK,KAAK,CAA1C,GAA8CA,EAA9C,GAAmDG,CAApD,EAAuDd,MAAlE;;AACJ,OAAK,MAAM;AAAEe,IAAAA,MAAF;AAAUV,IAAAA;AAAV,GAAX,IAAiCK,MAAjC,EAAyC;AACrCN,IAAAA,KAAK,CAACvB,IAAN,GAAamC,IAAI,CAACC,GAAL,CAASb,KAAK,CAACvB,IAAf,EAAqBkC,MAAM,CAACG,QAAP,GAAkBlB,MAAvC,CAAb;;AACA,SAAK,MAAMO,CAAX,IAAgBF,MAAhB,EACID,KAAK,CAACG,CAAD,CAAL,GAAWS,IAAI,CAACC,GAAL,CAASb,KAAK,CAACG,CAAD,CAAd,EAAmBV,IAAI,CAACQ,MAAM,CAACE,CAAD,CAAP,CAAJ,CAAgBP,MAAnC,CAAX;AACP;;AACD,QAAMmB,KAAK,GAAG,CAAC,EAAD,EAAK,EAAL,CAAd;;AACA,OAAK,MAAML,CAAX,IAAgBnB,MAAhB,EACIwB,KAAK,CAAC,CAAD,CAAL,CAASC,IAAT,CAAc,CAAC,CAACR,EAAE,GAAGlB,OAAO,CAACoB,CAAD,CAAb,MAAsB,IAAtB,IAA8BF,EAAE,KAAK,KAAK,CAA1C,GAA8CA,EAA9C,GAAmDE,CAApD,EAAuDN,MAAvD,CAA8DJ,KAAK,CAACU,CAAD,CAAnE,CAAd;;AACJ,OAAK,MAAMA,CAAX,IAAgBnB,MAAhB,EACIwB,KAAK,CAAC,CAAD,CAAL,CAASC,IAAT,CAAc,IAAIZ,MAAJ,CAAWJ,KAAK,CAACU,CAAD,CAAhB,EAAqB,GAArB,CAAd;;AACJ,OAAK,MAAM;AAAEC,IAAAA,MAAF;AAAUV,IAAAA;AAAV,GAAX,IAAiCK,MAAjC,EAAyC;AACrC,UAAM7B,IAAI,GAAGkC,MAAM,CAACG,QAAP,GAAkBG,QAAlB,CAA2BjB,KAAK,CAACvB,IAAjC,CAAb;AACAsC,IAAAA,KAAK,CAACC,IAAN,CAAW,CAACvC,IAAD,EAAO,GAAGsB,KAAK,CAACC,KAAD,EAAQC,MAAR,CAAf,CAAX;AACH;;AACD,SAAOc,KAAK,CAACb,GAAN,CAAUL,KAAV,EAAiBC,IAAjB,CAAsB,IAAtB,CAAP;AACH;;AACD1B,OAAO,CAAC8C,OAAR,GAAkBb,OAAlB","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_js_1 = require(\"../util.js\");\nconst zeroWidth = {\n line: 0,\n start: 0,\n delimiter: 0,\n postDelimiter: 0,\n tag: 0,\n postTag: 0,\n name: 0,\n postName: 0,\n type: 0,\n postType: 0,\n description: 0,\n end: 0,\n lineEnd: 0,\n};\nconst headers = { lineEnd: 'CR' };\nconst fields = Object.keys(zeroWidth);\nconst repr = (x) => ((0, util_js_1.isSpace)(x) ? 
`{${x.length}}` : x);\nconst frame = (line) => '|' + line.join('|') + '|';\nconst align = (width, tokens) => Object.keys(tokens).map((k) => repr(tokens[k]).padEnd(width[k]));\nfunction inspect({ source }) {\n var _a, _b;\n if (source.length === 0)\n return '';\n const width = Object.assign({}, zeroWidth);\n for (const f of fields)\n width[f] = ((_a = headers[f]) !== null && _a !== void 0 ? _a : f).length;\n for (const { number, tokens } of source) {\n width.line = Math.max(width.line, number.toString().length);\n for (const k in tokens)\n width[k] = Math.max(width[k], repr(tokens[k]).length);\n }\n const lines = [[], []];\n for (const f of fields)\n lines[0].push(((_b = headers[f]) !== null && _b !== void 0 ? _b : f).padEnd(width[f]));\n for (const f of fields)\n lines[1].push('-'.padEnd(width[f], '-'));\n for (const { number, tokens } of source) {\n const line = number.toString().padStart(width.line);\n lines.push([line, ...align(width, tokens)]);\n }\n return lines.map(frame).join('\\n');\n}\nexports.default = inspect;\n"],"file":"inspect.cjs"}
|
2
node_modules/comment-parser/lib/stringifier/inspect.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import { Block } from '../primitives.js';
|
||||
export default function inspect({ source }: Block): string;
|
127
node_modules/comment-parser/lib/transforms/align.cjs
generated
vendored
Normal file
@ -0,0 +1,127 @@
|
||||
"use strict";
|
||||
|
||||
var __rest = this && this.__rest || function (s, e) {
|
||||
var t = {};
|
||||
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) t[p] = s[p];
|
||||
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function") for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
const primitives_js_1 = require("../primitives.cjs");
|
||||
|
||||
const util_js_1 = require("../util.cjs");
|
||||
|
||||
const zeroWidth = {
|
||||
start: 0,
|
||||
tag: 0,
|
||||
type: 0,
|
||||
name: 0
|
||||
};
|
||||
|
||||
const getWidth = (markers = primitives_js_1.Markers) => (w, {
|
||||
tokens: t
|
||||
}) => ({
|
||||
start: t.delimiter === markers.start ? t.start.length : w.start,
|
||||
tag: Math.max(w.tag, t.tag.length),
|
||||
type: Math.max(w.type, t.type.length),
|
||||
name: Math.max(w.name, t.name.length)
|
||||
});
|
||||
|
||||
const space = len => ''.padStart(len, ' ');
|
||||
|
||||
function align(markers = primitives_js_1.Markers) {
|
||||
let intoTags = false;
|
||||
let w;
|
||||
|
||||
function update(line) {
|
||||
const tokens = Object.assign({}, line.tokens);
|
||||
if (tokens.tag !== '') intoTags = true;
|
||||
const isEmpty = tokens.tag === '' && tokens.name === '' && tokens.type === '' && tokens.description === ''; // dangling '*/'
|
||||
|
||||
if (tokens.end === markers.end && isEmpty) {
|
||||
tokens.start = space(w.start + 1);
|
||||
return Object.assign(Object.assign({}, line), {
|
||||
tokens
|
||||
});
|
||||
}
|
||||
|
||||
switch (tokens.delimiter) {
|
||||
case markers.start:
|
||||
tokens.start = space(w.start);
|
||||
break;
|
||||
|
||||
case markers.delim:
|
||||
tokens.start = space(w.start + 1);
|
||||
break;
|
||||
|
||||
default:
|
||||
tokens.delimiter = '';
|
||||
tokens.start = space(w.start + 2);
|
||||
// compensate delimiter
|
||||
}
|
||||
|
||||
if (!intoTags) {
|
||||
tokens.postDelimiter = tokens.description === '' ? '' : ' ';
|
||||
return Object.assign(Object.assign({}, line), {
|
||||
tokens
|
||||
});
|
||||
}
|
||||
|
||||
const nothingAfter = {
|
||||
delim: false,
|
||||
tag: false,
|
||||
type: false,
|
||||
name: false
|
||||
};
|
||||
|
||||
if (tokens.description === '') {
|
||||
nothingAfter.name = true;
|
||||
tokens.postName = '';
|
||||
|
||||
if (tokens.name === '') {
|
||||
nothingAfter.type = true;
|
||||
tokens.postType = '';
|
||||
|
||||
if (tokens.type === '') {
|
||||
nothingAfter.tag = true;
|
||||
tokens.postTag = '';
|
||||
|
||||
if (tokens.tag === '') {
|
||||
nothingAfter.delim = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tokens.postDelimiter = nothingAfter.delim ? '' : ' ';
|
||||
if (!nothingAfter.tag) tokens.postTag = space(w.tag - tokens.tag.length + 1);
|
||||
if (!nothingAfter.type) tokens.postType = space(w.type - tokens.type.length + 1);
|
||||
if (!nothingAfter.name) tokens.postName = space(w.name - tokens.name.length + 1);
|
||||
return Object.assign(Object.assign({}, line), {
|
||||
tokens
|
||||
});
|
||||
}
|
||||
|
||||
return _a => {
|
||||
var {
|
||||
source
|
||||
} = _a,
|
||||
fields = __rest(_a, ["source"]);
|
||||
|
||||
w = source.reduce(getWidth(markers), Object.assign({}, zeroWidth));
|
||||
return (0, util_js_1.rewireSource)(Object.assign(Object.assign({}, fields), {
|
||||
source: source.map(update)
|
||||
}));
|
||||
};
|
||||
}
|
||||
|
||||
exports.default = align;
|
||||
//# sourceMappingURL=align.cjs.map
|
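The align transform recomputes the spacing tokens so that tags, types and names line up in columns: it first measures the widest tag, type and name in the block (getWidth), then pads postTag, postType and postName accordingly, keeps untagged description lines with a single space after the delimiter, and re-indents a dangling '*/'. A sketch of applying it, assuming transforms.align from the package's top-level exports; the ragged comment is illustrative:

const { parse, stringify, transforms } = require('comment-parser');

const [block] = parse(`
/**
 * @param {string}   name argument
 * @param {number} veryLongName    another argument
 * @returns {boolean}      result
 */`);

// align() returns a Transform (Block -> Block); stringify shows the aligned columns.
const aligned = transforms.align()(block);
console.log(stringify(aligned));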
1
node_modules/comment-parser/lib/transforms/align.cjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3
node_modules/comment-parser/lib/transforms/align.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
import { Transform } from './index.js';
|
||||
import { Markers } from '../primitives.js';
|
||||
export default function align(markers?: typeof Markers): Transform;
|
44
node_modules/comment-parser/lib/transforms/crlf.cjs
generated
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
"use strict";
|
||||
|
||||
var __rest = this && this.__rest || function (s, e) {
|
||||
var t = {};
|
||||
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) t[p] = s[p];
|
||||
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function") for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
const util_js_1 = require("../util.cjs");
|
||||
|
||||
const order = ['end', 'description', 'postType', 'type', 'postName', 'name', 'postTag', 'tag', 'postDelimiter', 'delimiter', 'start'];
|
||||
|
||||
function crlf(ending) {
|
||||
function update(line) {
|
||||
return Object.assign(Object.assign({}, line), {
|
||||
tokens: Object.assign(Object.assign({}, line.tokens), {
|
||||
lineEnd: ending === 'LF' ? '' : '\r'
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
return _a => {
|
||||
var {
|
||||
source
|
||||
} = _a,
|
||||
fields = __rest(_a, ["source"]);
|
||||
|
||||
return (0, util_js_1.rewireSource)(Object.assign(Object.assign({}, fields), {
|
||||
source: source.map(update)
|
||||
}));
|
||||
};
|
||||
}
|
||||
|
||||
exports.default = crlf;
|
||||
//# sourceMappingURL=crlf.cjs.map
|
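The crlf transform normalizes line endings by rewriting the lineEnd token on every source line: passing 'LF' clears it, anything else sets it to '\r' (the stringifier then joins lines with '\n', yielding CRLF output). A minimal sketch, assuming transforms.crlf:

const { parse, stringify, transforms } = require('comment-parser');

const [block] = parse('/**\r\n * Windows-style comment.\r\n * @param x value\r\n */');

// Force plain LF endings: every tokens.lineEnd becomes '' before re-joining with '\n'.
const unixBlock = transforms.crlf('LF')(block);
console.log(JSON.stringify(stringify(unixBlock)));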
1
node_modules/comment-parser/lib/transforms/crlf.cjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["crlf.js"],"names":["__rest","s","e","t","p","Object","prototype","hasOwnProperty","call","indexOf","getOwnPropertySymbols","i","length","propertyIsEnumerable","defineProperty","exports","value","util_js_1","require","order","crlf","ending","update","line","assign","tokens","lineEnd","_a","source","fields","rewireSource","map","default"],"mappings":"AAAA;;AACA,IAAIA,MAAM,GAAI,QAAQ,KAAKA,MAAd,IAAyB,UAAUC,CAAV,EAAaC,CAAb,EAAgB;AAClD,MAAIC,CAAC,GAAG,EAAR;;AACA,OAAK,IAAIC,CAAT,IAAcH,CAAd,EAAiB,IAAII,MAAM,CAACC,SAAP,CAAiBC,cAAjB,CAAgCC,IAAhC,CAAqCP,CAArC,EAAwCG,CAAxC,KAA8CF,CAAC,CAACO,OAAF,CAAUL,CAAV,IAAe,CAAjE,EACbD,CAAC,CAACC,CAAD,CAAD,GAAOH,CAAC,CAACG,CAAD,CAAR;;AACJ,MAAIH,CAAC,IAAI,IAAL,IAAa,OAAOI,MAAM,CAACK,qBAAd,KAAwC,UAAzD,EACI,KAAK,IAAIC,CAAC,GAAG,CAAR,EAAWP,CAAC,GAAGC,MAAM,CAACK,qBAAP,CAA6BT,CAA7B,CAApB,EAAqDU,CAAC,GAAGP,CAAC,CAACQ,MAA3D,EAAmED,CAAC,EAApE,EAAwE;AACpE,QAAIT,CAAC,CAACO,OAAF,CAAUL,CAAC,CAACO,CAAD,CAAX,IAAkB,CAAlB,IAAuBN,MAAM,CAACC,SAAP,CAAiBO,oBAAjB,CAAsCL,IAAtC,CAA2CP,CAA3C,EAA8CG,CAAC,CAACO,CAAD,CAA/C,CAA3B,EACIR,CAAC,CAACC,CAAC,CAACO,CAAD,CAAF,CAAD,GAAUV,CAAC,CAACG,CAAC,CAACO,CAAD,CAAF,CAAX;AACP;AACL,SAAOR,CAAP;AACH,CAVD;;AAWAE,MAAM,CAACS,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;;AACA,MAAMC,SAAS,GAAGC,OAAH,eAAf;;AACA,MAAMC,KAAK,GAAG,CACV,KADU,EAEV,aAFU,EAGV,UAHU,EAIV,MAJU,EAKV,UALU,EAMV,MANU,EAOV,SAPU,EAQV,KARU,EASV,eATU,EAUV,WAVU,EAWV,OAXU,CAAd;;AAaA,SAASC,IAAT,CAAcC,MAAd,EAAsB;AAClB,WAASC,MAAT,CAAgBC,IAAhB,EAAsB;AAClB,WAAOlB,MAAM,CAACmB,MAAP,CAAcnB,MAAM,CAACmB,MAAP,CAAc,EAAd,EAAkBD,IAAlB,CAAd,EAAuC;AAAEE,MAAAA,MAAM,EAAEpB,MAAM,CAACmB,MAAP,CAAcnB,MAAM,CAACmB,MAAP,CAAc,EAAd,EAAkBD,IAAI,CAACE,MAAvB,CAAd,EAA8C;AAAEC,QAAAA,OAAO,EAAEL,MAAM,KAAK,IAAX,GAAkB,EAAlB,GAAuB;AAAlC,OAA9C;AAAV,KAAvC,CAAP;AACH;;AACD,SAAQM,EAAD,IAAQ;AACX,QAAI;AAAEC,MAAAA;AAAF,QAAaD,EAAjB;AAAA,QAAqBE,MAAM,GAAG7B,MAAM,CAAC2B,EAAD,EAAK,CAAC,QAAD,CAAL,CAApC;;AACA,WAAO,CAAC,GAAGV,SAAS,CAACa,YAAd,EAA4BzB,MAAM,CAACmB,MAAP,CAAcnB,MAAM,CAACmB,MAAP,CAAc,EAAd,EAAkBK,MAAlB,CAAd,EAAyC;AAAED,MAAAA,MAAM,EAAEA,MAAM,CAACG,GAAP,CAAWT,MAAX;AAAV,KAAzC,CAA5B,CAAP;AACH,GAHD;AAIH;;AACDP,OAAO,CAACiB,OAAR,GAAkBZ,IAAlB","sourcesContent":["\"use strict\";\nvar __rest = (this && this.__rest) || function (s, e) {\n var t = {};\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\n t[p] = s[p];\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\n t[p[i]] = s[p[i]];\n }\n return t;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_js_1 = require(\"../util.js\");\nconst order = [\n 'end',\n 'description',\n 'postType',\n 'type',\n 'postName',\n 'name',\n 'postTag',\n 'tag',\n 'postDelimiter',\n 'delimiter',\n 'start',\n];\nfunction crlf(ending) {\n function update(line) {\n return Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { lineEnd: ending === 'LF' ? '' : '\\r' }) });\n }\n return (_a) => {\n var { source } = _a, fields = __rest(_a, [\"source\"]);\n return (0, util_js_1.rewireSource)(Object.assign(Object.assign({}, fields), { source: source.map(update) }));\n };\n}\nexports.default = crlf;\n"],"file":"crlf.cjs"}
|
3
node_modules/comment-parser/lib/transforms/crlf.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
import { Transform } from './index.js';
|
||||
export type Ending = 'LF' | 'CRLF';
|
||||
export default function crlf(ending: Ending): Transform;
|
58
node_modules/comment-parser/lib/transforms/indent.cjs
generated
vendored
Normal file
@ -0,0 +1,58 @@
|
||||
"use strict";
|
||||
|
||||
var __rest = this && this.__rest || function (s, e) {
|
||||
var t = {};
|
||||
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) t[p] = s[p];
|
||||
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function") for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
const util_js_1 = require("../util.cjs");
|
||||
|
||||
const pull = offset => str => str.slice(offset);
|
||||
|
||||
const push = offset => {
|
||||
const space = ''.padStart(offset, ' ');
|
||||
return str => str + space;
|
||||
};
|
||||
|
||||
function indent(pos) {
|
||||
let shift;
|
||||
|
||||
const pad = start => {
|
||||
if (shift === undefined) {
|
||||
const offset = pos - start.length;
|
||||
shift = offset > 0 ? push(offset) : pull(-offset);
|
||||
}
|
||||
|
||||
return shift(start);
|
||||
};
|
||||
|
||||
const update = line => Object.assign(Object.assign({}, line), {
|
||||
tokens: Object.assign(Object.assign({}, line.tokens), {
|
||||
start: pad(line.tokens.start)
|
||||
})
|
||||
});
|
||||
|
||||
return _a => {
|
||||
var {
|
||||
source
|
||||
} = _a,
|
||||
fields = __rest(_a, ["source"]);
|
||||
|
||||
return (0, util_js_1.rewireSource)(Object.assign(Object.assign({}, fields), {
|
||||
source: source.map(update)
|
||||
}));
|
||||
};
|
||||
}
|
||||
|
||||
exports.default = indent;
|
||||
//# sourceMappingURL=indent.cjs.map
|
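The indent transform shifts a whole block to a target column: the offset is computed once from the first line's start token (pad with spaces if the block sits left of pos, slice characters off if it sits right of it) and the same shift is then applied to every line, so relative indentation inside the block is preserved. A small sketch, assuming transforms.indent:

const { parse, stringify, transforms } = require('comment-parser');

const [block] = parse(`
      /**
       * Deeply indented comment.
       * @param x value
       */`);

// Re-indent so the block starts at column 2; every line moves by the same offset.
const shifted = transforms.indent(2)(block);
console.log(stringify(shifted));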
1
node_modules/comment-parser/lib/transforms/indent.cjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["indent.js"],"names":["__rest","s","e","t","p","Object","prototype","hasOwnProperty","call","indexOf","getOwnPropertySymbols","i","length","propertyIsEnumerable","defineProperty","exports","value","util_js_1","require","pull","offset","str","slice","push","space","padStart","indent","pos","shift","pad","start","undefined","update","line","assign","tokens","_a","source","fields","rewireSource","map","default"],"mappings":"AAAA;;AACA,IAAIA,MAAM,GAAI,QAAQ,KAAKA,MAAd,IAAyB,UAAUC,CAAV,EAAaC,CAAb,EAAgB;AAClD,MAAIC,CAAC,GAAG,EAAR;;AACA,OAAK,IAAIC,CAAT,IAAcH,CAAd,EAAiB,IAAII,MAAM,CAACC,SAAP,CAAiBC,cAAjB,CAAgCC,IAAhC,CAAqCP,CAArC,EAAwCG,CAAxC,KAA8CF,CAAC,CAACO,OAAF,CAAUL,CAAV,IAAe,CAAjE,EACbD,CAAC,CAACC,CAAD,CAAD,GAAOH,CAAC,CAACG,CAAD,CAAR;;AACJ,MAAIH,CAAC,IAAI,IAAL,IAAa,OAAOI,MAAM,CAACK,qBAAd,KAAwC,UAAzD,EACI,KAAK,IAAIC,CAAC,GAAG,CAAR,EAAWP,CAAC,GAAGC,MAAM,CAACK,qBAAP,CAA6BT,CAA7B,CAApB,EAAqDU,CAAC,GAAGP,CAAC,CAACQ,MAA3D,EAAmED,CAAC,EAApE,EAAwE;AACpE,QAAIT,CAAC,CAACO,OAAF,CAAUL,CAAC,CAACO,CAAD,CAAX,IAAkB,CAAlB,IAAuBN,MAAM,CAACC,SAAP,CAAiBO,oBAAjB,CAAsCL,IAAtC,CAA2CP,CAA3C,EAA8CG,CAAC,CAACO,CAAD,CAA/C,CAA3B,EACIR,CAAC,CAACC,CAAC,CAACO,CAAD,CAAF,CAAD,GAAUV,CAAC,CAACG,CAAC,CAACO,CAAD,CAAF,CAAX;AACP;AACL,SAAOR,CAAP;AACH,CAVD;;AAWAE,MAAM,CAACS,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;;AACA,MAAMC,SAAS,GAAGC,OAAH,eAAf;;AACA,MAAMC,IAAI,GAAIC,MAAD,IAAaC,GAAD,IAASA,GAAG,CAACC,KAAJ,CAAUF,MAAV,CAAlC;;AACA,MAAMG,IAAI,GAAIH,MAAD,IAAY;AACrB,QAAMI,KAAK,GAAG,GAAGC,QAAH,CAAYL,MAAZ,EAAoB,GAApB,CAAd;AACA,SAAQC,GAAD,IAASA,GAAG,GAAGG,KAAtB;AACH,CAHD;;AAIA,SAASE,MAAT,CAAgBC,GAAhB,EAAqB;AACjB,MAAIC,KAAJ;;AACA,QAAMC,GAAG,GAAIC,KAAD,IAAW;AACnB,QAAIF,KAAK,KAAKG,SAAd,EAAyB;AACrB,YAAMX,MAAM,GAAGO,GAAG,GAAGG,KAAK,CAAClB,MAA3B;AACAgB,MAAAA,KAAK,GAAGR,MAAM,GAAG,CAAT,GAAaG,IAAI,CAACH,MAAD,CAAjB,GAA4BD,IAAI,CAAC,CAACC,MAAF,CAAxC;AACH;;AACD,WAAOQ,KAAK,CAACE,KAAD,CAAZ;AACH,GAND;;AAOA,QAAME,MAAM,GAAIC,IAAD,IAAW5B,MAAM,CAAC6B,MAAP,CAAc7B,MAAM,CAAC6B,MAAP,CAAc,EAAd,EAAkBD,IAAlB,CAAd,EAAuC;AAAEE,IAAAA,MAAM,EAAE9B,MAAM,CAAC6B,MAAP,CAAc7B,MAAM,CAAC6B,MAAP,CAAc,EAAd,EAAkBD,IAAI,CAACE,MAAvB,CAAd,EAA8C;AAAEL,MAAAA,KAAK,EAAED,GAAG,CAACI,IAAI,CAACE,MAAL,CAAYL,KAAb;AAAZ,KAA9C;AAAV,GAAvC,CAA1B;;AACA,SAAQM,EAAD,IAAQ;AACX,QAAI;AAAEC,MAAAA;AAAF,QAAaD,EAAjB;AAAA,QAAqBE,MAAM,GAAGtC,MAAM,CAACoC,EAAD,EAAK,CAAC,QAAD,CAAL,CAApC;;AACA,WAAO,CAAC,GAAGnB,SAAS,CAACsB,YAAd,EAA4BlC,MAAM,CAAC6B,MAAP,CAAc7B,MAAM,CAAC6B,MAAP,CAAc,EAAd,EAAkBI,MAAlB,CAAd,EAAyC;AAAED,MAAAA,MAAM,EAAEA,MAAM,CAACG,GAAP,CAAWR,MAAX;AAAV,KAAzC,CAA5B,CAAP;AACH,GAHD;AAIH;;AACDjB,OAAO,CAAC0B,OAAR,GAAkBf,MAAlB","sourcesContent":["\"use strict\";\nvar __rest = (this && this.__rest) || function (s, e) {\n var t = {};\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\n t[p] = s[p];\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\n t[p[i]] = s[p[i]];\n }\n return t;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst util_js_1 = require(\"../util.js\");\nconst pull = (offset) => (str) => str.slice(offset);\nconst push = (offset) => {\n const space = ''.padStart(offset, ' ');\n return (str) => str + space;\n};\nfunction indent(pos) {\n let shift;\n const pad = (start) => {\n if (shift === undefined) {\n const offset = pos - start.length;\n shift = offset > 0 ? 
push(offset) : pull(-offset);\n }\n return shift(start);\n };\n const update = (line) => (Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { start: pad(line.tokens.start) }) }));\n return (_a) => {\n var { source } = _a, fields = __rest(_a, [\"source\"]);\n return (0, util_js_1.rewireSource)(Object.assign(Object.assign({}, fields), { source: source.map(update) }));\n };\n}\nexports.default = indent;\n"],"file":"indent.cjs"}
|
2
node_modules/comment-parser/lib/transforms/indent.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import { Transform } from './index.js';
|
||||
export default function indent(pos: number): Transform;
|
13
node_modules/comment-parser/lib/transforms/index.cjs
generated
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.flow = void 0;
|
||||
|
||||
function flow(...transforms) {
|
||||
return block => transforms.reduce((block, t) => t(block), block);
|
||||
}
|
||||
|
||||
exports.flow = flow;
|
||||
//# sourceMappingURL=index.cjs.map
|
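flow above reduces a block through the given transforms from left to right, turning several Block -> Block functions into one. A sketch composing the three transforms shown earlier, assuming they are collected under the package's transforms export:

const { parse, stringify, transforms } = require('comment-parser');
const { flow, align, indent, crlf } = transforms;

// One composed transform: align the tag columns, move the block to column 0, force LF endings.
const normalize = flow(align(), indent(0), crlf('LF'));

const [block] = parse(`
    /**
     * @param {string}  name   user name
     * @param {number} age user age
     */`);

console.log(stringify(normalize(block)));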
1
node_modules/comment-parser/lib/transforms/index.cjs.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"sources":["index.js"],"names":["Object","defineProperty","exports","value","flow","transforms","block","reduce","t"],"mappings":"AAAA;;AACAA,MAAM,CAACC,cAAP,CAAsBC,OAAtB,EAA+B,YAA/B,EAA6C;AAAEC,EAAAA,KAAK,EAAE;AAAT,CAA7C;AACAD,OAAO,CAACE,IAAR,GAAe,KAAK,CAApB;;AACA,SAASA,IAAT,CAAc,GAAGC,UAAjB,EAA6B;AACzB,SAAQC,KAAD,IAAWD,UAAU,CAACE,MAAX,CAAkB,CAACD,KAAD,EAAQE,CAAR,KAAcA,CAAC,CAACF,KAAD,CAAjC,EAA0CA,KAA1C,CAAlB;AACH;;AACDJ,OAAO,CAACE,IAAR,GAAeA,IAAf","sourcesContent":["\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.flow = void 0;\nfunction flow(...transforms) {\n return (block) => transforms.reduce((block, t) => t(block), block);\n}\nexports.flow = flow;\n"],"file":"index.cjs"}
|
3
node_modules/comment-parser/lib/transforms/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
import { Block } from '../primitives.js';
|
||||
export type Transform = (Block: Block) => Block;
|
||||
export declare function flow(...transforms: Transform[]): Transform;
|
113
node_modules/comment-parser/lib/util.cjs
generated
vendored
Normal file
@ -0,0 +1,113 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.rewireSpecs = exports.rewireSource = exports.seedTokens = exports.seedSpec = exports.seedBlock = exports.splitLines = exports.splitSpace = exports.splitCR = exports.hasCR = exports.isSpace = void 0;
|
||||
|
||||
function isSpace(source) {
|
||||
return /^\s+$/.test(source);
|
||||
}
|
||||
|
||||
exports.isSpace = isSpace;
|
||||
|
||||
function hasCR(source) {
|
||||
return /\r$/.test(source);
|
||||
}
|
||||
|
||||
exports.hasCR = hasCR;
|
||||
|
||||
function splitCR(source) {
|
||||
const matches = source.match(/\r+$/);
|
||||
return matches == null ? ['', source] : [source.slice(-matches[0].length), source.slice(0, -matches[0].length)];
|
||||
}
|
||||
|
||||
exports.splitCR = splitCR;
|
||||
|
||||
function splitSpace(source) {
|
||||
const matches = source.match(/^\s+/);
|
||||
return matches == null ? ['', source] : [source.slice(0, matches[0].length), source.slice(matches[0].length)];
|
||||
}
|
||||
|
||||
exports.splitSpace = splitSpace;
|
||||
|
||||
function splitLines(source) {
|
||||
return source.split(/\n/);
|
||||
}
|
||||
|
||||
exports.splitLines = splitLines;
|
||||
|
||||
function seedBlock(block = {}) {
|
||||
return Object.assign({
|
||||
description: '',
|
||||
tags: [],
|
||||
source: [],
|
||||
problems: []
|
||||
}, block);
|
||||
}
|
||||
|
||||
exports.seedBlock = seedBlock;
|
||||
|
||||
function seedSpec(spec = {}) {
|
||||
return Object.assign({
|
||||
tag: '',
|
||||
name: '',
|
||||
type: '',
|
||||
optional: false,
|
||||
description: '',
|
||||
problems: [],
|
||||
source: []
|
||||
}, spec);
|
||||
}
|
||||
|
||||
exports.seedSpec = seedSpec;
|
||||
|
||||
function seedTokens(tokens = {}) {
|
||||
return Object.assign({
|
||||
start: '',
|
||||
delimiter: '',
|
||||
postDelimiter: '',
|
||||
tag: '',
|
||||
postTag: '',
|
||||
name: '',
|
||||
postName: '',
|
||||
type: '',
|
||||
postType: '',
|
||||
description: '',
|
||||
end: '',
|
||||
lineEnd: ''
|
||||
}, tokens);
|
||||
}
|
||||
|
||||
exports.seedTokens = seedTokens;
|
||||
/**
|
||||
* Assures Block.tags[].source contains references to the Block.source items,
|
||||
* using Block.source as a source of truth. This is a counterpart of rewireSpecs
|
||||
* @param block parsed comments block
|
||||
*/
|
||||
|
||||
function rewireSource(block) {
|
||||
const source = block.source.reduce((acc, line) => acc.set(line.number, line), new Map());
|
||||
|
||||
for (const spec of block.tags) {
|
||||
spec.source = spec.source.map(line => source.get(line.number));
|
||||
}
|
||||
|
||||
return block;
|
||||
}
|
||||
|
||||
exports.rewireSource = rewireSource;
|
||||
/**
|
||||
* Assures Block.source contains references to the Block.tags[].source items,
|
||||
* using Block.tags[].source as a source of truth. This is a counterpart of rewireSource
|
||||
* @param block parsed comments block
|
||||
*/
|
||||
|
||||
function rewireSpecs(block) {
|
||||
const source = block.tags.reduce((acc, spec) => spec.source.reduce((acc, line) => acc.set(line.number, line), acc), new Map());
|
||||
block.source = block.source.map(line => source.get(line.number) || line);
|
||||
return block;
|
||||
}
|
||||
|
||||
exports.rewireSpecs = rewireSpecs;
|
||||
//# sourceMappingURL=util.cjs.map
|
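The seed helpers above build fully-populated defaults (an empty Block, Spec or Tokens object), which is handy when constructing blocks by hand or writing tests, while rewireSource restores the invariant that every tag's source entries are the same objects held in block.source. A sketch using the names re-exported under util, with a hand-made one-line block:

const { stringify, util } = require('comment-parser');
const { seedBlock, seedTokens } = util;

// A minimal hand-made block: a single source line with explicit tokens.
const block = seedBlock({
  source: [
    { number: 0, tokens: seedTokens({ delimiter: '/**', postDelimiter: ' ', description: 'hand-made', end: ' */' }) },
  ],
});

// Prints: /** hand-made */
console.log(stringify(block));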
1
node_modules/comment-parser/lib/util.cjs.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
21
node_modules/comment-parser/lib/util.d.ts
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
import { Block, Tokens, Spec } from './primitives.js';
|
||||
export declare function isSpace(source: string): boolean;
|
||||
export declare function hasCR(source: string): boolean;
|
||||
export declare function splitCR(source: string): [string, string];
|
||||
export declare function splitSpace(source: string): [string, string];
|
||||
export declare function splitLines(source: string): string[];
|
||||
export declare function seedBlock(block?: Partial<Block>): Block;
|
||||
export declare function seedSpec(spec?: Partial<Spec>): Spec;
|
||||
export declare function seedTokens(tokens?: Partial<Tokens>): Tokens;
|
||||
/**
|
||||
* Assures Block.tags[].source contains references to the Block.source items,
|
||||
* using Block.source as a source of truth. This is a counterpart of rewireSpecs
|
||||
* @param block parsed comments block
|
||||
*/
|
||||
export declare function rewireSource(block: Block): Block;
|
||||
/**
|
||||
* Assures Block.source contains references to the Block.tags[].source items,
|
||||
* using Block.tags[].source as a source of truth. This is a counterpart of rewireSource
|
||||
* @param block parsed comments block
|
||||
*/
|
||||
export declare function rewireSpecs(block: Block): Block;
|
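rewireSpecs is the counterpart declared above: it treats the tags' source lines as the source of truth and writes them back into block.source, matching lines by number. A sketch of the typical pattern when a custom transform replaces a tag's line objects rather than mutating them in place, assuming only the documented util and top-level exports:

const { parse, stringify, util } = require('comment-parser');

const [block] = parse(`
/**
 * @param {string} name user name
 */`);

// Replace the tag's line objects with edited copies...
for (const spec of block.tags) {
  spec.source = spec.source.map((line) => ({
    ...line,
    tokens: { ...line.tokens, description: line.tokens.description.toUpperCase() },
  }));
}

// ...then push them back into block.source so stringify (which reads block.source) sees the edit.
console.log(stringify(util.rewireSpecs(block)));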