forked from public/fvtt-cthulhu-eternal
Initial import with skill sheet working
This commit is contained in:
32
node_modules/comment-parser/es6/index.d.ts
generated
vendored
Normal file
32
node_modules/comment-parser/es6/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
||||
import { Options as ParserOptions } from './parser/index.js';
|
||||
import descriptionTokenizer from './parser/tokenizers/description.js';
|
||||
import nameTokenizer from './parser/tokenizers/name.js';
|
||||
import tagTokenizer from './parser/tokenizers/tag.js';
|
||||
import typeTokenizer from './parser/tokenizers/type.js';
|
||||
import alignTransform from './transforms/align.js';
|
||||
import indentTransform from './transforms/indent.js';
|
||||
import crlfTransform from './transforms/crlf.js';
|
||||
import { flow as flowTransform } from './transforms/index.js';
|
||||
import { rewireSpecs, rewireSource, seedBlock, seedTokens } from './util.js';
|
||||
export * from './primitives.js';
|
||||
export declare function parse(source: string, options?: Partial<ParserOptions>): import("./primitives.js").Block[];
|
||||
export declare const stringify: import("./stringifier/index.js").Stringifier;
|
||||
export { default as inspect } from './stringifier/inspect.js';
|
||||
export declare const transforms: {
|
||||
flow: typeof flowTransform;
|
||||
align: typeof alignTransform;
|
||||
indent: typeof indentTransform;
|
||||
crlf: typeof crlfTransform;
|
||||
};
|
||||
export declare const tokenizers: {
|
||||
tag: typeof tagTokenizer;
|
||||
type: typeof typeTokenizer;
|
||||
name: typeof nameTokenizer;
|
||||
description: typeof descriptionTokenizer;
|
||||
};
|
||||
export declare const util: {
|
||||
rewireSpecs: typeof rewireSpecs;
|
||||
rewireSource: typeof rewireSource;
|
||||
seedBlock: typeof seedBlock;
|
||||
seedTokens: typeof seedTokens;
|
||||
};
|
30
node_modules/comment-parser/es6/index.js
generated
vendored
Normal file
30
node_modules/comment-parser/es6/index.js
generated
vendored
Normal file
@ -0,0 +1,30 @@
|
||||
import getParser from './parser/index.js';
|
||||
import descriptionTokenizer from './parser/tokenizers/description.js';
|
||||
import nameTokenizer from './parser/tokenizers/name.js';
|
||||
import tagTokenizer from './parser/tokenizers/tag.js';
|
||||
import typeTokenizer from './parser/tokenizers/type.js';
|
||||
import getStringifier from './stringifier/index.js';
|
||||
import alignTransform from './transforms/align.js';
|
||||
import indentTransform from './transforms/indent.js';
|
||||
import crlfTransform from './transforms/crlf.js';
|
||||
import { flow as flowTransform } from './transforms/index.js';
|
||||
import { rewireSpecs, rewireSource, seedBlock, seedTokens } from './util.js';
|
||||
export * from './primitives.js';
|
||||
export function parse(source, options = {}) {
|
||||
return getParser(options)(source);
|
||||
}
|
||||
export const stringify = getStringifier();
|
||||
export { default as inspect } from './stringifier/inspect.js';
|
||||
export const transforms = {
|
||||
flow: flowTransform,
|
||||
align: alignTransform,
|
||||
indent: indentTransform,
|
||||
crlf: crlfTransform,
|
||||
};
|
||||
export const tokenizers = {
|
||||
tag: tagTokenizer,
|
||||
type: typeTokenizer,
|
||||
name: nameTokenizer,
|
||||
description: descriptionTokenizer,
|
||||
};
|
||||
export const util = { rewireSpecs, rewireSource, seedBlock, seedTokens };
|
24
node_modules/comment-parser/es6/parser/block-parser.d.ts
generated
vendored
Normal file
24
node_modules/comment-parser/es6/parser/block-parser.d.ts
generated
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
import { Line } from '../primitives.js';
|
||||
/**
|
||||
* Groups source lines in sections representing tags.
|
||||
* First section is a block description if present. Last section captures lines starting with
|
||||
* the last tag to the end of the block, including dangling closing marker.
|
||||
* @param {Line[]} block souce lines making a single comment block
|
||||
*/
|
||||
export type Parser = (block: Line[]) => Line[][];
|
||||
/**
|
||||
* Predicate telling if string contains opening/closing escaping sequence
|
||||
* @param {string} source raw source line
|
||||
*/
|
||||
export type Fencer = (source: string) => boolean;
|
||||
/**
|
||||
* `Parser` configuration options
|
||||
*/
|
||||
export interface Options {
|
||||
fence: string | Fencer;
|
||||
}
|
||||
/**
|
||||
* Creates configured `Parser`
|
||||
* @param {Partial<Options>} options
|
||||
*/
|
||||
export default function getParser({ fence, }?: Partial<Options>): Parser;
|
29
node_modules/comment-parser/es6/parser/block-parser.js
generated
vendored
Normal file
29
node_modules/comment-parser/es6/parser/block-parser.js
generated
vendored
Normal file
@ -0,0 +1,29 @@
|
||||
const reTag = /^@\S+/;
|
||||
/**
|
||||
* Creates configured `Parser`
|
||||
* @param {Partial<Options>} options
|
||||
*/
|
||||
export default function getParser({ fence = '```', } = {}) {
|
||||
const fencer = getFencer(fence);
|
||||
const toggleFence = (source, isFenced) => fencer(source) ? !isFenced : isFenced;
|
||||
return function parseBlock(source) {
|
||||
// start with description section
|
||||
const sections = [[]];
|
||||
let isFenced = false;
|
||||
for (const line of source) {
|
||||
if (reTag.test(line.tokens.description) && !isFenced) {
|
||||
sections.push([line]);
|
||||
}
|
||||
else {
|
||||
sections[sections.length - 1].push(line);
|
||||
}
|
||||
isFenced = toggleFence(line.tokens.description, isFenced);
|
||||
}
|
||||
return sections;
|
||||
};
|
||||
}
|
||||
function getFencer(fence) {
|
||||
if (typeof fence === 'string')
|
||||
return (source) => source.split(fence).length % 2 === 0;
|
||||
return fence;
|
||||
}
|
11
node_modules/comment-parser/es6/parser/index.d.ts
generated
vendored
Normal file
11
node_modules/comment-parser/es6/parser/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
import { Block, BlockMarkers } from '../primitives.js';
|
||||
import { Tokenizer } from './tokenizers/index.js';
|
||||
export interface Options {
|
||||
startLine: number;
|
||||
fence: string;
|
||||
spacing: 'compact' | 'preserve';
|
||||
markers: BlockMarkers;
|
||||
tokenizers: Tokenizer[];
|
||||
}
|
||||
export type Parser = (source: string) => Block[];
|
||||
export default function getParser({ startLine, fence, spacing, markers, tokenizers, }?: Partial<Options>): Parser;
|
39
node_modules/comment-parser/es6/parser/index.js
generated
vendored
Normal file
39
node_modules/comment-parser/es6/parser/index.js
generated
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
import { Markers } from '../primitives.js';
|
||||
import { splitLines } from '../util.js';
|
||||
import blockParser from './block-parser.js';
|
||||
import sourceParser from './source-parser.js';
|
||||
import specParser from './spec-parser.js';
|
||||
import tokenizeTag from './tokenizers/tag.js';
|
||||
import tokenizeType from './tokenizers/type.js';
|
||||
import tokenizeName from './tokenizers/name.js';
|
||||
import tokenizeDescription, { getJoiner as getDescriptionJoiner, } from './tokenizers/description.js';
|
||||
export default function getParser({ startLine = 0, fence = '```', spacing = 'compact', markers = Markers, tokenizers = [
|
||||
tokenizeTag(),
|
||||
tokenizeType(spacing),
|
||||
tokenizeName(),
|
||||
tokenizeDescription(spacing),
|
||||
], } = {}) {
|
||||
if (startLine < 0 || startLine % 1 > 0)
|
||||
throw new Error('Invalid startLine');
|
||||
const parseSource = sourceParser({ startLine, markers });
|
||||
const parseBlock = blockParser({ fence });
|
||||
const parseSpec = specParser({ tokenizers });
|
||||
const joinDescription = getDescriptionJoiner(spacing);
|
||||
return function (source) {
|
||||
const blocks = [];
|
||||
for (const line of splitLines(source)) {
|
||||
const lines = parseSource(line);
|
||||
if (lines === null)
|
||||
continue;
|
||||
const sections = parseBlock(lines);
|
||||
const specs = sections.slice(1).map(parseSpec);
|
||||
blocks.push({
|
||||
description: joinDescription(sections[0], markers),
|
||||
tags: specs,
|
||||
source: lines,
|
||||
problems: specs.reduce((acc, spec) => acc.concat(spec.problems), []),
|
||||
});
|
||||
}
|
||||
return blocks;
|
||||
};
|
||||
}
|
7
node_modules/comment-parser/es6/parser/source-parser.d.ts
generated
vendored
Normal file
7
node_modules/comment-parser/es6/parser/source-parser.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
import { Line, BlockMarkers } from '../primitives.js';
|
||||
export interface Options {
|
||||
startLine: number;
|
||||
markers: BlockMarkers;
|
||||
}
|
||||
export type Parser = (source: string) => Line[] | null;
|
||||
export default function getParser({ startLine, markers, }?: Partial<Options>): Parser;
|
46
node_modules/comment-parser/es6/parser/source-parser.js
generated
vendored
Normal file
46
node_modules/comment-parser/es6/parser/source-parser.js
generated
vendored
Normal file
@ -0,0 +1,46 @@
|
||||
import { Markers } from '../primitives.js';
|
||||
import { seedTokens, splitSpace, splitCR } from '../util.js';
|
||||
export default function getParser({ startLine = 0, markers = Markers, } = {}) {
|
||||
let block = null;
|
||||
let num = startLine;
|
||||
return function parseSource(source) {
|
||||
let rest = source;
|
||||
const tokens = seedTokens();
|
||||
[tokens.lineEnd, rest] = splitCR(rest);
|
||||
[tokens.start, rest] = splitSpace(rest);
|
||||
if (block === null &&
|
||||
rest.startsWith(markers.start) &&
|
||||
!rest.startsWith(markers.nostart)) {
|
||||
block = [];
|
||||
tokens.delimiter = rest.slice(0, markers.start.length);
|
||||
rest = rest.slice(markers.start.length);
|
||||
[tokens.postDelimiter, rest] = splitSpace(rest);
|
||||
}
|
||||
if (block === null) {
|
||||
num++;
|
||||
return null;
|
||||
}
|
||||
const isClosed = rest.trimRight().endsWith(markers.end);
|
||||
if (tokens.delimiter === '' &&
|
||||
rest.startsWith(markers.delim) &&
|
||||
!rest.startsWith(markers.end)) {
|
||||
tokens.delimiter = markers.delim;
|
||||
rest = rest.slice(markers.delim.length);
|
||||
[tokens.postDelimiter, rest] = splitSpace(rest);
|
||||
}
|
||||
if (isClosed) {
|
||||
const trimmed = rest.trimRight();
|
||||
tokens.end = rest.slice(trimmed.length - markers.end.length);
|
||||
rest = trimmed.slice(0, -markers.end.length);
|
||||
}
|
||||
tokens.description = rest;
|
||||
block.push({ number: num, source, tokens });
|
||||
num++;
|
||||
if (isClosed) {
|
||||
const result = block.slice();
|
||||
block = null;
|
||||
return result;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
}
|
7
node_modules/comment-parser/es6/parser/spec-parser.d.ts
generated
vendored
Normal file
7
node_modules/comment-parser/es6/parser/spec-parser.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
import { Line, Spec } from '../primitives.js';
|
||||
import { Tokenizer } from './tokenizers/index.js';
|
||||
export type Parser = (source: Line[]) => Spec;
|
||||
export interface Options {
|
||||
tokenizers: Tokenizer[];
|
||||
}
|
||||
export default function getParser({ tokenizers }: Options): Parser;
|
13
node_modules/comment-parser/es6/parser/spec-parser.js
generated
vendored
Normal file
13
node_modules/comment-parser/es6/parser/spec-parser.js
generated
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
import { seedSpec } from '../util.js';
|
||||
export default function getParser({ tokenizers }) {
|
||||
return function parseSpec(source) {
|
||||
var _a;
|
||||
let spec = seedSpec({ source });
|
||||
for (const tokenize of tokenizers) {
|
||||
spec = tokenize(spec);
|
||||
if ((_a = spec.problems[spec.problems.length - 1]) === null || _a === void 0 ? void 0 : _a.critical)
|
||||
break;
|
||||
}
|
||||
return spec;
|
||||
};
|
||||
}
|
20
node_modules/comment-parser/es6/parser/tokenizers/description.d.ts
generated
vendored
Normal file
20
node_modules/comment-parser/es6/parser/tokenizers/description.d.ts
generated
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
import { Line, BlockMarkers, Markers } from '../../primitives.js';
|
||||
import { Tokenizer } from './index.js';
|
||||
/**
|
||||
* Walks over provided lines joining description token into a single string.
|
||||
* */
|
||||
export type Joiner = (lines: Line[], markers?: BlockMarkers) => string;
|
||||
/**
|
||||
* Shortcut for standard Joiners
|
||||
* compact - strip surrounding whitespace and concat lines using a single string
|
||||
* preserve - preserves original whitespace and line breaks as is
|
||||
*/
|
||||
export type Spacing = 'compact' | 'preserve' | Joiner;
|
||||
/**
|
||||
* Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
|
||||
* following given spacing srtategy
|
||||
* @param {Spacing} spacing tells how to handle the whitespace
|
||||
* @param {BlockMarkers} markers tells how to handle comment block delimitation
|
||||
*/
|
||||
export default function descriptionTokenizer(spacing?: Spacing, markers?: typeof Markers): Tokenizer;
|
||||
export declare function getJoiner(spacing: Spacing): Joiner;
|
47
node_modules/comment-parser/es6/parser/tokenizers/description.js
generated
vendored
Normal file
47
node_modules/comment-parser/es6/parser/tokenizers/description.js
generated
vendored
Normal file
@ -0,0 +1,47 @@
|
||||
import { Markers } from '../../primitives.js';
|
||||
/**
|
||||
* Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
|
||||
* following given spacing srtategy
|
||||
* @param {Spacing} spacing tells how to handle the whitespace
|
||||
* @param {BlockMarkers} markers tells how to handle comment block delimitation
|
||||
*/
|
||||
export default function descriptionTokenizer(spacing = 'compact', markers = Markers) {
|
||||
const join = getJoiner(spacing);
|
||||
return (spec) => {
|
||||
spec.description = join(spec.source, markers);
|
||||
return spec;
|
||||
};
|
||||
}
|
||||
export function getJoiner(spacing) {
|
||||
if (spacing === 'compact')
|
||||
return compactJoiner;
|
||||
if (spacing === 'preserve')
|
||||
return preserveJoiner;
|
||||
return spacing;
|
||||
}
|
||||
function compactJoiner(lines, markers = Markers) {
|
||||
return lines
|
||||
.map(({ tokens: { description } }) => description.trim())
|
||||
.filter((description) => description !== '')
|
||||
.join(' ');
|
||||
}
|
||||
const lineNo = (num, { tokens }, i) => tokens.type === '' ? num : i;
|
||||
const getDescription = ({ tokens }) => (tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1)) +
|
||||
tokens.description;
|
||||
function preserveJoiner(lines, markers = Markers) {
|
||||
if (lines.length === 0)
|
||||
return '';
|
||||
// skip the opening line with no description
|
||||
if (lines[0].tokens.description === '' &&
|
||||
lines[0].tokens.delimiter === markers.start)
|
||||
lines = lines.slice(1);
|
||||
// skip the closing line with no description
|
||||
const lastLine = lines[lines.length - 1];
|
||||
if (lastLine !== undefined &&
|
||||
lastLine.tokens.description === '' &&
|
||||
lastLine.tokens.end.endsWith(markers.end))
|
||||
lines = lines.slice(0, -1);
|
||||
// description starts at the last line of type definition
|
||||
lines = lines.slice(lines.reduce(lineNo, 0));
|
||||
return lines.map(getDescription).join('\n');
|
||||
}
|
7
node_modules/comment-parser/es6/parser/tokenizers/index.d.ts
generated
vendored
Normal file
7
node_modules/comment-parser/es6/parser/tokenizers/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
import { Spec } from '../../primitives.js';
|
||||
/**
|
||||
* Splits `spect.lines[].token.description` into other tokens,
|
||||
* and populates the spec.{tag, name, type, description}. Invoked in a chaing
|
||||
* with other tokens, operations listed above can be moved to separate tokenizers
|
||||
*/
|
||||
export type Tokenizer = (spec: Spec) => Spec;
|
1
node_modules/comment-parser/es6/parser/tokenizers/index.js
generated
vendored
Normal file
1
node_modules/comment-parser/es6/parser/tokenizers/index.js
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
export {};
|
6
node_modules/comment-parser/es6/parser/tokenizers/name.d.ts
generated
vendored
Normal file
6
node_modules/comment-parser/es6/parser/tokenizers/name.d.ts
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
import { Tokenizer } from './index.js';
|
||||
/**
|
||||
* Splits remaining `spec.lines[].tokens.description` into `name` and `descriptions` tokens,
|
||||
* and populates the `spec.name`
|
||||
*/
|
||||
export default function nameTokenizer(): Tokenizer;
|
91
node_modules/comment-parser/es6/parser/tokenizers/name.js
generated
vendored
Normal file
91
node_modules/comment-parser/es6/parser/tokenizers/name.js
generated
vendored
Normal file
@ -0,0 +1,91 @@
|
||||
import { splitSpace, isSpace } from '../../util.js';
|
||||
const isQuoted = (s) => s && s.startsWith('"') && s.endsWith('"');
|
||||
/**
|
||||
* Splits remaining `spec.lines[].tokens.description` into `name` and `descriptions` tokens,
|
||||
* and populates the `spec.name`
|
||||
*/
|
||||
export default function nameTokenizer() {
|
||||
const typeEnd = (num, { tokens }, i) => tokens.type === '' ? num : i;
|
||||
return (spec) => {
|
||||
// look for the name in the line where {type} ends
|
||||
const { tokens } = spec.source[spec.source.reduce(typeEnd, 0)];
|
||||
const source = tokens.description.trimLeft();
|
||||
const quotedGroups = source.split('"');
|
||||
// if it starts with quoted group, assume it is a literal
|
||||
if (quotedGroups.length > 1 &&
|
||||
quotedGroups[0] === '' &&
|
||||
quotedGroups.length % 2 === 1) {
|
||||
spec.name = quotedGroups[1];
|
||||
tokens.name = `"${quotedGroups[1]}"`;
|
||||
[tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
|
||||
return spec;
|
||||
}
|
||||
let brackets = 0;
|
||||
let name = '';
|
||||
let optional = false;
|
||||
let defaultValue;
|
||||
// assume name is non-space string or anything wrapped into brackets
|
||||
for (const ch of source) {
|
||||
if (brackets === 0 && isSpace(ch))
|
||||
break;
|
||||
if (ch === '[')
|
||||
brackets++;
|
||||
if (ch === ']')
|
||||
brackets--;
|
||||
name += ch;
|
||||
}
|
||||
if (brackets !== 0) {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:unpaired-brackets',
|
||||
message: 'unpaired brackets',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
const nameToken = name;
|
||||
if (name[0] === '[' && name[name.length - 1] === ']') {
|
||||
optional = true;
|
||||
name = name.slice(1, -1);
|
||||
const parts = name.split('=');
|
||||
name = parts[0].trim();
|
||||
if (parts[1] !== undefined)
|
||||
defaultValue = parts.slice(1).join('=').trim();
|
||||
if (name === '') {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:empty-name',
|
||||
message: 'empty name',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
if (defaultValue === '') {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:empty-default',
|
||||
message: 'empty default value',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
// has "=" and is not a string, except for "=>"
|
||||
if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
|
||||
spec.problems.push({
|
||||
code: 'spec:name:invalid-default',
|
||||
message: 'invalid default value syntax',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
}
|
||||
spec.optional = optional;
|
||||
spec.name = name;
|
||||
tokens.name = nameToken;
|
||||
if (defaultValue !== undefined)
|
||||
spec.default = defaultValue;
|
||||
[tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
|
||||
return spec;
|
||||
};
|
||||
}
|
6
node_modules/comment-parser/es6/parser/tokenizers/tag.d.ts
generated
vendored
Normal file
6
node_modules/comment-parser/es6/parser/tokenizers/tag.d.ts
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
import { Tokenizer } from './index.js';
|
||||
/**
|
||||
* Splits the `@prefix` from remaining `Spec.lines[].token.description` into the `tag` token,
|
||||
* and populates `spec.tag`
|
||||
*/
|
||||
export default function tagTokenizer(): Tokenizer;
|
24
node_modules/comment-parser/es6/parser/tokenizers/tag.js
generated
vendored
Normal file
24
node_modules/comment-parser/es6/parser/tokenizers/tag.js
generated
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
/**
|
||||
* Splits the `@prefix` from remaining `Spec.lines[].token.description` into the `tag` token,
|
||||
* and populates `spec.tag`
|
||||
*/
|
||||
export default function tagTokenizer() {
|
||||
return (spec) => {
|
||||
const { tokens } = spec.source[0];
|
||||
const match = tokens.description.match(/\s*(@(\S+))(\s*)/);
|
||||
if (match === null) {
|
||||
spec.problems.push({
|
||||
code: 'spec:tag:prefix',
|
||||
message: 'tag should start with "@" symbol',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
tokens.tag = match[1];
|
||||
tokens.postTag = match[3];
|
||||
tokens.description = tokens.description.slice(match[0].length);
|
||||
spec.tag = match[2];
|
||||
return spec;
|
||||
};
|
||||
}
|
27
node_modules/comment-parser/es6/parser/tokenizers/type.d.ts
generated
vendored
Normal file
27
node_modules/comment-parser/es6/parser/tokenizers/type.d.ts
generated
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
import { Tokenizer } from './index.js';
|
||||
/**
|
||||
* Joiner is a function taking collected type token string parts,
|
||||
* and joining them together. In most of the cases this will be
|
||||
* a single piece like {type-name}, but type may go over multipe line
|
||||
* ```
|
||||
* @tag {function(
|
||||
* number,
|
||||
* string
|
||||
* )}
|
||||
* ```
|
||||
*/
|
||||
export type Joiner = (parts: string[]) => string;
|
||||
/**
|
||||
* Shortcut for standard Joiners
|
||||
* compact - trim surrounding space, replace line breaks with a single space
|
||||
* preserve - concat as is
|
||||
*/
|
||||
export type Spacing = 'compact' | 'preserve' | Joiner;
|
||||
/**
|
||||
* Sets splits remaining `Spec.lines[].tokes.description` into `type` and `description`
|
||||
* tokens and populates Spec.type`
|
||||
*
|
||||
* @param {Spacing} spacing tells how to deal with a whitespace
|
||||
* for type values going over multiple lines
|
||||
*/
|
||||
export default function typeTokenizer(spacing?: Spacing): Tokenizer;
|
65
node_modules/comment-parser/es6/parser/tokenizers/type.js
generated
vendored
Normal file
65
node_modules/comment-parser/es6/parser/tokenizers/type.js
generated
vendored
Normal file
@ -0,0 +1,65 @@
|
||||
import { splitSpace } from '../../util.js';
|
||||
/**
|
||||
* Sets splits remaining `Spec.lines[].tokes.description` into `type` and `description`
|
||||
* tokens and populates Spec.type`
|
||||
*
|
||||
* @param {Spacing} spacing tells how to deal with a whitespace
|
||||
* for type values going over multiple lines
|
||||
*/
|
||||
export default function typeTokenizer(spacing = 'compact') {
|
||||
const join = getJoiner(spacing);
|
||||
return (spec) => {
|
||||
let curlies = 0;
|
||||
let lines = [];
|
||||
for (const [i, { tokens }] of spec.source.entries()) {
|
||||
let type = '';
|
||||
if (i === 0 && tokens.description[0] !== '{')
|
||||
return spec;
|
||||
for (const ch of tokens.description) {
|
||||
if (ch === '{')
|
||||
curlies++;
|
||||
if (ch === '}')
|
||||
curlies--;
|
||||
type += ch;
|
||||
if (curlies === 0)
|
||||
break;
|
||||
}
|
||||
lines.push([tokens, type]);
|
||||
if (curlies === 0)
|
||||
break;
|
||||
}
|
||||
if (curlies !== 0) {
|
||||
spec.problems.push({
|
||||
code: 'spec:type:unpaired-curlies',
|
||||
message: 'unpaired curlies',
|
||||
line: spec.source[0].number,
|
||||
critical: true,
|
||||
});
|
||||
return spec;
|
||||
}
|
||||
const parts = [];
|
||||
const offset = lines[0][0].postDelimiter.length;
|
||||
for (const [i, [tokens, type]] of lines.entries()) {
|
||||
tokens.type = type;
|
||||
if (i > 0) {
|
||||
tokens.type = tokens.postDelimiter.slice(offset) + type;
|
||||
tokens.postDelimiter = tokens.postDelimiter.slice(0, offset);
|
||||
}
|
||||
[tokens.postType, tokens.description] = splitSpace(tokens.description.slice(type.length));
|
||||
parts.push(tokens.type);
|
||||
}
|
||||
parts[0] = parts[0].slice(1);
|
||||
parts[parts.length - 1] = parts[parts.length - 1].slice(0, -1);
|
||||
spec.type = join(parts);
|
||||
return spec;
|
||||
};
|
||||
}
|
||||
const trim = (x) => x.trim();
|
||||
function getJoiner(spacing) {
|
||||
if (spacing === 'compact')
|
||||
return (t) => t.map(trim).join('');
|
||||
else if (spacing === 'preserve')
|
||||
return (t) => t.join('\n');
|
||||
else
|
||||
return spacing;
|
||||
}
|
54
node_modules/comment-parser/es6/primitives.d.ts
generated
vendored
Normal file
54
node_modules/comment-parser/es6/primitives.d.ts
generated
vendored
Normal file
@ -0,0 +1,54 @@
|
||||
/** @deprecated */
|
||||
export declare enum Markers {
|
||||
start = "/**",
|
||||
nostart = "/***",
|
||||
delim = "*",
|
||||
end = "*/"
|
||||
}
|
||||
export interface BlockMarkers {
|
||||
start: string;
|
||||
nostart: string;
|
||||
delim: string;
|
||||
end: string;
|
||||
}
|
||||
export interface Block {
|
||||
description: string;
|
||||
tags: Spec[];
|
||||
source: Line[];
|
||||
problems: Problem[];
|
||||
}
|
||||
export interface Spec {
|
||||
tag: string;
|
||||
name: string;
|
||||
default?: string;
|
||||
type: string;
|
||||
optional: boolean;
|
||||
description: string;
|
||||
problems: Problem[];
|
||||
source: Line[];
|
||||
}
|
||||
export interface Line {
|
||||
number: number;
|
||||
source: string;
|
||||
tokens: Tokens;
|
||||
}
|
||||
export interface Tokens {
|
||||
start: string;
|
||||
delimiter: string;
|
||||
postDelimiter: string;
|
||||
tag: string;
|
||||
postTag: string;
|
||||
name: string;
|
||||
postName: string;
|
||||
type: string;
|
||||
postType: string;
|
||||
description: string;
|
||||
end: string;
|
||||
lineEnd: string;
|
||||
}
|
||||
export interface Problem {
|
||||
code: 'unhandled' | 'custom' | 'source:startline' | 'spec:tag:prefix' | 'spec:type:unpaired-curlies' | 'spec:name:unpaired-brackets' | 'spec:name:empty-name' | 'spec:name:invalid-default' | 'spec:name:empty-default';
|
||||
message: string;
|
||||
line: number;
|
||||
critical: boolean;
|
||||
}
|
8
node_modules/comment-parser/es6/primitives.js
generated
vendored
Normal file
8
node_modules/comment-parser/es6/primitives.js
generated
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
/** @deprecated */
|
||||
export var Markers;
|
||||
(function (Markers) {
|
||||
Markers["start"] = "/**";
|
||||
Markers["nostart"] = "/***";
|
||||
Markers["delim"] = "*";
|
||||
Markers["end"] = "*/";
|
||||
})(Markers = Markers || (Markers = {}));
|
3
node_modules/comment-parser/es6/stringifier/index.d.ts
generated
vendored
Normal file
3
node_modules/comment-parser/es6/stringifier/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
import { Block } from '../primitives.js';
|
||||
export type Stringifier = (block: Block) => string;
|
||||
export default function getStringifier(): Stringifier;
|
17
node_modules/comment-parser/es6/stringifier/index.js
generated
vendored
Normal file
17
node_modules/comment-parser/es6/stringifier/index.js
generated
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
function join(tokens) {
|
||||
return (tokens.start +
|
||||
tokens.delimiter +
|
||||
tokens.postDelimiter +
|
||||
tokens.tag +
|
||||
tokens.postTag +
|
||||
tokens.type +
|
||||
tokens.postType +
|
||||
tokens.name +
|
||||
tokens.postName +
|
||||
tokens.description +
|
||||
tokens.end +
|
||||
tokens.lineEnd);
|
||||
}
|
||||
export default function getStringifier() {
|
||||
return (block) => block.source.map(({ tokens }) => join(tokens)).join('\n');
|
||||
}
|
2
node_modules/comment-parser/es6/stringifier/inspect.d.ts
generated
vendored
Normal file
2
node_modules/comment-parser/es6/stringifier/inspect.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import { Block } from '../primitives.js';
|
||||
export default function inspect({ source }: Block): string;
|
44
node_modules/comment-parser/es6/stringifier/inspect.js
generated
vendored
Normal file
44
node_modules/comment-parser/es6/stringifier/inspect.js
generated
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
import { isSpace } from '../util.js';
|
||||
const zeroWidth = {
|
||||
line: 0,
|
||||
start: 0,
|
||||
delimiter: 0,
|
||||
postDelimiter: 0,
|
||||
tag: 0,
|
||||
postTag: 0,
|
||||
name: 0,
|
||||
postName: 0,
|
||||
type: 0,
|
||||
postType: 0,
|
||||
description: 0,
|
||||
end: 0,
|
||||
lineEnd: 0,
|
||||
};
|
||||
const headers = { lineEnd: 'CR' };
|
||||
const fields = Object.keys(zeroWidth);
|
||||
const repr = (x) => (isSpace(x) ? `{${x.length}}` : x);
|
||||
const frame = (line) => '|' + line.join('|') + '|';
|
||||
const align = (width, tokens) => Object.keys(tokens).map((k) => repr(tokens[k]).padEnd(width[k]));
|
||||
export default function inspect({ source }) {
|
||||
var _a, _b;
|
||||
if (source.length === 0)
|
||||
return '';
|
||||
const width = Object.assign({}, zeroWidth);
|
||||
for (const f of fields)
|
||||
width[f] = ((_a = headers[f]) !== null && _a !== void 0 ? _a : f).length;
|
||||
for (const { number, tokens } of source) {
|
||||
width.line = Math.max(width.line, number.toString().length);
|
||||
for (const k in tokens)
|
||||
width[k] = Math.max(width[k], repr(tokens[k]).length);
|
||||
}
|
||||
const lines = [[], []];
|
||||
for (const f of fields)
|
||||
lines[0].push(((_b = headers[f]) !== null && _b !== void 0 ? _b : f).padEnd(width[f]));
|
||||
for (const f of fields)
|
||||
lines[1].push('-'.padEnd(width[f], '-'));
|
||||
for (const { number, tokens } of source) {
|
||||
const line = number.toString().padStart(width.line);
|
||||
lines.push([line, ...align(width, tokens)]);
|
||||
}
|
||||
return lines.map(frame).join('\n');
|
||||
}
|
3
node_modules/comment-parser/es6/transforms/align.d.ts
generated
vendored
Normal file
3
node_modules/comment-parser/es6/transforms/align.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
import { Transform } from './index.js';
|
||||
import { Markers } from '../primitives.js';
|
||||
export default function align(markers?: typeof Markers): Transform;
|
93
node_modules/comment-parser/es6/transforms/align.js
generated
vendored
Normal file
93
node_modules/comment-parser/es6/transforms/align.js
generated
vendored
Normal file
@ -0,0 +1,93 @@
|
||||
var __rest = (this && this.__rest) || function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||
t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||
t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
import { Markers } from '../primitives.js';
|
||||
import { rewireSource } from '../util.js';
|
||||
const zeroWidth = {
|
||||
start: 0,
|
||||
tag: 0,
|
||||
type: 0,
|
||||
name: 0,
|
||||
};
|
||||
const getWidth = (markers = Markers) => (w, { tokens: t }) => ({
|
||||
start: t.delimiter === markers.start ? t.start.length : w.start,
|
||||
tag: Math.max(w.tag, t.tag.length),
|
||||
type: Math.max(w.type, t.type.length),
|
||||
name: Math.max(w.name, t.name.length),
|
||||
});
|
||||
const space = (len) => ''.padStart(len, ' ');
|
||||
/**
 * Builds a transform that vertically aligns the tag/type/name columns of a
 * parsed comment block: column widths are measured across the whole block
 * first, then every line is rewritten with padded post-* tokens.
 * @param markers comment delimiters (defaults to the standard Markers)
 */
export default function align(markers = Markers) {
    let intoTags = false; // flips true at the first tagged line; header lines above it get no column alignment
    let w; // widest start/tag/type/name in the block; computed per invocation in the returned transform
    // Rewrites one source line. Reads `w` and mutates `intoTags`, so lines
    // must be processed in their original order.
    function update(line) {
        const tokens = Object.assign({}, line.tokens);
        if (tokens.tag !== '')
            intoTags = true;
        const isEmpty = tokens.tag === '' &&
            tokens.name === '' &&
            tokens.type === '' &&
            tokens.description === '';
        // dangling '*/'
        if (tokens.end === markers.end && isEmpty) {
            tokens.start = space(w.start + 1);
            return Object.assign(Object.assign({}, line), { tokens });
        }
        // Re-indent depending on which delimiter the line carries.
        switch (tokens.delimiter) {
            case markers.start:
                tokens.start = space(w.start);
                break;
            case markers.delim:
                tokens.start = space(w.start + 1);
                break;
            default:
                tokens.delimiter = '';
                tokens.start = space(w.start + 2); // compensate delimiter
        }
        // Header (pre-tag) lines: only normalize the space after the delimiter.
        if (!intoTags) {
            tokens.postDelimiter = tokens.description === '' ? '' : ' ';
            return Object.assign(Object.assign({}, line), { tokens });
        }
        // Tracks which token is the last non-empty one on this line, so no
        // trailing padding is emitted after it.
        const nothingAfter = {
            delim: false,
            tag: false,
            type: false,
            name: false,
        };
        if (tokens.description === '') {
            nothingAfter.name = true;
            tokens.postName = '';
            if (tokens.name === '') {
                nothingAfter.type = true;
                tokens.postType = '';
                if (tokens.type === '') {
                    nothingAfter.tag = true;
                    tokens.postTag = '';
                    if (tokens.tag === '') {
                        nothingAfter.delim = true;
                    }
                }
            }
        }
        // Pad each populated column to the block-wide width plus one separator.
        tokens.postDelimiter = nothingAfter.delim ? '' : ' ';
        if (!nothingAfter.tag)
            tokens.postTag = space(w.tag - tokens.tag.length + 1);
        if (!nothingAfter.type)
            tokens.postType = space(w.type - tokens.type.length + 1);
        if (!nothingAfter.name)
            tokens.postName = space(w.name - tokens.name.length + 1);
        return Object.assign(Object.assign({}, line), { tokens });
    }
    return (_a) => {
        var { source } = _a, fields = __rest(_a, ["source"]);
        // Measure column widths across the whole block before rewriting lines.
        w = source.reduce(getWidth(markers), Object.assign({}, zeroWidth));
        return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
    };
}
|
3
node_modules/comment-parser/es6/transforms/crlf.d.ts
generated
vendored
Normal file
3
node_modules/comment-parser/es6/transforms/crlf.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
import { Transform } from './index.js';
|
||||
export type Ending = 'LF' | 'CRLF';
|
||||
export default function crlf(ending: Ending): Transform;
|
34
node_modules/comment-parser/es6/transforms/crlf.js
generated
vendored
Normal file
34
node_modules/comment-parser/es6/transforms/crlf.js
generated
vendored
Normal file
@ -0,0 +1,34 @@
|
||||
// TypeScript emit helper: copies the own enumerable properties of `source`
// (string keys and symbols) into a fresh object, skipping those named in
// `excluded`. Implements object-rest destructuring ({ source, ...fields }).
var __rest = (this && this.__rest) || function (source, excluded) {
    var rest = {};
    for (var key in source) {
        if (Object.prototype.hasOwnProperty.call(source, key) && excluded.indexOf(key) < 0) {
            rest[key] = source[key];
        }
    }
    if (source != null && typeof Object.getOwnPropertySymbols === "function") {
        var symbols = Object.getOwnPropertySymbols(source);
        for (var j = 0; j < symbols.length; j++) {
            var sym = symbols[j];
            if (excluded.indexOf(sym) < 0 && Object.prototype.propertyIsEnumerable.call(source, sym)) {
                rest[sym] = source[sym];
            }
        }
    }
    return rest;
};
|
||||
import { rewireSource } from '../util.js';
|
||||
// Token field names listed last-to-first as they appear within a line.
// NOTE(review): not referenced elsewhere in this module's visible code —
// presumably consumed by a sibling module; confirm before removing.
const order = [
    'end',
    'description',
    'postType',
    'type',
    'postName',
    'name',
    'postTag',
    'tag',
    'postDelimiter',
    'delimiter',
    'start',
];
|
||||
/**
 * Builds a transform that normalizes line endings across a parsed block:
 * 'LF' clears every token's lineEnd, 'CRLF' sets it to '\r'.
 * @param ending target line-ending style ('LF' | 'CRLF')
 */
export default function crlf(ending) {
    const lineEnd = ending === 'LF' ? '' : '\r';
    // Rewrite a single source line with the requested terminator.
    const update = (line) => (Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { lineEnd }) }));
    return (_a) => {
        var { source } = _a, fields = __rest(_a, ["source"]);
        return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
    };
}
|
2
node_modules/comment-parser/es6/transforms/indent.d.ts
generated
vendored
Normal file
2
node_modules/comment-parser/es6/transforms/indent.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import { Transform } from './index.js';
|
||||
export default function indent(pos: number): Transform;
|
32
node_modules/comment-parser/es6/transforms/indent.js
generated
vendored
Normal file
32
node_modules/comment-parser/es6/transforms/indent.js
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
||||
// TypeScript emit helper: copies the own enumerable properties of `source`
// (string keys and symbols) into a fresh object, skipping those named in
// `excluded`. Implements object-rest destructuring ({ source, ...fields }).
var __rest = (this && this.__rest) || function (source, excluded) {
    var rest = {};
    for (var key in source) {
        if (Object.prototype.hasOwnProperty.call(source, key) && excluded.indexOf(key) < 0) {
            rest[key] = source[key];
        }
    }
    if (source != null && typeof Object.getOwnPropertySymbols === "function") {
        var symbols = Object.getOwnPropertySymbols(source);
        for (var j = 0; j < symbols.length; j++) {
            var sym = symbols[j];
            if (excluded.indexOf(sym) < 0 && Object.prototype.propertyIsEnumerable.call(source, sym)) {
                rest[sym] = source[sym];
            }
        }
    }
    return rest;
};
|
||||
import { rewireSource } from '../util.js';
|
||||
// Drops `offset` leading characters from a string (shrinks indentation).
const pull = (offset) => (str) => str.slice(offset);
// Appends `offset` spaces to a string (grows indentation).
const push = (offset) => {
    const padding = ''.padStart(offset, ' ');
    return (str) => str + padding;
};
|
||||
/**
 * Builds a transform that re-indents a comment block so its first line starts
 * at column `pos`; every following line is shifted by the same amount.
 * @param pos target column for the block's leading whitespace
 */
export default function indent(pos) {
    let shift;
    // Chooses widen-vs-trim from the first line seen, then reuses that shift.
    function pad(start) {
        if (shift === undefined) {
            const offset = pos - start.length;
            shift = offset > 0 ? push(offset) : pull(-offset);
        }
        return shift(start);
    }
    function update(line) {
        return Object.assign(Object.assign({}, line), { tokens: Object.assign(Object.assign({}, line.tokens), { start: pad(line.tokens.start) }) });
    }
    return (_a) => {
        var { source } = _a, fields = __rest(_a, ["source"]);
        return rewireSource(Object.assign(Object.assign({}, fields), { source: source.map(update) }));
    };
}
|
3
node_modules/comment-parser/es6/transforms/index.d.ts
generated
vendored
Normal file
3
node_modules/comment-parser/es6/transforms/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
import { Block } from '../primitives.js';
|
||||
export type Transform = (Block: Block) => Block;
|
||||
export declare function flow(...transforms: Transform[]): Transform;
|
3
node_modules/comment-parser/es6/transforms/index.js
generated
vendored
Normal file
3
node_modules/comment-parser/es6/transforms/index.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
/**
 * Composes transforms left-to-right: flow(a, b)(block) === b(a(block)).
 * With no transforms, returns the block unchanged.
 */
export function flow(...transforms) {
    return (block) => {
        let result = block;
        for (const transform of transforms) {
            result = transform(result);
        }
        return result;
    };
}
|
21
node_modules/comment-parser/es6/util.d.ts
generated
vendored
Normal file
21
node_modules/comment-parser/es6/util.d.ts
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
import { Block, Tokens, Spec } from './primitives.js';
|
||||
export declare function isSpace(source: string): boolean;
|
||||
export declare function hasCR(source: string): boolean;
|
||||
export declare function splitCR(source: string): [string, string];
|
||||
export declare function splitSpace(source: string): [string, string];
|
||||
export declare function splitLines(source: string): string[];
|
||||
export declare function seedBlock(block?: Partial<Block>): Block;
|
||||
export declare function seedSpec(spec?: Partial<Spec>): Spec;
|
||||
export declare function seedTokens(tokens?: Partial<Tokens>): Tokens;
|
||||
/**
|
||||
* Assures Block.tags[].source contains references to the Block.source items,
|
||||
* using Block.source as a source of truth. This is a counterpart of rewireSpecs
|
||||
* @param block parsed comments block
|
||||
*/
|
||||
export declare function rewireSource(block: Block): Block;
|
||||
/**
|
||||
* Assures Block.source contains references to the Block.tags[].source items,
|
||||
* using Block.tags[].source as a source of truth. This is a counterpart of rewireSource
|
||||
* @param block parsed comments block
|
||||
*/
|
||||
export declare function rewireSpecs(block: Block): Block;
|
52
node_modules/comment-parser/es6/util.js
generated
vendored
Normal file
52
node_modules/comment-parser/es6/util.js
generated
vendored
Normal file
@ -0,0 +1,52 @@
|
||||
/** True when `source` is non-empty and consists solely of whitespace. */
export function isSpace(source) {
    return source.length > 0 && !/\S/.test(source);
}
|
||||
/** True when `source` ends with a carriage return. */
export function hasCR(source) {
    return source.endsWith('\r');
}
|
||||
/**
 * Splits trailing CR characters off `source`.
 * @returns [carriage returns, remainder]; ['', source] when there are none.
 */
export function splitCR(source) {
    const match = /\r+$/.exec(source);
    if (match === null)
        return ['', source];
    const cut = source.length - match[0].length;
    return [source.slice(cut), source.slice(0, cut)];
}
|
||||
/**
 * Splits leading whitespace off `source`.
 * @returns [leading whitespace, remainder]; ['', source] when there is none.
 */
export function splitSpace(source) {
    const match = /^\s+/.exec(source);
    if (match === null)
        return ['', source];
    return [match[0], source.slice(match[0].length)];
}
|
||||
/** Splits `source` on LF; any trailing CRs stay attached (see splitCR). */
export function splitLines(source) {
    return source.split('\n');
}
|
||||
/** Creates a Block with empty defaults, overridden by the given fields. */
export function seedBlock(block = {}) {
    return Object.assign({ description: '', tags: [], source: [], problems: [] }, block);
}
|
||||
/** Creates a Spec with empty defaults, overridden by the given fields. */
export function seedSpec(spec = {}) {
    return Object.assign({ tag: '', name: '', type: '', optional: false, description: '', problems: [], source: [] }, spec);
}
|
||||
/** Creates a Tokens record with empty defaults, overridden by the given fields. */
export function seedTokens(tokens = {}) {
    return Object.assign({ start: '', delimiter: '', postDelimiter: '', tag: '', postTag: '', name: '', postName: '', type: '', postType: '', description: '', end: '', lineEnd: '' }, tokens);
}
|
||||
/**
|
||||
* Assures Block.tags[].source contains references to the Block.source items,
|
||||
* using Block.source as a source of truth. This is a counterpart of rewireSpecs
|
||||
* @param block parsed coments block
|
||||
*/
|
||||
export function rewireSource(block) {
|
||||
const source = block.source.reduce((acc, line) => acc.set(line.number, line), new Map());
|
||||
for (const spec of block.tags) {
|
||||
spec.source = spec.source.map((line) => source.get(line.number));
|
||||
}
|
||||
return block;
|
||||
}
|
||||
/**
|
||||
* Assures Block.source contains references to the Block.tags[].source items,
|
||||
* using Block.tags[].source as a source of truth. This is a counterpart of rewireSource
|
||||
* @param block parsed coments block
|
||||
*/
|
||||
export function rewireSpecs(block) {
|
||||
const source = block.tags.reduce((acc, spec) => spec.source.reduce((acc, line) => acc.set(line.number, line), acc), new Map());
|
||||
block.source = block.source.map((line) => source.get(line.number) || line);
|
||||
return block;
|
||||
}
|
Reference in New Issue
Block a user