Add new sheets (armor, weapons, malefica) and v13 support
node_modules/comment-parser/src/parser/block-parser.ts (generated, vendored, normal file, 60 lines added)
@@ -0,0 +1,60 @@
import { Line } from '../primitives.js';

const reTag = /^@\S+/;

/**
 * Groups source lines in sections representing tags.
 * First section is a block description if present. Last section captures lines starting with
 * the last tag to the end of the block, including dangling closing marker.
 * @param {Line[]} block souce lines making a single comment block
 */
export type Parser = (block: Line[]) => Line[][];

/**
 * Predicate telling if string contains opening/closing escaping sequence
 * @param {string} source raw source line
 */
export type Fencer = (source: string) => boolean;

/**
 * `Parser` configuration options
 */
export interface Options {
  // escaping sequence or predicate
  fence: string | Fencer;
}

/**
 * Creates configured `Parser`
 * @param {Partial<Options>} options
 */
export default function getParser({
  fence = '```',
}: Partial<Options> = {}): Parser {
  const fencer = getFencer(fence);
  const toggleFence = (source: string, isFenced: boolean): boolean =>
    fencer(source) ? !isFenced : isFenced;

  return function parseBlock(source: Line[]): Line[][] {
    // start with description section
    const sections: Line[][] = [[]];

    let isFenced = false;
    for (const line of source) {
      if (reTag.test(line.tokens.description) && !isFenced) {
        sections.push([line]);
      } else {
        sections[sections.length - 1].push(line);
      }
      isFenced = toggleFence(line.tokens.description, isFenced);
    }

    return sections;
  };
}

function getFencer(fence: string | Fencer): Fencer {
  if (typeof fence === 'string')
    return (source: string) => source.split(fence).length % 2 === 0;
  return fence;
}
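For reference, a minimal sketch of how the block parser above splits a comment block into sections; the `line` helper and the partial `seedTokens` call are illustrative assumptions, not part of this commit:

// Illustrative usage sketch; paths are relative to this vendored directory.
import { Line } from '../primitives.js';
import { seedTokens } from '../util.js';
import getBlockParser from './block-parser.js';

// Hypothetical helper turning plain strings into Line stubs (assumes
// seedTokens accepts a partial Tokens object).
const line = (description: string, number: number): Line => ({
  number,
  source: description,
  tokens: seedTokens({ description }),
});

const parseBlock = getBlockParser(); // fence defaults to '```'
const sections = parseBlock([
  line('Block description', 0),
  line('@param {number} x first argument', 1),
  line('@returns {number} the result', 2),
]);
// sections[0] -> description lines, sections[1] -> the @param line,
// sections[2] -> the @returns line; tags inside a ``` fence would not split.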
node_modules/comment-parser/src/parser/index.ts (generated, vendored, normal file, 70 lines added)
@@ -0,0 +1,70 @@
import { Block, Line, Problem, BlockMarkers, Markers } from '../primitives.js';
import { splitLines } from '../util.js';
import blockParser from './block-parser.js';
import sourceParser from './source-parser.js';
import specParser from './spec-parser.js';
import { Tokenizer } from './tokenizers/index.js';
import tokenizeTag from './tokenizers/tag.js';
import tokenizeType from './tokenizers/type.js';
import tokenizeName from './tokenizers/name.js';
import tokenizeDescription, {
  getJoiner as getDescriptionJoiner,
} from './tokenizers/description.js';

export interface Options {
  // start count for source line numbers
  startLine: number;
  // escaping chars sequence marking wrapped content literal for the parser
  fence: string;
  // block and comment description compaction strategy
  spacing: 'compact' | 'preserve';
  // comment description markers
  markers: BlockMarkers;
  // tokenizer functions extracting name, type, and description out of tag, see Tokenizer
  tokenizers: Tokenizer[];
}

export type Parser = (source: string) => Block[];

export default function getParser({
  startLine = 0,
  fence = '```',
  spacing = 'compact',
  markers = Markers,
  tokenizers = [
    tokenizeTag(),
    tokenizeType(spacing),
    tokenizeName(),
    tokenizeDescription(spacing),
  ],
}: Partial<Options> = {}): Parser {
  if (startLine < 0 || startLine % 1 > 0) throw new Error('Invalid startLine');

  const parseSource = sourceParser({ startLine, markers });
  const parseBlock = blockParser({ fence });
  const parseSpec = specParser({ tokenizers });
  const joinDescription = getDescriptionJoiner(spacing);

  return function (source: string): Block[] {
    const blocks: Block[] = [];
    for (const line of splitLines(source)) {
      const lines = parseSource(line);

      if (lines === null) continue;

      const sections = parseBlock(lines);
      const specs = sections.slice(1).map(parseSpec);

      blocks.push({
        description: joinDescription(sections[0], markers),
        tags: specs,
        source: lines,
        problems: specs.reduce(
          (acc: Problem[], spec) => acc.concat(spec.problems),
          []
        ),
      });
    }
    return blocks;
  };
}
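This module is the factory behind the package's top-level parse. A minimal sketch of wiring it with a reduced tokenizer chain, assuming the relative import paths of this vendored tree:

// Illustrative usage sketch; the sample comment text is made up.
import getParser from './index.js';
import tokenizeTag from './tokenizers/tag.js';
import tokenizeDescription from './tokenizers/description.js';

// Parse tag and description only, skipping type and name extraction.
const parse = getParser({
  spacing: 'preserve',
  tokenizers: [tokenizeTag(), tokenizeDescription('preserve')],
});

const blocks = parse(`
/**
 * Renders the armor sheet.
 * @override
 */
`);
// blocks[0].description -> 'Renders the armor sheet.'
// blocks[0].tags[0].tag -> 'override'

With spacing set to 'preserve', the description joiner keeps the original line breaks instead of collapsing them into a single line.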
node_modules/comment-parser/src/parser/source-parser.ts (generated, vendored, normal file, 71 lines added)
@@ -0,0 +1,71 @@
import { Line, Tokens, BlockMarkers, Markers } from '../primitives.js';
import { seedTokens, splitSpace, splitCR } from '../util.js';

export interface Options {
  startLine: number;
  markers: BlockMarkers;
}

export type Parser = (source: string) => Line[] | null;

export default function getParser({
  startLine = 0,
  markers = Markers,
}: Partial<Options> = {}): Parser {
  let block: Line[] | null = null;
  let num = startLine;

  return function parseSource(source: string): Line[] | null {
    let rest = source;
    const tokens: Tokens = seedTokens();

    [tokens.lineEnd, rest] = splitCR(rest);
    [tokens.start, rest] = splitSpace(rest);

    if (
      block === null &&
      rest.startsWith(markers.start) &&
      !rest.startsWith(markers.nostart)
    ) {
      block = [];
      tokens.delimiter = rest.slice(0, markers.start.length);
      rest = rest.slice(markers.start.length);
      [tokens.postDelimiter, rest] = splitSpace(rest);
    }

    if (block === null) {
      num++;
      return null;
    }

    const isClosed = rest.trimRight().endsWith(markers.end);

    if (
      tokens.delimiter === '' &&
      rest.startsWith(markers.delim) &&
      !rest.startsWith(markers.end)
    ) {
      tokens.delimiter = markers.delim;
      rest = rest.slice(markers.delim.length);
      [tokens.postDelimiter, rest] = splitSpace(rest);
    }

    if (isClosed) {
      const trimmed = rest.trimRight();
      tokens.end = rest.slice(trimmed.length - markers.end.length);
      rest = trimmed.slice(0, -markers.end.length);
    }

    tokens.description = rest;
    block.push({ number: num, source, tokens });
    num++;

    if (isClosed) {
      const result = block.slice();
      block = null;
      return result;
    }

    return null;
  };
}
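parseSource is stateful: it accumulates lines from the opening marker onward and only returns the collected Line[] on the line that closes the block; every other call returns null. A minimal sketch, assuming the relative import paths of this vendored tree:

// Illustrative usage sketch feeding lines one by one.
import { Line } from '../primitives.js';
import { splitLines } from '../util.js';
import getSourceParser from './source-parser.js';

const parseSource = getSourceParser({ startLine: 1 });

let collected: Line[] | null = null;
for (const line of splitLines('/**\n * hello\n */')) {
  collected = parseSource(line) ?? collected;
}
// collected now holds three Line entries (numbers 1..3); the first two calls
// returned null because the block had not been closed yet.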
node_modules/comment-parser/src/parser/spec-parser.ts (generated, vendored, normal file, 20 lines added)
@@ -0,0 +1,20 @@
import { Line, Spec } from '../primitives.js';
import { seedSpec } from '../util.js';
import { Tokenizer } from './tokenizers/index.js';

export type Parser = (source: Line[]) => Spec;

export interface Options {
  tokenizers: Tokenizer[];
}

export default function getParser({ tokenizers }: Options): Parser {
  return function parseSpec(source: Line[]): Spec {
    let spec = seedSpec({ source });
    for (const tokenize of tokenizers) {
      spec = tokenize(spec);
      if (spec.problems[spec.problems.length - 1]?.critical) break;
    }
    return spec;
  };
}
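parseSpec runs the tokenizer chain in order over one tag section and stops at the first critical problem. A minimal sketch of building one with a reduced chain, assuming the relative import paths of this vendored tree:

// Illustrative usage sketch.
import getSpecParser from './spec-parser.js';
import tokenizeTag from './tokenizers/tag.js';
import tokenizeName from './tokenizers/name.js';
import tokenizeDescription from './tokenizers/description.js';

const parseSpec = getSpecParser({
  tokenizers: [tokenizeTag(), tokenizeName(), tokenizeDescription()],
});
// parser/index.ts calls this with the Line[] of one tag section. If the
// section does not start with '@', tokenizeTag pushes a critical
// 'spec:tag:prefix' problem and the name/description tokenizers never run.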
node_modules/comment-parser/src/parser/tokenizers/description.ts (generated, vendored, normal file, 78 lines added)
@@ -0,0 +1,78 @@
import { Spec, Line, BlockMarkers, Markers } from '../../primitives.js';
import { Tokenizer } from './index.js';

/**
 * Walks over provided lines joining description token into a single string.
 * */
export type Joiner = (lines: Line[], markers?: BlockMarkers) => string;

/**
 * Shortcut for standard Joiners
 * compact - strip surrounding whitespace and concat lines using a single string
 * preserve - preserves original whitespace and line breaks as is
 */
export type Spacing = 'compact' | 'preserve' | Joiner;

/**
 * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
 * following given spacing srtategy
 * @param {Spacing} spacing tells how to handle the whitespace
 * @param {BlockMarkers} markers tells how to handle comment block delimitation
 */
export default function descriptionTokenizer(
  spacing: Spacing = 'compact',
  markers = Markers
): Tokenizer {
  const join = getJoiner(spacing);
  return (spec: Spec): Spec => {
    spec.description = join(spec.source, markers);
    return spec;
  };
}

export function getJoiner(spacing: Spacing): Joiner {
  if (spacing === 'compact') return compactJoiner;
  if (spacing === 'preserve') return preserveJoiner;

  return spacing;
}

function compactJoiner(lines: Line[], markers = Markers): string {
  return lines
    .map(({ tokens: { description } }: Line) => description.trim())
    .filter((description) => description !== '')
    .join(' ');
}

const lineNo = (num: number, { tokens }: Line, i: number) =>
  tokens.type === '' ? num : i;

const getDescription = ({ tokens }: Line) =>
  (tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1)) +
  tokens.description;

function preserveJoiner(lines: Line[], markers = Markers): string {
  if (lines.length === 0) return '';

  // skip the opening line with no description
  if (
    lines[0].tokens.description === '' &&
    lines[0].tokens.delimiter === markers.start
  )
    lines = lines.slice(1);

  // skip the closing line with no description
  const lastLine = lines[lines.length - 1];

  if (
    lastLine !== undefined &&
    lastLine.tokens.description === '' &&
    lastLine.tokens.end.endsWith(markers.end)
  )
    lines = lines.slice(0, -1);

  // description starts at the last line of type definition
  lines = lines.slice(lines.reduce(lineNo, 0));

  return lines.map(getDescription).join('\n');
}
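Since Spacing also accepts a Joiner function, the description strategy can be customized without touching the tokenizer itself. A minimal sketch of a custom joiner that keeps line breaks but trims each line; the joiner is illustrative, not part of this commit:

// Illustrative usage sketch.
import { Line } from '../../primitives.js';
import descriptionTokenizer, { Joiner } from './description.js';

// Keep one line per source line, trimmed, dropping empty ones.
const trimmedJoiner: Joiner = (lines: Line[]) =>
  lines
    .map((line) => line.tokens.description.trim())
    .filter((text) => text !== '')
    .join('\n');

// Drop-in replacement for tokenizeDescription('compact') in the tokenizers list.
const tokenize = descriptionTokenizer(trimmedJoiner);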
node_modules/comment-parser/src/parser/tokenizers/index.ts (generated, vendored, normal file, 8 lines added)
@@ -0,0 +1,8 @@
import { Spec } from '../../primitives.js';

/**
 * Splits `spect.lines[].token.description` into other tokens,
 * and populates the spec.{tag, name, type, description}. Invoked in a chaing
 * with other tokens, operations listed above can be moved to separate tokenizers
 */
export type Tokenizer = (spec: Spec) => Spec;
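A Tokenizer is just a Spec-to-Spec function, so the chain passed to the parser can be extended with custom steps. A minimal sketch of a tokenizer that flags tags without a description; the problem code is made up for illustration:

// Illustrative usage sketch.
import { Spec } from '../../primitives.js';
import { Tokenizer } from './index.js';

const requireDescription = (): Tokenizer => (spec: Spec): Spec => {
  if (spec.description.trim() === '') {
    spec.problems.push({
      code: 'custom:description:empty', // hypothetical problem code
      message: 'tag has no description',
      line: spec.source[0].number,
      critical: false,
    });
  }
  return spec;
};
// Append after the standard tokenizers in getParser({ tokenizers: [...] }).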
node_modules/comment-parser/src/parser/tokenizers/name.ts (generated, vendored, normal file, 112 lines added)
@@ -0,0 +1,112 @@
import { Spec, Line } from '../../primitives.js';
import { splitSpace, isSpace } from '../../util.js';
import { Tokenizer } from './index.js';

const isQuoted = (s: string) => s && s.startsWith('"') && s.endsWith('"');

/**
 * Splits remaining `spec.lines[].tokens.description` into `name` and `descriptions` tokens,
 * and populates the `spec.name`
 */
export default function nameTokenizer(): Tokenizer {
  const typeEnd = (num: number, { tokens }: Line, i: number) =>
    tokens.type === '' ? num : i;

  return (spec: Spec): Spec => {
    // look for the name in the line where {type} ends
    const { tokens } = spec.source[spec.source.reduce(typeEnd, 0)];
    const source = tokens.description.trimLeft();

    const quotedGroups = source.split('"');

    // if it starts with quoted group, assume it is a literal
    if (
      quotedGroups.length > 1 &&
      quotedGroups[0] === '' &&
      quotedGroups.length % 2 === 1
    ) {
      spec.name = quotedGroups[1];
      tokens.name = `"${quotedGroups[1]}"`;
      [tokens.postName, tokens.description] = splitSpace(
        source.slice(tokens.name.length)
      );
      return spec;
    }

    let brackets = 0;
    let name = '';
    let optional = false;
    let defaultValue: string;

    // assume name is non-space string or anything wrapped into brackets
    for (const ch of source) {
      if (brackets === 0 && isSpace(ch)) break;
      if (ch === '[') brackets++;
      if (ch === ']') brackets--;
      name += ch;
    }

    if (brackets !== 0) {
      spec.problems.push({
        code: 'spec:name:unpaired-brackets',
        message: 'unpaired brackets',
        line: spec.source[0].number,
        critical: true,
      });
      return spec;
    }

    const nameToken = name;

    if (name[0] === '[' && name[name.length - 1] === ']') {
      optional = true;
      name = name.slice(1, -1);

      const parts = name.split('=');
      name = parts[0].trim();
      if (parts[1] !== undefined)
        defaultValue = parts.slice(1).join('=').trim();

      if (name === '') {
        spec.problems.push({
          code: 'spec:name:empty-name',
          message: 'empty name',
          line: spec.source[0].number,
          critical: true,
        });
        return spec;
      }

      if (defaultValue === '') {
        spec.problems.push({
          code: 'spec:name:empty-default',
          message: 'empty default value',
          line: spec.source[0].number,
          critical: true,
        });
        return spec;
      }

      // has "=" and is not a string, except for "=>"
      if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
        spec.problems.push({
          code: 'spec:name:invalid-default',
          message: 'invalid default value syntax',
          line: spec.source[0].number,
          critical: true,
        });
        return spec;
      }
    }

    spec.optional = optional;
    spec.name = name;
    tokens.name = nameToken;

    if (defaultValue !== undefined) spec.default = defaultValue;
    [tokens.postName, tokens.description] = splitSpace(
      source.slice(tokens.name.length)
    );
    return spec;
  };
}
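The bracket handling above is what gives optional names and default values. A minimal sketch using the package's top-level parse() to exercise it; the sample comment is illustrative:

// Illustrative usage sketch.
import { parse } from 'comment-parser';

const [block] = parse(`
/**
 * @param {number} [count=10] optional item count
 */
`);
// Per the bracket handling above:
// block.tags[0].name     -> 'count'
// block.tags[0].optional -> true
// block.tags[0].default  -> '10'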
node_modules/comment-parser/src/parser/tokenizers/tag.ts (generated, vendored, normal file, 30 lines added)
@@ -0,0 +1,30 @@
import { Spec } from '../../primitives.js';
import { Tokenizer } from './index.js';

/**
 * Splits the `@prefix` from remaining `Spec.lines[].token.description` into the `tag` token,
 * and populates `spec.tag`
 */
export default function tagTokenizer(): Tokenizer {
  return (spec: Spec): Spec => {
    const { tokens } = spec.source[0];
    const match = tokens.description.match(/\s*(@(\S+))(\s*)/);

    if (match === null) {
      spec.problems.push({
        code: 'spec:tag:prefix',
        message: 'tag should start with "@" symbol',
        line: spec.source[0].number,
        critical: true,
      });
      return spec;
    }

    tokens.tag = match[1];
    tokens.postTag = match[3];
    tokens.description = tokens.description.slice(match[0].length);

    spec.tag = match[2];
    return spec;
  };
}
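The tag tokenizer keeps the `@` prefix in the token but strips it from spec.tag. A minimal sketch via the package's top-level parse(); the sample comment is illustrative:

// Illustrative usage sketch.
import { parse } from 'comment-parser';

const [block] = parse('/** @deprecated use the v2 sheet instead */');
// block.tags[0].tag                  -> 'deprecated' (no '@' prefix)
// block.tags[0].source[0].tokens.tag -> '@deprecated' (prefix kept in tokens)
// A section that does not start with '@' would instead get the critical
// 'spec:tag:prefix' problem pushed above.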
node_modules/comment-parser/src/parser/tokenizers/type.ts (generated, vendored, normal file, 91 lines added)
@@ -0,0 +1,91 @@
import { Spec, Tokens } from '../../primitives.js';
import { splitSpace } from '../../util.js';
import { Tokenizer } from './index.js';

/**
 * Joiner is a function taking collected type token string parts,
 * and joining them together. In most of the cases this will be
 * a single piece like {type-name}, but type may go over multipe line
 * ```
 * @tag {function(
 *   number,
 *   string
 * )}
 * ```
 */
export type Joiner = (parts: string[]) => string;

/**
 * Shortcut for standard Joiners
 * compact - trim surrounding space, replace line breaks with a single space
 * preserve - concat as is
 */
export type Spacing = 'compact' | 'preserve' | Joiner;

/**
 * Sets splits remaining `Spec.lines[].tokes.description` into `type` and `description`
 * tokens and populates Spec.type`
 *
 * @param {Spacing} spacing tells how to deal with a whitespace
 * for type values going over multiple lines
 */
export default function typeTokenizer(spacing: Spacing = 'compact'): Tokenizer {
  const join = getJoiner(spacing);
  return (spec: Spec): Spec => {
    let curlies = 0;
    let lines: [Tokens, string][] = [];

    for (const [i, { tokens }] of spec.source.entries()) {
      let type = '';
      if (i === 0 && tokens.description[0] !== '{') return spec;

      for (const ch of tokens.description) {
        if (ch === '{') curlies++;
        if (ch === '}') curlies--;
        type += ch;
        if (curlies === 0) break;
      }

      lines.push([tokens, type]);
      if (curlies === 0) break;
    }

    if (curlies !== 0) {
      spec.problems.push({
        code: 'spec:type:unpaired-curlies',
        message: 'unpaired curlies',
        line: spec.source[0].number,
        critical: true,
      });
      return spec;
    }

    const parts: string[] = [];
    const offset = lines[0][0].postDelimiter.length;

    for (const [i, [tokens, type]] of lines.entries()) {
      tokens.type = type;
      if (i > 0) {
        tokens.type = tokens.postDelimiter.slice(offset) + type;
        tokens.postDelimiter = tokens.postDelimiter.slice(0, offset);
      }
      [tokens.postType, tokens.description] = splitSpace(
        tokens.description.slice(type.length)
      );
      parts.push(tokens.type);
    }

    parts[0] = parts[0].slice(1);
    parts[parts.length - 1] = parts[parts.length - 1].slice(0, -1);
    spec.type = join(parts);
    return spec;
  };
}

const trim = (x: string) => x.trim();

function getJoiner(spacing: Spacing): Joiner {
  if (spacing === 'compact') return (t: string[]) => t.map(trim).join('');
  else if (spacing === 'preserve') return (t: string[]) => t.join('\n');
  else return spacing;
}
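Type values are collected across lines by curly-brace balance, and the spacing option decides how the collected parts are joined. A minimal sketch via the package's top-level parse(); the sample comment is illustrative:

// Illustrative usage sketch.
import { parse } from 'comment-parser';

const source = `
/**
 * @arg {function(
 *   number,
 *   string
 * )} callback
 */
`;

const [compact] = parse(source); // default spacing: 'compact'
// compact.tags[0].type -> 'function(number,string)'

const [preserved] = parse(source, { spacing: 'preserve' });
// preserved.tags[0].type keeps the line breaks of the original type value.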