Fix actions/tour

2025-02-13 23:19:32 +01:00
parent 47dd1adb30
commit fa21d30994
4543 changed files with 680810 additions and 0 deletions

36
node_modules/comment-parser/src/index.ts generated vendored Normal file

@@ -0,0 +1,36 @@
import getParser, { Options as ParserOptions } from './parser/index.js';
import descriptionTokenizer from './parser/tokenizers/description.js';
import nameTokenizer from './parser/tokenizers/name.js';
import tagTokenizer from './parser/tokenizers/tag.js';
import typeTokenizer from './parser/tokenizers/type.js';
import getStringifier from './stringifier/index.js';
import alignTransform from './transforms/align.js';
import indentTransform from './transforms/indent.js';
import crlfTransform from './transforms/crlf.js';
import { flow as flowTransform } from './transforms/index.js';
import { rewireSpecs, rewireSource, seedBlock, seedTokens } from './util.js';
export * from './primitives.js';
export function parse(source: string, options: Partial<ParserOptions> = {}) {
return getParser(options)(source);
}
export const stringify = getStringifier();
export { default as inspect } from './stringifier/inspect.js';
export const transforms = {
flow: flowTransform,
align: alignTransform,
indent: indentTransform,
crlf: crlfTransform,
};
export const tokenizers = {
tag: tagTokenizer,
type: typeTokenizer,
name: nameTokenizer,
description: descriptionTokenizer,
};
export const util = { rewireSpecs, rewireSource, seedBlock, seedTokens };
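
Taken together these exports form the library's public surface. A minimal usage sketch, assuming the package entry point resolves to this module (the sample comment is illustrative); the transforms, tokenizers, and util exports are exercised alongside their modules further below:

import { parse, stringify } from 'comment-parser';

const source = `
/**
 * Description line
 * @param {string} name argument description
 */
function act(name: string) {}
`;

// parse() returns one Block per comment found in the source text
const blocks = parse(source);
console.log(blocks[0].tags[0].name); // "name"

// stringify() reassembles a block from its tokens, character for character
console.log(stringify(blocks[0]));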

60
node_modules/comment-parser/src/parser/block-parser.ts generated vendored Normal file

@@ -0,0 +1,60 @@
import { Line } from '../primitives.js';
const reTag = /^@\S+/;
/**
* Groups source lines into sections representing tags.
* First section is a block description if present. Last section captures lines starting with
* the last tag to the end of the block, including dangling closing marker.
* @param {Line[]} block source lines making a single comment block
*/
export type Parser = (block: Line[]) => Line[][];
/**
* Predicate telling if string contains opening/closing escaping sequence
* @param {string} source raw source line
*/
export type Fencer = (source: string) => boolean;
/**
* `Parser` configuration options
*/
export interface Options {
// escaping sequence or predicate
fence: string | Fencer;
}
/**
* Creates configured `Parser`
* @param {Partial<Options>} options
*/
export default function getParser({
fence = '```',
}: Partial<Options> = {}): Parser {
const fencer = getFencer(fence);
const toggleFence = (source: string, isFenced: boolean): boolean =>
fencer(source) ? !isFenced : isFenced;
return function parseBlock(source: Line[]): Line[][] {
// start with description section
const sections: Line[][] = [[]];
let isFenced = false;
for (const line of source) {
if (reTag.test(line.tokens.description) && !isFenced) {
sections.push([line]);
} else {
sections[sections.length - 1].push(line);
}
isFenced = toggleFence(line.tokens.description, isFenced);
}
return sections;
};
}
function getFencer(fence: string | Fencer): Fencer {
if (typeof fence === 'string')
return (source: string) => source.split(fence).length % 2 === 0;
return fence;
}
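
The fence handling above means that an @-prefixed line inside a fenced region does not start a new tag section. A sketch of the effect through the public parse(), assuming the package entry re-exports it with the default ``` fence:

import { parse } from 'comment-parser';

const source = `/**
 * Usage:
 * \`\`\`
 * @example stays in the description while the fence is open
 * \`\`\`
 * @param {number} n the only real tag
 */`;

const [block] = parse(source);
console.log(block.tags.length); // 1
console.log(block.tags[0].tag); // "param"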

70
node_modules/comment-parser/src/parser/index.ts generated vendored Normal file

@@ -0,0 +1,70 @@
import { Block, Line, Problem, BlockMarkers, Markers } from '../primitives.js';
import { splitLines } from '../util.js';
import blockParser from './block-parser.js';
import sourceParser from './source-parser.js';
import specParser from './spec-parser.js';
import { Tokenizer } from './tokenizers/index.js';
import tokenizeTag from './tokenizers/tag.js';
import tokenizeType from './tokenizers/type.js';
import tokenizeName from './tokenizers/name.js';
import tokenizeDescription, {
getJoiner as getDescriptionJoiner,
} from './tokenizers/description.js';
export interface Options {
// start count for source line numbers
startLine: number;
// escape sequence (fence) marking wrapped content as literal for the parser
fence: string;
// block and comment description compaction strategy
spacing: 'compact' | 'preserve';
// comment description markers
markers: BlockMarkers;
// tokenizer functions extracting name, type, and description out of tag, see Tokenizer
tokenizers: Tokenizer[];
}
export type Parser = (source: string) => Block[];
export default function getParser({
startLine = 0,
fence = '```',
spacing = 'compact',
markers = Markers,
tokenizers = [
tokenizeTag(),
tokenizeType(spacing),
tokenizeName(),
tokenizeDescription(spacing),
],
}: Partial<Options> = {}): Parser {
if (startLine < 0 || startLine % 1 > 0) throw new Error('Invalid startLine');
const parseSource = sourceParser({ startLine, markers });
const parseBlock = blockParser({ fence });
const parseSpec = specParser({ tokenizers });
const joinDescription = getDescriptionJoiner(spacing);
return function (source: string): Block[] {
const blocks: Block[] = [];
for (const line of splitLines(source)) {
const lines = parseSource(line);
if (lines === null) continue;
const sections = parseBlock(lines);
const specs = sections.slice(1).map(parseSpec);
blocks.push({
description: joinDescription(sections[0], markers),
tags: specs,
source: lines,
problems: specs.reduce(
(acc: Problem[], spec) => acc.concat(spec.problems),
[]
),
});
}
return blocks;
};
}
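
The factory wires the source, block, and spec parsers together and folds each spec's problems into the block. A sketch of option handling and problem reporting, assuming the package entry exposes parse() built from this factory:

import { parse } from 'comment-parser';

// startLine shifts reported line numbers, useful when the comment text was
// extracted from a larger file
const [block] = parse('/** @param {string name (unclosed curly) */', {
  startLine: 42,
});
console.log(block.problems[0].code); // "spec:type:unpaired-curlies"
console.log(block.problems[0].line); // 42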

71
node_modules/comment-parser/src/parser/source-parser.ts generated vendored Normal file

@@ -0,0 +1,71 @@
import { Line, Tokens, BlockMarkers, Markers } from '../primitives.js';
import { seedTokens, splitSpace, splitCR } from '../util.js';
export interface Options {
startLine: number;
markers: BlockMarkers;
}
export type Parser = (source: string) => Line[] | null;
export default function getParser({
startLine = 0,
markers = Markers,
}: Partial<Options> = {}): Parser {
let block: Line[] | null = null;
let num = startLine;
return function parseSource(source: string): Line[] | null {
let rest = source;
const tokens: Tokens = seedTokens();
[tokens.lineEnd, rest] = splitCR(rest);
[tokens.start, rest] = splitSpace(rest);
if (
block === null &&
rest.startsWith(markers.start) &&
!rest.startsWith(markers.nostart)
) {
block = [];
tokens.delimiter = rest.slice(0, markers.start.length);
rest = rest.slice(markers.start.length);
[tokens.postDelimiter, rest] = splitSpace(rest);
}
if (block === null) {
num++;
return null;
}
const isClosed = rest.trimRight().endsWith(markers.end);
if (
tokens.delimiter === '' &&
rest.startsWith(markers.delim) &&
!rest.startsWith(markers.end)
) {
tokens.delimiter = markers.delim;
rest = rest.slice(markers.delim.length);
[tokens.postDelimiter, rest] = splitSpace(rest);
}
if (isClosed) {
const trimmed = rest.trimRight();
tokens.end = rest.slice(trimmed.length - markers.end.length);
rest = trimmed.slice(0, -markers.end.length);
}
tokens.description = rest;
block.push({ number: num, source, tokens });
num++;
if (isClosed) {
const result = block.slice();
block = null;
return result;
}
return null;
};
}
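
Every physical line is decomposed into positional tokens so the comment can later be reassembled verbatim; the tag, type, and name tokens are filled in afterwards by the tokenizers on the same token objects. A sketch, assuming the public parse() and an illustrative comment:

import { parse } from 'comment-parser';

const [block] = parse(`/**
   * @param {string} name  argument name
   */`);

const { tokens } = block.source[1];
console.log(JSON.stringify(tokens.start));       // "   " (the line's leading indentation)
console.log(JSON.stringify(tokens.delimiter));   // "*"
console.log(JSON.stringify(tokens.tag));         // "@param"
console.log(JSON.stringify(tokens.type));        // "{string}"
console.log(JSON.stringify(tokens.name));        // "name"
console.log(JSON.stringify(tokens.description)); // "argument name"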

20
node_modules/comment-parser/src/parser/spec-parser.ts generated vendored Normal file

@@ -0,0 +1,20 @@
import { Line, Spec } from '../primitives.js';
import { seedSpec } from '../util.js';
import { Tokenizer } from './tokenizers/index.js';
export type Parser = (source: Line[]) => Spec;
export interface Options {
tokenizers: Tokenizer[];
}
export default function getParser({ tokenizers }: Options): Parser {
return function parseSpec(source: Line[]): Spec {
let spec = seedSpec({ source });
for (const tokenize of tokenizers) {
spec = tokenize(spec);
if (spec.problems[spec.problems.length - 1]?.critical) break;
}
return spec;
};
}
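
Tokenizers run in the given order and the chain stops at the first critical problem, so trimming the list changes what ends up in a spec. A sketch that skips the type and name tokenizers, assuming the tokenizers export from the package entry:

import { parse, tokenizers } from 'comment-parser';

// With only the tag and description tokenizers, the "{...}" part is left
// inside the description instead of becoming spec.type
const [block] = parse('/** @returns {number} the count */', {
  tokenizers: [tokenizers.tag(), tokenizers.description('compact')],
});
console.log(block.tags[0].type);        // ""
console.log(block.tags[0].description); // "{number} the count"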

78
node_modules/comment-parser/src/parser/tokenizers/description.ts generated vendored Normal file

@@ -0,0 +1,78 @@
import { Spec, Line, BlockMarkers, Markers } from '../../primitives.js';
import { Tokenizer } from './index.js';
/**
* Walks over the provided lines, joining their description tokens into a single string.
* */
export type Joiner = (lines: Line[], markers?: BlockMarkers) => string;
/**
* Shortcut for standard Joiners
* compact - trim each line's description and join the non-empty lines with a single space
* preserve - preserves original whitespace and line breaks as is
*/
export type Spacing = 'compact' | 'preserve' | Joiner;
/**
* Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
* following the given spacing strategy
* @param {Spacing} spacing tells how to handle the whitespace
* @param {BlockMarkers} markers tells how to handle comment block delimitation
*/
export default function descriptionTokenizer(
spacing: Spacing = 'compact',
markers = Markers
): Tokenizer {
const join = getJoiner(spacing);
return (spec: Spec): Spec => {
spec.description = join(spec.source, markers);
return spec;
};
}
export function getJoiner(spacing: Spacing): Joiner {
if (spacing === 'compact') return compactJoiner;
if (spacing === 'preserve') return preserveJoiner;
return spacing;
}
function compactJoiner(lines: Line[], markers = Markers): string {
return lines
.map(({ tokens: { description } }: Line) => description.trim())
.filter((description) => description !== '')
.join(' ');
}
const lineNo = (num: number, { tokens }: Line, i: number) =>
tokens.type === '' ? num : i;
const getDescription = ({ tokens }: Line) =>
(tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1)) +
tokens.description;
function preserveJoiner(lines: Line[], markers = Markers): string {
if (lines.length === 0) return '';
// skip the opening line with no description
if (
lines[0].tokens.description === '' &&
lines[0].tokens.delimiter === markers.start
)
lines = lines.slice(1);
// skip the closing line with no description
const lastLine = lines[lines.length - 1];
if (
lastLine !== undefined &&
lastLine.tokens.description === '' &&
lastLine.tokens.end.endsWith(markers.end)
)
lines = lines.slice(0, -1);
// description starts at the last line of type definition
lines = lines.slice(lines.reduce(lineNo, 0));
return lines.map(getDescription).join('\n');
}
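
The two standard joiners differ only in how much of the original layout survives. A sketch comparing them, assuming the public parse() and its spacing option:

import { parse } from 'comment-parser';

const source = `/**
 * First line of the description,
 *   indented continuation.
 * @since 1.0
 */`;

// "compact" (the default) trims each line and joins with single spaces
console.log(parse(source)[0].description);
// "First line of the description, indented continuation."

// "preserve" keeps the line break and the extra indentation, printing two lines:
// First line of the description,
//   indented continuation.
console.log(parse(source, { spacing: 'preserve' })[0].description);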

8
node_modules/comment-parser/src/parser/tokenizers/index.ts generated vendored Normal file

@@ -0,0 +1,8 @@
import { Spec } from '../../primitives.js';
/**
* Splits `spec.lines[].tokens.description` into other tokens,
* and populates `spec.{tag, name, type, description}`. Tokenizers are invoked in a chain,
* so the operations listed above can be split across separate tokenizers.
*/
export type Tokenizer = (spec: Spec) => Spec;
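
Since a Tokenizer is just a Spec-to-Spec function, custom steps can be mixed into the chain passed to the parser. A hedged sketch with an illustrative tokenizer (lowerCaseTag is not part of the library) that normalizes the tag after the standard tag tokenizer has extracted it:

import { parse, tokenizers, Spec } from 'comment-parser';

const lowerCaseTag = (spec: Spec): Spec => {
  spec.tag = spec.tag.toLowerCase();
  return spec;
};

const [block] = parse('/** @Param {string} name desc */', {
  tokenizers: [
    tokenizers.tag(),
    lowerCaseTag,
    tokenizers.type('compact'),
    tokenizers.name(),
    tokenizers.description('compact'),
  ],
});
console.log(block.tags[0].tag); // "param"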

112
node_modules/comment-parser/src/parser/tokenizers/name.ts generated vendored Normal file

@@ -0,0 +1,112 @@
import { Spec, Line } from '../../primitives.js';
import { splitSpace, isSpace } from '../../util.js';
import { Tokenizer } from './index.js';
const isQuoted = (s: string) => s && s.startsWith('"') && s.endsWith('"');
/**
* Splits remaining `spec.lines[].tokens.description` into `name` and `description` tokens,
* and populates the `spec.name`
*/
export default function nameTokenizer(): Tokenizer {
const typeEnd = (num: number, { tokens }: Line, i: number) =>
tokens.type === '' ? num : i;
return (spec: Spec): Spec => {
// look for the name in the line where {type} ends
const { tokens } = spec.source[spec.source.reduce(typeEnd, 0)];
const source = tokens.description.trimLeft();
const quotedGroups = source.split('"');
// if it starts with quoted group, assume it is a literal
if (
quotedGroups.length > 1 &&
quotedGroups[0] === '' &&
quotedGroups.length % 2 === 1
) {
spec.name = quotedGroups[1];
tokens.name = `"${quotedGroups[1]}"`;
[tokens.postName, tokens.description] = splitSpace(
source.slice(tokens.name.length)
);
return spec;
}
let brackets = 0;
let name = '';
let optional = false;
let defaultValue: string;
// assume name is non-space string or anything wrapped into brackets
for (const ch of source) {
if (brackets === 0 && isSpace(ch)) break;
if (ch === '[') brackets++;
if (ch === ']') brackets--;
name += ch;
}
if (brackets !== 0) {
spec.problems.push({
code: 'spec:name:unpaired-brackets',
message: 'unpaired brackets',
line: spec.source[0].number,
critical: true,
});
return spec;
}
const nameToken = name;
if (name[0] === '[' && name[name.length - 1] === ']') {
optional = true;
name = name.slice(1, -1);
const parts = name.split('=');
name = parts[0].trim();
if (parts[1] !== undefined)
defaultValue = parts.slice(1).join('=').trim();
if (name === '') {
spec.problems.push({
code: 'spec:name:empty-name',
message: 'empty name',
line: spec.source[0].number,
critical: true,
});
return spec;
}
if (defaultValue === '') {
spec.problems.push({
code: 'spec:name:empty-default',
message: 'empty default value',
line: spec.source[0].number,
critical: true,
});
return spec;
}
// has "=" and is not a string, except for "=>"
if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
spec.problems.push({
code: 'spec:name:invalid-default',
message: 'invalid default value syntax',
line: spec.source[0].number,
critical: true,
});
return spec;
}
}
spec.optional = optional;
spec.name = name;
tokens.name = nameToken;
if (defaultValue !== undefined) spec.default = defaultValue;
[tokens.postName, tokens.description] = splitSpace(
source.slice(tokens.name.length)
);
return spec;
};
}
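
Square brackets mark an optional name, and an "=" inside them supplies a default value; both end up on the spec rather than in the name. A sketch, assuming the public parse():

import { parse } from 'comment-parser';

const [block] = parse('/** @param {number} [count=1] how many times */');
const spec = block.tags[0];
console.log(spec.name);     // "count"
console.log(spec.optional); // true
console.log(spec.default);  // "1"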

30
node_modules/comment-parser/src/parser/tokenizers/tag.ts generated vendored Normal file

@@ -0,0 +1,30 @@
import { Spec } from '../../primitives.js';
import { Tokenizer } from './index.js';
/**
* Splits the `@prefix` from remaining `Spec.lines[].token.description` into the `tag` token,
* and populates `spec.tag`
*/
export default function tagTokenizer(): Tokenizer {
return (spec: Spec): Spec => {
const { tokens } = spec.source[0];
const match = tokens.description.match(/\s*(@(\S+))(\s*)/);
if (match === null) {
spec.problems.push({
code: 'spec:tag:prefix',
message: 'tag should start with "@" symbol',
line: spec.source[0].number,
critical: true,
});
return spec;
}
tokens.tag = match[1];
tokens.postTag = match[3];
tokens.description = tokens.description.slice(match[0].length);
spec.tag = match[2];
return spec;
};
}

91
node_modules/comment-parser/src/parser/tokenizers/type.ts generated vendored Normal file

@@ -0,0 +1,91 @@
import { Spec, Tokens } from '../../primitives.js';
import { splitSpace } from '../../util.js';
import { Tokenizer } from './index.js';
/**
* Joiner is a function taking collected type token string parts,
* and joining them together. In most cases this will be
* a single piece like {type-name}, but a type may span multiple lines
* ```
* @tag {function(
* number,
* string
* )}
* ```
*/
export type Joiner = (parts: string[]) => string;
/**
* Shortcut for standard Joiners
* compact - trim each collected piece and concatenate them without separators
* preserve - concat as is
*/
export type Spacing = 'compact' | 'preserve' | Joiner;
/**
* Splits remaining `Spec.lines[].tokens.description` into `type` and `description`
* tokens and populates `Spec.type`
*
* @param {Spacing} spacing tells how to handle whitespace
* for type values spanning multiple lines
*/
export default function typeTokenizer(spacing: Spacing = 'compact'): Tokenizer {
const join = getJoiner(spacing);
return (spec: Spec): Spec => {
let curlies = 0;
let lines: [Tokens, string][] = [];
for (const [i, { tokens }] of spec.source.entries()) {
let type = '';
if (i === 0 && tokens.description[0] !== '{') return spec;
for (const ch of tokens.description) {
if (ch === '{') curlies++;
if (ch === '}') curlies--;
type += ch;
if (curlies === 0) break;
}
lines.push([tokens, type]);
if (curlies === 0) break;
}
if (curlies !== 0) {
spec.problems.push({
code: 'spec:type:unpaired-curlies',
message: 'unpaired curlies',
line: spec.source[0].number,
critical: true,
});
return spec;
}
const parts: string[] = [];
const offset = lines[0][0].postDelimiter.length;
for (const [i, [tokens, type]] of lines.entries()) {
tokens.type = type;
if (i > 0) {
tokens.type = tokens.postDelimiter.slice(offset) + type;
tokens.postDelimiter = tokens.postDelimiter.slice(0, offset);
}
[tokens.postType, tokens.description] = splitSpace(
tokens.description.slice(type.length)
);
parts.push(tokens.type);
}
parts[0] = parts[0].slice(1);
parts[parts.length - 1] = parts[parts.length - 1].slice(0, -1);
spec.type = join(parts);
return spec;
};
}
const trim = (x: string) => x.trim();
function getJoiner(spacing: Spacing): Joiner {
if (spacing === 'compact') return (t: string[]) => t.map(trim).join('');
else if (spacing === 'preserve') return (t: string[]) => t.join('\n');
else return spacing;
}
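
A multi-line type is collected until the curly braces balance, then joined according to the spacing option. A sketch with the default compact joiner, assuming the public parse():

import { parse } from 'comment-parser';

const source = `/**
 * @type {function(
 *   number,
 *   string
 * )}
 */`;

// compact trims each collected piece and concatenates them
console.log(parse(source)[0].tags[0].type); // "function(number,string)"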

69
node_modules/comment-parser/src/primitives.ts generated vendored Normal file

@@ -0,0 +1,69 @@
/** @deprecated */
export enum Markers {
start = '/**',
nostart = '/***',
delim = '*',
end = '*/',
}
export interface BlockMarkers {
start: string;
nostart: string;
delim: string;
end: string;
}
export interface Block {
description: string;
tags: Spec[];
source: Line[];
problems: Problem[];
}
export interface Spec {
tag: string;
name: string;
default?: string;
type: string;
optional: boolean;
description: string;
problems: Problem[];
source: Line[];
}
export interface Line {
number: number;
source: string;
tokens: Tokens;
}
export interface Tokens {
start: string;
delimiter: string;
postDelimiter: string;
tag: string;
postTag: string;
name: string;
postName: string;
type: string;
postType: string;
description: string;
end: string;
lineEnd: string;
}
export interface Problem {
code:
| 'unhandled'
| 'custom'
| 'source:startline'
| 'spec:tag:prefix'
| 'spec:type:unpaired-curlies'
| 'spec:name:unpaired-brackets'
| 'spec:name:empty-name'
| 'spec:name:invalid-default'
| 'spec:name:empty-default';
message: string;
line: number;
critical: boolean;
}

25
node_modules/comment-parser/src/stringifier/index.ts generated vendored Normal file

@@ -0,0 +1,25 @@
import { Block, Tokens } from '../primitives.js';
export type Stringifier = (block: Block) => string;
function join(tokens: Tokens): string {
return (
tokens.start +
tokens.delimiter +
tokens.postDelimiter +
tokens.tag +
tokens.postTag +
tokens.type +
tokens.postType +
tokens.name +
tokens.postName +
tokens.description +
tokens.end +
tokens.lineEnd
);
}
export default function getStringifier(): Stringifier {
return (block: Block): string =>
block.source.map(({ tokens }) => join(tokens)).join('\n');
}
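
Because every character of a line is stored in some token, stringification is an exact inverse of parsing as long as the tokens are left untouched. A minimal round-trip sketch, assuming the public parse() and stringify():

import { parse, stringify } from 'comment-parser';

const source = `/**
 * @param {string} name
 */`;

const [block] = parse(source);
console.log(stringify(block) === source); // true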

68
node_modules/comment-parser/src/stringifier/inspect.ts generated vendored Normal file

@@ -0,0 +1,68 @@
import { Block, Tokens } from '../primitives.js';
import { isSpace } from '../util.js';
interface Width {
line: number;
start: number;
delimiter: number;
postDelimiter: number;
tag: number;
postTag: number;
name: number;
postName: number;
type: number;
postType: number;
description: number;
end: number;
lineEnd: number;
}
const zeroWidth = {
line: 0,
start: 0,
delimiter: 0,
postDelimiter: 0,
tag: 0,
postTag: 0,
name: 0,
postName: 0,
type: 0,
postType: 0,
description: 0,
end: 0,
lineEnd: 0,
};
const headers = { lineEnd: 'CR' };
const fields = Object.keys(zeroWidth);
const repr = (x: string) => (isSpace(x) ? `{${x.length}}` : x);
const frame = (line: string[]) => '|' + line.join('|') + '|';
const align = (width: Width, tokens: Tokens): string[] =>
Object.keys(tokens).map((k) => repr(tokens[k]).padEnd(width[k]));
export default function inspect({ source }: Block): string {
if (source.length === 0) return '';
const width: Width = { ...zeroWidth };
for (const f of fields) width[f] = (headers[f] ?? f).length;
for (const { number, tokens } of source) {
width.line = Math.max(width.line, number.toString().length);
for (const k in tokens)
width[k] = Math.max(width[k], repr(tokens[k]).length);
}
const lines: string[][] = [[], []];
for (const f of fields) lines[0].push((headers[f] ?? f).padEnd(width[f]));
for (const f of fields) lines[1].push('-'.padEnd(width[f], '-'));
for (const { number, tokens } of source) {
const line = number.toString().padStart(width.line);
lines.push([line, ...align(width, tokens)]);
}
return lines.map(frame).join('\n');
}
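
inspect() renders a block's token table for debugging: one row per source line, one column per token, with all-whitespace tokens shown as their length in braces. A quick sketch, assuming the inspect export from the package entry:

import { parse, inspect } from 'comment-parser';

const [block] = parse('/** @param {string} name description */');
// Prints a pipe-delimited table; for example, a single-space token shows as {1}
console.log(inspect(block));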

109
node_modules/comment-parser/src/transforms/align.ts generated vendored Normal file

@@ -0,0 +1,109 @@
import { Transform } from './index.js';
import { BlockMarkers, Block, Line, Markers } from '../primitives.js';
import { rewireSource } from '../util.js';
interface Width {
start: number;
tag: number;
type: number;
name: number;
}
const zeroWidth = {
start: 0,
tag: 0,
type: 0,
name: 0,
};
const getWidth =
(markers = Markers) =>
(w: Width, { tokens: t }: Line) => ({
start: t.delimiter === markers.start ? t.start.length : w.start,
tag: Math.max(w.tag, t.tag.length),
type: Math.max(w.type, t.type.length),
name: Math.max(w.name, t.name.length),
});
const space = (len: number) => ''.padStart(len, ' ');
export default function align(markers = Markers): Transform {
let intoTags = false;
let w: Width;
function update(line: Line): Line {
const tokens = { ...line.tokens };
if (tokens.tag !== '') intoTags = true;
const isEmpty =
tokens.tag === '' &&
tokens.name === '' &&
tokens.type === '' &&
tokens.description === '';
// dangling '*/'
if (tokens.end === markers.end && isEmpty) {
tokens.start = space(w.start + 1);
return { ...line, tokens };
}
switch (tokens.delimiter) {
case markers.start:
tokens.start = space(w.start);
break;
case markers.delim:
tokens.start = space(w.start + 1);
break;
default:
tokens.delimiter = '';
tokens.start = space(w.start + 2); // compensate delimiter
}
if (!intoTags) {
tokens.postDelimiter = tokens.description === '' ? '' : ' ';
return { ...line, tokens };
}
const nothingAfter = {
delim: false,
tag: false,
type: false,
name: false,
};
if (tokens.description === '') {
nothingAfter.name = true;
tokens.postName = '';
if (tokens.name === '') {
nothingAfter.type = true;
tokens.postType = '';
if (tokens.type === '') {
nothingAfter.tag = true;
tokens.postTag = '';
if (tokens.tag === '') {
nothingAfter.delim = true;
}
}
}
}
tokens.postDelimiter = nothingAfter.delim ? '' : ' ';
if (!nothingAfter.tag)
tokens.postTag = space(w.tag - tokens.tag.length + 1);
if (!nothingAfter.type)
tokens.postType = space(w.type - tokens.type.length + 1);
if (!nothingAfter.name)
tokens.postName = space(w.name - tokens.name.length + 1);
return { ...line, tokens };
}
return ({ source, ...fields }: Block): Block => {
w = source.reduce(getWidth(markers), { ...zeroWidth });
return rewireSource({ ...fields, source: source.map(update) });
};
}
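
align() recomputes the padding tokens so tags, types, and names line up in columns across the block, while leaving the text itself intact. A sketch of the effect, assuming the transforms export from the package entry; the aligned output in the comments reflects the widths of this particular sample:

import { parse, stringify, transforms } from 'comment-parser';

const source = `/**
 * @param {string} name argument name
 * @param {number} verbosity log level
 */`;

const aligned = transforms.align()(parse(source)[0]);
console.log(stringify(aligned));
// /**
//  * @param {string} name      argument name
//  * @param {number} verbosity log level
//  */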

31
node_modules/comment-parser/src/transforms/crlf.ts generated vendored Normal file

@@ -0,0 +1,31 @@
import { Transform } from './index.js';
import { Block, Line } from '../primitives.js';
import { rewireSource } from '../util.js';
const order = [
'end',
'description',
'postType',
'type',
'postName',
'name',
'postTag',
'tag',
'postDelimiter',
'delimiter',
'start',
];
export type Ending = 'LF' | 'CRLF';
export default function crlf(ending: Ending): Transform {
function update(line: Line): Line {
return {
...line,
tokens: { ...line.tokens, lineEnd: ending === 'LF' ? '' : '\r' },
};
}
return ({ source, ...fields }: Block): Block =>
rewireSource({ ...fields, source: source.map(update) });
}

28
node_modules/comment-parser/src/transforms/indent.ts generated vendored Normal file

@@ -0,0 +1,28 @@
import { Transform } from './index.js';
import { Block, Line } from '../primitives.js';
import { rewireSource } from '../util.js';
const pull = (offset: number) => (str: string) => str.slice(offset);
const push = (offset: number) => {
const space = ''.padStart(offset, ' ');
return (str: string) => str + space;
};
export default function indent(pos: number): Transform {
let shift: (string: string) => string;
const pad = (start: string) => {
if (shift === undefined) {
const offset = pos - start.length;
shift = offset > 0 ? push(offset) : pull(-offset);
}
return shift(start);
};
const update = (line: Line): Line => ({
...line,
tokens: { ...line.tokens, start: pad(line.tokens.start) },
});
return ({ source, ...fields }: Block): Block =>
rewireSource({ ...fields, source: source.map(update) });
}
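
indent() measures the offset once, from the first line it sees, and then shifts every line's start token by the same amount, so relative indentation inside the block is preserved. A sketch, assuming the transforms export:

import { parse, stringify, transforms } from 'comment-parser';

const [block] = parse(`/**
 * @since 1.0
 */`);

// Shift the whole block so its first line starts at column 4
console.log(JSON.stringify(stringify(transforms.indent(4)(block))));
// "    /**\n     * @since 1.0\n     */"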

8
node_modules/comment-parser/src/transforms/index.ts generated vendored Normal file

@@ -0,0 +1,8 @@
import { Block } from '../primitives.js';
export type Transform = (Block: Block) => Block;
export function flow(...transforms: Transform[]): Transform {
return (block: Block): Block =>
transforms.reduce((block, t) => t(block), block);
}

99
node_modules/comment-parser/src/util.ts generated vendored Normal file

@@ -0,0 +1,99 @@
import { Block, Tokens, Spec, Line } from './primitives.js';
export function isSpace(source: string): boolean {
return /^\s+$/.test(source);
}
export function hasCR(source: string): boolean {
return /\r$/.test(source);
}
export function splitCR(source: string): [string, string] {
const matches = source.match(/\r+$/);
return matches == null
? ['', source]
: [source.slice(-matches[0].length), source.slice(0, -matches[0].length)];
}
export function splitSpace(source: string): [string, string] {
const matches = source.match(/^\s+/);
return matches == null
? ['', source]
: [source.slice(0, matches[0].length), source.slice(matches[0].length)];
}
export function splitLines(source: string): string[] {
return source.split(/\n/);
}
export function seedBlock(block: Partial<Block> = {}): Block {
return {
description: '',
tags: [],
source: [],
problems: [],
...block,
};
}
export function seedSpec(spec: Partial<Spec> = {}): Spec {
return {
tag: '',
name: '',
type: '',
optional: false,
description: '',
problems: [],
source: [],
...spec,
};
}
export function seedTokens(tokens: Partial<Tokens> = {}): Tokens {
return {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '',
lineEnd: '',
...tokens,
};
}
/**
* Ensures Block.tags[].source contains references to the Block.source items,
* using Block.source as a source of truth. This is a counterpart of rewireSpecs.
* @param block parsed comments block
*/
export function rewireSource(block: Block): Block {
const source = block.source.reduce(
(acc, line) => acc.set(line.number, line),
new Map<number, Line>()
);
for (const spec of block.tags) {
spec.source = spec.source.map((line) => source.get(line.number));
}
return block;
}
/**
* Ensures Block.source contains references to the Block.tags[].source items,
* using Block.tags[].source as a source of truth. This is a counterpart of rewireSource.
* @param block parsed comments block
*/
export function rewireSpecs(block: Block): Block {
const source = block.tags.reduce(
(acc, spec) =>
spec.source.reduce((acc, line) => acc.set(line.number, line), acc),
new Map<number, Line>()
);
block.source = block.source.map((line) => source.get(line.number) || line);
return block;
}
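
The seed helpers fill in every required field, which makes it possible to build a block programmatically and serialize it. A hedged sketch; the block content is illustrative and assumes the util and stringify exports from the package entry:

import { stringify, util } from 'comment-parser';

const { seedBlock, seedTokens } = util;

const block = seedBlock({
  source: [
    { number: 0, source: '/**', tokens: seedTokens({ delimiter: '/**' }) },
    {
      number: 1,
      source: ' * @todo write something',
      tokens: seedTokens({
        start: ' ',
        delimiter: '*',
        postDelimiter: ' ',
        tag: '@todo',
        postTag: ' ',
        description: 'write something',
      }),
    },
    { number: 2, source: ' */', tokens: seedTokens({ start: ' ', end: '*/' }) },
  ],
});

console.log(stringify(block));
// /**
//  * @todo write something
//  */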