Initial import with skill sheet working

This commit is contained in:
2024-12-04 00:11:23 +01:00
commit 9050c80ab4
4488 changed files with 671048 additions and 0 deletions

View File

@ -0,0 +1,24 @@
import { Line } from '../primitives.js';
/**
 * Groups source lines into sections representing tags.
 * The first section is the block description, if present. The last section captures lines
 * from the last tag to the end of the block, including any dangling closing marker.
 * @param {Line[]} block source lines making up a single comment block
 */
export type Parser = (block: Line[]) => Line[][];
/**
 * Predicate telling if a string contains an opening/closing fence (escaping) sequence
 * @param {string} source raw source line
 */
export type Fencer = (source: string) => boolean;
/**
 * `Parser` configuration options
 */
export interface Options {
/** fence marker string or custom fence-detecting predicate (implementation defaults to "```") */
fence: string | Fencer;
}
/**
 * Creates a configured `Parser`
 * @param {Partial<Options>} options
 */
export default function getParser({ fence, }?: Partial<Options>): Parser;

29
node_modules/comment-parser/es6/parser/block-parser.js generated vendored Normal file
View File

@ -0,0 +1,29 @@
const reTag = /^@\S+/;
/**
 * Creates a configured `Parser` that splits a comment block's lines into
 * sections: a leading description section followed by one section per tag.
 * Lines inside a fenced region never start a new section, so `@`-prefixed
 * text inside code fences stays attached to the current section.
 * @param {Partial<Options>} options `fence` is a marker string or predicate
 */
export default function getParser({ fence = '```', } = {}) {
    const isFenceLine = getFencer(fence);
    // A fence line flips the fenced/unfenced state; any other line keeps it.
    const flip = (text, fenced) => (isFenceLine(text) ? !fenced : fenced);
    return function parseBlock(source) {
        // The first section always exists and holds the block description.
        const sections = [[]];
        let fenced = false;
        for (const line of source) {
            const text = line.tokens.description;
            if (!fenced && reTag.test(text)) {
                // A tag outside a fence opens a fresh section.
                sections.push([line]);
            } else {
                sections[sections.length - 1].push(line);
            }
            fenced = flip(text, fenced);
        }
        return sections;
    };
}
/**
 * Normalizes the `fence` option into a predicate. A string fence matches
 * when the line contains an odd number of occurrences of the marker.
 */
function getFencer(fence) {
    if (typeof fence === 'string') {
        // split() yields (occurrences + 1) parts, so an even length means an odd count.
        return (text) => text.split(fence).length % 2 === 0;
    }
    return fence;
}

11
node_modules/comment-parser/es6/parser/index.d.ts generated vendored Normal file
View File

@ -0,0 +1,11 @@
import { Block, BlockMarkers } from '../primitives.js';
import { Tokenizer } from './tokenizers/index.js';
/**
 * `Parser` configuration options.
 * NOTE(review): defaults are applied by the implementation (index.js):
 * startLine 0, fence "```", spacing 'compact', markers Markers, and the
 * tag/type/name/description tokenizer chain.
 */
export interface Options {
/** zero-based line number the source is assumed to start at; must be a non-negative integer */
startLine: number;
/** marker used to detect fenced content inside comment blocks */
fence: string;
/** description-joining strategy */
spacing: 'compact' | 'preserve';
/** comment-block delimiters (start/delim/end markers) */
markers: BlockMarkers;
/** tokenizer chain applied to each tag section */
tokenizers: Tokenizer[];
}
/** Parses raw source text into a list of comment `Block`s. */
export type Parser = (source: string) => Block[];
/** Creates a configured `Parser`. */
export default function getParser({ startLine, fence, spacing, markers, tokenizers, }?: Partial<Options>): Parser;

39
node_modules/comment-parser/es6/parser/index.js generated vendored Normal file
View File

@ -0,0 +1,39 @@
import { Markers } from '../primitives.js';
import { splitLines } from '../util.js';
import blockParser from './block-parser.js';
import sourceParser from './source-parser.js';
import specParser from './spec-parser.js';
import tokenizeTag from './tokenizers/tag.js';
import tokenizeType from './tokenizers/type.js';
import tokenizeName from './tokenizers/name.js';
import tokenizeDescription, { getJoiner as getDescriptionJoiner, } from './tokenizers/description.js';
/**
 * Creates a configured comment `Parser`: feeds each raw line through the
 * source parser, splits completed blocks into sections, runs the tokenizer
 * chain on each tag section, and aggregates the results into `Block`s.
 * @throws {Error} when `startLine` is negative or not an integer
 */
export default function getParser({ startLine = 0, fence = '```', spacing = 'compact', markers = Markers, tokenizers = [
    tokenizeTag(),
    tokenizeType(spacing),
    tokenizeName(),
    tokenizeDescription(spacing),
], } = {}) {
    // startLine must be a non-negative integer
    if (startLine < 0 || startLine % 1 > 0)
        throw new Error('Invalid startLine');
    const parseSource = sourceParser({ startLine, markers });
    const parseBlock = blockParser({ fence });
    const parseSpec = specParser({ tokenizers });
    const joinDescription = getDescriptionJoiner(spacing);
    return function (source) {
        const blocks = [];
        for (const rawLine of splitLines(source)) {
            // parseSource buffers lines and yields them only when a block closes
            const lines = parseSource(rawLine);
            if (lines === null)
                continue;
            // section 0 is the description; each later section is one tag
            const sections = parseBlock(lines);
            const tags = sections.slice(1).map(parseSpec);
            const problems = [];
            for (const tag of tags)
                problems.push(...tag.problems);
            blocks.push({
                description: joinDescription(sections[0], markers),
                tags,
                source: lines,
                problems,
            });
        }
        return blocks;
    };
}

View File

@ -0,0 +1,7 @@
import { Line, BlockMarkers } from '../primitives.js';
/** `Parser` configuration options */
export interface Options {
/** zero-based number assigned to the first line fed to the parser */
startLine: number;
/** comment-block delimiters (start/delim/end markers) */
markers: BlockMarkers;
}
/**
 * Stateful line parser: call it once per raw source line; it returns the
 * accumulated `Line[]` when the comment block it is reading closes, and
 * `null` for every other line.
 */
export type Parser = (source: string) => Line[] | null;
/** Creates a configured `Parser`. */
export default function getParser({ startLine, markers, }?: Partial<Options>): Parser;

View File

@ -0,0 +1,46 @@
import { Markers } from '../primitives.js';
import { seedTokens, splitSpace, splitCR } from '../util.js';
/**
 * Creates a stateful line parser. Call the returned function once per raw
 * source line; it returns the accumulated Line[] when the comment block
 * being read closes, and null for every other line.
 */
export default function getParser({ startLine = 0, markers = Markers, } = {}) {
    // Lines of the comment block currently being read; null while outside a block.
    let block = null;
    // Running line number, seeded from options.
    let num = startLine;
    return function parseSource(source) {
        let rest = source;
        const tokens = seedTokens();
        // Peel off the trailing CR and leading whitespace into their own tokens.
        [tokens.lineEnd, rest] = splitCR(rest);
        [tokens.start, rest] = splitSpace(rest);
        // Outside a block: a start marker that is not the "nostart" marker
        // (e.g. "/**" but not "/***") opens a new block.
        if (block === null &&
            rest.startsWith(markers.start) &&
            !rest.startsWith(markers.nostart)) {
            block = [];
            tokens.delimiter = rest.slice(0, markers.start.length);
            rest = rest.slice(markers.start.length);
            [tokens.postDelimiter, rest] = splitSpace(rest);
        }
        // Still outside a block: count the line and report nothing.
        if (block === null) {
            num++;
            return null;
        }
        const isClosed = rest.trimRight().endsWith(markers.end);
        // Middle lines may carry a delimiter (e.g. "*"), unless the delimiter
        // was already consumed above or the line starts with the end marker.
        if (tokens.delimiter === '' &&
            rest.startsWith(markers.delim) &&
            !rest.startsWith(markers.end)) {
            tokens.delimiter = markers.delim;
            rest = rest.slice(markers.delim.length);
            [tokens.postDelimiter, rest] = splitSpace(rest);
        }
        // Move the closing marker (plus any whitespace after it) into `end`.
        if (isClosed) {
            const trimmed = rest.trimRight();
            tokens.end = rest.slice(trimmed.length - markers.end.length);
            rest = trimmed.slice(0, -markers.end.length);
        }
        tokens.description = rest;
        block.push({ number: num, source, tokens });
        num++;
        // A closed block is handed back as a copy and the state is reset.
        if (isClosed) {
            const result = block.slice();
            block = null;
            return result;
        }
        return null;
    };
}

View File

@ -0,0 +1,7 @@
import { Line, Spec } from '../primitives.js';
import { Tokenizer } from './tokenizers/index.js';
/** Converts a tag section's lines into a `Spec` by running the tokenizer chain. */
export type Parser = (source: Line[]) => Spec;
/** `Parser` configuration options */
export interface Options {
/** tokenizers applied in order; the chain stops early on a critical problem */
tokenizers: Tokenizer[];
}
/** Creates a configured `Parser`. */
export default function getParser({ tokenizers }: Options): Parser;

13
node_modules/comment-parser/es6/parser/spec-parser.js generated vendored Normal file
View File

@ -0,0 +1,13 @@
import { seedSpec } from '../util.js';
/**
 * Creates a `Parser` that turns a tag section's lines into a `Spec` by
 * threading a seeded spec through the configured tokenizers. The chain
 * stops as soon as a tokenizer records a critical problem.
 */
export default function getParser({ tokenizers }) {
    return function parseSpec(source) {
        let spec = seedSpec({ source });
        for (const tokenize of tokenizers) {
            spec = tokenize(spec);
            // Abort the chain if the most recently recorded problem is critical.
            const lastProblem = spec.problems[spec.problems.length - 1];
            if (lastProblem?.critical)
                break;
        }
        return spec;
    };
}

View File

@ -0,0 +1,20 @@
import { Line, BlockMarkers, Markers } from '../../primitives.js';
import { Tokenizer } from './index.js';
/**
 * Walks over the provided lines joining the description token into a single string.
 */
export type Joiner = (lines: Line[], markers?: BlockMarkers) => string;
/**
 * Shortcut for standard Joiners:
 * compact - strips surrounding whitespace and concatenates lines with single spaces
 * preserve - preserves original whitespace and line breaks as is
 */
export type Spacing = 'compact' | 'preserve' | Joiner;
/**
 * Makes no changes to `spec.lines[].tokens` but joins them into `spec.description`
 * following the given spacing strategy
 * @param {Spacing} spacing tells how to handle the whitespace
 * @param {BlockMarkers} markers tells how to handle comment block delimitation
 */
export default function descriptionTokenizer(spacing?: Spacing, markers?: typeof Markers): Tokenizer;
export declare function getJoiner(spacing: Spacing): Joiner;

View File

@ -0,0 +1,47 @@
import { Markers } from '../../primitives.js';
/**
 * Makes no changes to `spec.lines[].tokens` but joins their descriptions into
 * `spec.description` following the given spacing strategy.
 * @param {Spacing} spacing tells how to handle the whitespace
 * @param {BlockMarkers} markers tells how to handle comment block delimitation
 */
export default function descriptionTokenizer(spacing = 'compact', markers = Markers) {
    const join = getJoiner(spacing);
    return (spec) => {
        spec.description = join(spec.source, markers);
        return spec;
    };
}
/** Resolves a Spacing shortcut into a concrete Joiner function. */
export function getJoiner(spacing) {
    switch (spacing) {
        case 'compact':
            return compactJoiner;
        case 'preserve':
            return preserveJoiner;
        default:
            return spacing;
    }
}
// Trims every line's description, drops the empty ones, and glues the rest
// together with single spaces.
function compactJoiner(lines, markers = Markers) {
    const pieces = [];
    for (const line of lines) {
        const text = line.tokens.description.trim();
        if (text !== '')
            pieces.push(text);
    }
    return pieces.join(' ');
}
// Reducer remembering the index of the last line that still carries a type
// token, so the description is taken from that line onward.
const lineNo = (num, { tokens }, i) => tokens.type === '' ? num : i;
// Reconstructs a line's visible description, re-attaching the whitespace that
// preceded it (`start` for undelimited lines, the post-delimiter tail otherwise).
const getDescription = ({ tokens }) => (tokens.delimiter === '' ? tokens.start : tokens.postDelimiter.slice(1)) +
    tokens.description;
// Keeps the original line breaks and inner whitespace of the description.
function preserveJoiner(lines, markers = Markers) {
    if (lines.length === 0)
        return '';
    // drop the opening line when it carries no description of its own
    const [first] = lines;
    if (first.tokens.description === '' &&
        first.tokens.delimiter === markers.start)
        lines = lines.slice(1);
    // drop the closing line when it carries no description of its own
    const last = lines[lines.length - 1];
    if (last !== undefined &&
        last.tokens.description === '' &&
        last.tokens.end.endsWith(markers.end))
        lines = lines.slice(0, -1);
    // the description starts on the last line of the type definition
    lines = lines.slice(lines.reduce(lineNo, 0));
    return lines.map(getDescription).join('\n');
}

View File

@ -0,0 +1,7 @@
import { Spec } from '../../primitives.js';
/**
 * Splits `spec.lines[].token.description` into other tokens,
 * and populates spec.{tag, name, type, description}. Invoked in a chain
 * with other tokenizers; the operations listed above can be moved into
 * separate tokenizers
 */
export type Tokenizer = (spec: Spec) => Spec;

View File

@ -0,0 +1 @@
export {};

View File

@ -0,0 +1,6 @@
import { Tokenizer } from './index.js';
/**
 * Splits the remaining `spec.lines[].tokens.description` into `name` and `description` tokens,
 * and populates `spec.name`
 */
export default function nameTokenizer(): Tokenizer;

View File

@ -0,0 +1,91 @@
import { splitSpace, isSpace } from '../../util.js';
// True when the string is wrapped in double quotes, e.g. "some name".
const isQuoted = (s) => s && s.startsWith('"') && s.endsWith('"');
/**
 * Splits remaining `spec.lines[].tokens.description` into `name` and `description` tokens,
 * and populates `spec.name`, `spec.optional`, and `spec.default`.
 * Records a critical problem on `spec.problems` for unpaired brackets,
 * empty names, empty defaults, and invalid default-value syntax.
 */
export default function nameTokenizer() {
    // Reducer locating the last line that still holds a type token; the name
    // is expected to begin on the line where the {type} ends.
    const typeEnd = (num, { tokens }, i) => tokens.type === '' ? num : i;
    return (spec) => {
        // look for the name in the line where {type} ends
        const { tokens } = spec.source[spec.source.reduce(typeEnd, 0)];
        const source = tokens.description.trimLeft();
        const quotedGroups = source.split('"');
        // if it starts with a quoted group, assume it is a literal name like "some name"
        if (quotedGroups.length > 1 &&
            quotedGroups[0] === '' &&
            quotedGroups.length % 2 === 1) {
            spec.name = quotedGroups[1];
            tokens.name = `"${quotedGroups[1]}"`;
            [tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
            return spec;
        }
        let brackets = 0;
        let name = '';
        let optional = false;
        let defaultValue;
        // assume the name is a non-space string or anything wrapped into brackets
        for (const ch of source) {
            if (brackets === 0 && isSpace(ch))
                break;
            if (ch === '[')
                brackets++;
            if (ch === ']')
                brackets--;
            name += ch;
        }
        // non-zero balance means "[" / "]" were not properly paired
        if (brackets !== 0) {
            spec.problems.push({
                code: 'spec:name:unpaired-brackets',
                message: 'unpaired brackets',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        const nameToken = name;
        // "[name]" marks an optional parameter, possibly with "[name=default]"
        if (name[0] === '[' && name[name.length - 1] === ']') {
            optional = true;
            name = name.slice(1, -1);
            const parts = name.split('=');
            name = parts[0].trim();
            // re-join in case the default value itself contains "="
            if (parts[1] !== undefined)
                defaultValue = parts.slice(1).join('=').trim();
            if (name === '') {
                spec.problems.push({
                    code: 'spec:name:empty-name',
                    message: 'empty name',
                    line: spec.source[0].number,
                    critical: true,
                });
                return spec;
            }
            if (defaultValue === '') {
                spec.problems.push({
                    code: 'spec:name:empty-default',
                    message: 'empty default value',
                    line: spec.source[0].number,
                    critical: true,
                });
                return spec;
            }
            // has "=" and is not a string, except for "=>"
            if (!isQuoted(defaultValue) && /=(?!>)/.test(defaultValue)) {
                spec.problems.push({
                    code: 'spec:name:invalid-default',
                    message: 'invalid default value syntax',
                    line: spec.source[0].number,
                    critical: true,
                });
                return spec;
            }
        }
        spec.optional = optional;
        spec.name = name;
        tokens.name = nameToken;
        if (defaultValue !== undefined)
            spec.default = defaultValue;
        [tokens.postName, tokens.description] = splitSpace(source.slice(tokens.name.length));
        return spec;
    };
}

View File

@ -0,0 +1,6 @@
import { Tokenizer } from './index.js';
/**
 * Splits the `@prefix` from the remaining `Spec.lines[].token.description` into the `tag` token,
 * and populates `spec.tag`
 */
export default function tagTokenizer(): Tokenizer;

View File

@ -0,0 +1,24 @@
/**
 * Splits the leading `@tag` prefix out of the first line's description token,
 * populating `tokens.tag`, `tokens.postTag`, and `spec.tag`. Records a
 * critical problem when the section does not contain an `@` tag.
 */
export default function tagTokenizer() {
    return (spec) => {
        const { tokens } = spec.source[0];
        const found = tokens.description.match(/\s*(@(\S+))(\s*)/);
        if (found === null) {
            // No "@tag" present — the spec cannot be parsed any further.
            spec.problems.push({
                code: 'spec:tag:prefix',
                message: 'tag should start with "@" symbol',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        const [matched, tagToken, tagName, postTag] = found;
        tokens.tag = tagToken;
        tokens.postTag = postTag;
        tokens.description = tokens.description.slice(matched.length);
        spec.tag = tagName;
        return spec;
    };
}

View File

@ -0,0 +1,27 @@
import { Tokenizer } from './index.js';
/**
 * Joiner is a function taking the collected type token string parts
 * and joining them together. In most cases this will be a single piece
 * like {type-name}, but a type may span multiple lines
 * ```
 * @tag {function(
 * number,
 * string
 * )}
 * ```
 */
export type Joiner = (parts: string[]) => string;
/**
 * Shortcut for standard Joiners:
 * compact - trim surrounding space in each part and concatenate them
 * preserve - join the parts as is, separated by line breaks
 */
export type Spacing = 'compact' | 'preserve' | Joiner;
/**
 * Splits the remaining `Spec.lines[].tokens.description` into `type` and `description`
 * tokens and populates `Spec.type`
 *
 * @param {Spacing} spacing tells how to deal with a whitespace
 * for type values going over multiple lines
 */
export default function typeTokenizer(spacing?: Spacing): Tokenizer;

View File

@ -0,0 +1,65 @@
import { splitSpace } from '../../util.js';
/**
 * Splits the remaining `Spec.lines[].tokens.description` into `type` and
 * `description` tokens and populates `Spec.type`.
 * Records a critical problem when the curly braces never balance out.
 *
 * @param {Spacing} spacing tells how to deal with a whitespace
 * for type values going over multiple lines
 */
export default function typeTokenizer(spacing = 'compact') {
    const join = getJoiner(spacing);
    return (spec) => {
        // Curly-brace balance; the type ends when it returns to zero.
        let curlies = 0;
        // Collected [tokens, typeText] pairs, one per source line the type spans.
        let lines = [];
        for (const [i, { tokens }] of spec.source.entries()) {
            let type = '';
            // No leading "{" on the first line means there is no type at all.
            if (i === 0 && tokens.description[0] !== '{')
                return spec;
            // Consume characters until the braces balance out.
            for (const ch of tokens.description) {
                if (ch === '{')
                    curlies++;
                if (ch === '}')
                    curlies--;
                type += ch;
                if (curlies === 0)
                    break;
            }
            lines.push([tokens, type]);
            if (curlies === 0)
                break;
        }
        // Ran out of lines with braces still open.
        if (curlies !== 0) {
            spec.problems.push({
                code: 'spec:type:unpaired-curlies',
                message: 'unpaired curlies',
                line: spec.source[0].number,
                critical: true,
            });
            return spec;
        }
        const parts = [];
        // Post-delimiter width of the first line, used to de-indent continuation lines.
        const offset = lines[0][0].postDelimiter.length;
        for (const [i, [tokens, type]] of lines.entries()) {
            tokens.type = type;
            if (i > 0) {
                // Move the shared indentation out of postDelimiter into the type token.
                tokens.type = tokens.postDelimiter.slice(offset) + type;
                tokens.postDelimiter = tokens.postDelimiter.slice(0, offset);
            }
            [tokens.postType, tokens.description] = splitSpace(tokens.description.slice(type.length));
            parts.push(tokens.type);
        }
        // Strip the outer "{" and "}" before joining the collected parts.
        parts[0] = parts[0].slice(1);
        parts[parts.length - 1] = parts[parts.length - 1].slice(0, -1);
        spec.type = join(parts);
        return spec;
    };
}
// Strips surrounding whitespace from a single type part.
const trim = (x) => x.trim();
/**
 * Resolves a Spacing shortcut into a concrete Joiner:
 * 'compact' trims every part and concatenates them,
 * 'preserve' joins the parts with newlines,
 * and a custom Joiner function is returned unchanged.
 */
function getJoiner(spacing) {
    switch (spacing) {
        case 'compact':
            return (parts) => parts.map(trim).join('');
        case 'preserve':
            return (parts) => parts.join('\n');
        default:
            return spacing;
    }
}