Initial import with skill sheet working

2024-12-04 00:11:23 +01:00
commit 9050c80ab4
4488 changed files with 671048 additions and 0 deletions

244
node_modules/comment-parser/tests/e2e/examples.js generated vendored Normal file

@@ -0,0 +1,244 @@
// This file is a source for playground examples.
// The examples' integrity is smoke-checked by examples.spec.js
function parse_defaults(source, parse, stringify, transforms) {
// Invoking the parser with the default config covers the most generic cases.
// Note how /*** and /* blocks are ignored
/** One-liner */
/** @some-tag {someType} someName */
/**
* Description may go
* over multiple lines followed by @tags
* @param {string} name the name parameter
* @param {any} value the value parameter
*/
/*** ignored */
/* ignored */
const parsed = parse(source);
const stringified = parsed.map((block) => stringify(block));
}
function parse_line_numbering(source, parse, stringify, transforms) {
// Note: line numbers are off by 5 from what you see in the editor.
//
// Try changing the start line back to 0, or omit the option
// parse(source, {startLine: 0}) -- default
// parse(source, {startLine: 5}) -- enforce alternative start number
/**
* Description may go
* over multiple lines followed by @tags
* @param {string} name the name parameter
* @param {any} value the value parameter
*/
const parsed = parse(source, { startLine: 5 });
const stringified = parsed[0].tags
.map((tag) => `line ${tag.source[0].number + 1} : @${tag.tag} ${tag.name}`)
.join('\n');
}
function parse_spacing(source, parse, stringify, transforms) {
// Note: when the spacing option is set to 'compact' or omitted, tag and block descriptions are collapsed to read like a single sentence.
//
// Try changing it to 'preserve' or defining a custom function
// parse(source, {spacing: 'compact'}) -- default
// parse(source, {spacing: 'preserve'}) -- preserve spaces and line breaks
// parse(source, {spacing: lines => lines
// .map(tokens => tokens.description.trim())
// .filter(description => description !== '')
// .join(' ');
// }) -- mimic 'compact' implementation
/**
* Description may go
* over multiple lines followed by
* @param {string} name the name parameter
* with multiline description
* @param {function(
* number,
* string
* )} options the options
*/
const parsed = parse(source, { spacing: 'preserve' });
const stringified = parsed[0].tags
.map((tag) => `@${tag.tag} - ${tag.description}\n\n${tag.type}`)
.join('\n----\n');
}
function parse_escaping(source, parse, stringify, transforms) {
// Note: @decorator is not parsed as another tag because the block is wrapped in ###.
//
// Try setting alternative escape sequence
// parse(source, {fence: '```'}) -- default
// parse(source, {fence: '###'}) -- update source correspondingly
/**
* @example "some code"
###
@decorator
function hello() {
// do something
}
###
*/
const parsed = parse(source, { fence: '###' });
const stringified = parsed[0].tags
.map((tag) => `@${tag.tag} - ${tag.description}`)
.join('\n');
}
function stringify_formatting(source, parse, stringify, transforms) {
// stringify preserves exact formatting by default, but you can transform the parsing result first
// transform = align() -- align name, type, and description
// transform = flow(align(), indent(4)) -- align, then place the block's opening marker at pos 4
/**
* Description may go
* over multiple lines followed by @tags
* @param {string} name the name parameter
* @param {any} value the value parameter
*/
const { flow, align, indent } = transforms;
const transform = flow(align(), indent(4));
const parsed = parse(source);
const stringified = stringify(transform(parsed[0]));
}
function parse_source_exploration(source, parse, stringify, transforms) {
// parse() produces Block[].source, keeping an accurate record of the original source
/**
* Description may go
* over multiple lines followed by @tags
* @param {string} name the name parameter
* @param {any} value the value parameter
*/
const parsed = parse(source);
const summary = ({ source }) => ({
source: source
.map(
({ tokens }) =>
tokens.start +
tokens.delimiter +
tokens.postDelimiter +
tokens.tag +
tokens.postTag +
tokens.type +
tokens.postType +
tokens.name +
tokens.postName +
tokens.description +
tokens.end
)
.join('\n'),
start: {
line: source[0].number + 1,
column: source[0].tokens.start.length,
},
end: {
line: source[source.length - 1].number + 1,
column: source[source.length - 1].source.length,
},
});
const pos = (p) => p.line + ':' + p.column;
const stringified = parsed[0].tags
.map(summary)
.map((s) => `${pos(s.start)} - ${pos(s.end)}\n${s.source}`);
}
function parse_advanced_parsing(source, parse, _, __, tokenizers) {
// Each '@tag ...' section results in a Spec. The Spec is computed by
// a chain of tokenizers, each contributing a change to the Spec.* fields and Spec.source[].tokens.
// Default parse() options come with the standard tokenizers:
// {
// ...,
// spacing = 'compact',
// tokenizers = [
// tokenizers.tag(),
// tokenizers.type(spacing),
// tokenizers.name(),
// tokenizers.description(spacing),
// ]
// }
// You can reorder those, or even replace any with a custom function (spec: Spec) => Spec
// This example shows how to parse "@tag description" comments
/**
* @arg0 my parameter
* @arg1
* another parameter
* with a strange formatting
*/
const parsed = parse(source, {
tokenizers: [tokenizers.tag(), tokenizers.description('preserve')],
});
const stringified = parsed[0].tags
.map((tag) => `@${tag.tag} - ${tag.description}`)
.join('\n');
}
function stringify_rename(source, parse, stringify, transforms) {
// You can apply arbitrary manipulations to the parsed result.
// See how each block is mapped. If you are updating a Block.source
// then rewireSource(block) should be called on each changed block.
// If changes were made to Block.tags[].source then call rewireSpecs(block).
// This example shows how you can "rename" @param tags: value1 -> value11, value2 -> value22
/**
* Description may go
* over multiple lines followed by @tags
* @param {string} name the name parameter
* @param {any} value1 first value parameter
* with a multiline description
* @param {any} value2 second value parameter
*/
function renameParam(from, to) {
return (block) => {
for (const tag of block.tags) {
if (tag.tag === 'param' && tag.name === from) {
tag.name = to;
for (const line of tag.source) {
if (line.tokens.name === from) line.tokens.name = to;
}
}
}
return block;
};
}
const transform = transforms.flow(
renameParam('value1', 'value11'),
renameParam('value2', 'value22'),
stringify
);
const parsed = parse(source);
const stringified = parsed.map(transform);
}
(typeof window === 'undefined' ? module.exports : window).examples = [
parse_defaults,
parse_line_numbering,
parse_escaping,
parse_spacing,
parse_source_exploration,
parse_advanced_parsing,
stringify_formatting,
stringify_rename,
];
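
As a companion to the parse_advanced_parsing example above, here is a minimal editor's sketch of a fully custom tokenizer: a plain (spec) => Spec function appended after the standard chain, flagging tags that have no description. It assumes the package's main entry ('comment-parser') exposes the same parse and tokenizers objects the specs require from lib/index.cjs; the problem code is made up for illustration.

const { parse, tokenizers } = require('comment-parser');

// Hypothetical extra tokenizer: record a non-critical problem when a tag has no description.
const requireDescription = (spec) =>
  spec.description.trim() !== ''
    ? spec
    : {
        ...spec,
        problems: [
          ...spec.problems,
          {
            code: 'custom:missing-description', // made-up problem code
            line: spec.source[0].number,
            message: 'tag has no description',
            critical: false,
          },
        ],
      };

const blocks = parse('/** @param {string} name */', {
  tokenizers: [
    tokenizers.tag(),
    tokenizers.type('compact'),
    tokenizers.name(),
    tokenizers.description('compact'),
    requireDescription,
  ],
});
// blocks[0].tags[0].problems now holds the custom entry.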

16
node_modules/comment-parser/tests/e2e/examples.spec.js generated vendored Normal file

@@ -0,0 +1,16 @@
const {
parse,
stringify,
transforms,
tokenizers,
} = require('../../lib/index.cjs');
const { examples } = require('./examples');
const table = examples.map((fn) => [fn.name.replace(/_/g, ' '), fn]);
test.each(table)('example - %s', (name, fn) => {
const source = fn.toString();
expect(() =>
fn(source, parse, stringify, transforms, tokenizers)
).not.toThrow();
});


@@ -0,0 +1,49 @@
const { parse, inspect } = require('../../lib/index.cjs');
const source = `
/**
* Typedef with multi-line property type.
*
* @typedef {object} MyType
* @property {function(
* number,
* {x:string}
* )} numberEater Method
* which takes a number.
*/`;
test('default', () => {
const parsed = parse(source);
// console.log(inspect(parsed[0]));
expect(parsed[0].tags[1]).toMatchObject({
tag: 'property',
type: 'function(number,{x:string})',
name: 'numberEater',
description: 'Method which takes a number.',
problems: [],
});
});
test('preserve', () => {
const parsed = parse(source, { spacing: 'preserve' });
// console.log(inspect(parsed[0]));
expect(parsed[0].tags[1]).toMatchObject({
tag: 'property',
type: 'function(\n number,\n {x:string}\n)',
name: 'numberEater',
description: 'Method\n which takes a number.',
problems: [],
});
});
test('compact', () => {
const parsed = parse(source, { spacing: 'compact' });
// console.log(inspect(parsed[0]));
expect(parsed[0].tags[1]).toMatchObject({
tag: 'property',
type: 'function(number,{x:string})',
name: 'numberEater',
description: 'Method which takes a number.',
problems: [],
});
});


@@ -0,0 +1,20 @@
const { parse, inspect } = require('../../lib/index.cjs');
const source = `
/**
* @param {Function} [processor=data => data] A function to run
*/`;
test('default', () => {
const parsed = parse(source);
// console.log(inspect(parsed[0]));
expect(parsed[0].problems).toEqual([]);
expect(parsed[0].tags[0]).toMatchObject({
name: 'processor',
default: 'data => data',
optional: true,
description: 'A function to run',
problems: [],
});
});


@@ -0,0 +1,23 @@
const { parse, inspect } = require('../../lib/index.cjs');
const source = `
/** Multi-line typedef for an options object type.
*
* @typedef {{
* prop: number
* }} MyOptions description text
*/`;
test('name after multiline tag', () => {
const parsed = parse(source);
// console.log(inspect(parsed[0]));
expect(parsed[0].problems).toEqual([]);
expect(parsed[0].tags[0]).toMatchObject({
tag: 'typedef',
name: 'MyOptions',
type: '{prop: number}',
description: 'description text',
problems: [],
});
});


@@ -0,0 +1,29 @@
const {
parse,
stringify,
transforms: { align },
} = require('../../lib/index.cjs');
test('align - ignore trailing right space', () => {
const source = `
/**
* Description may go
* over multiple lines followed by @tags
* @param {string} name
* @param {any} value the value parameter
*/`;
const expected = `
/**
* Description may go
* over multiple lines followed by @tags
* @param {string} name
* @param {any} value the value parameter
*/`.slice(1);
const parsed = parse(source);
const aligned = align()(parsed[0]);
const stringified = stringify(aligned);
expect(stringified).toEqual(expected);
});


@@ -0,0 +1,29 @@
const {
parse,
stringify,
transforms: { align },
} = require('../../lib/index.cjs');
test('align - collapse postDelim', () => {
const source = `
/**
* Description may go
* over multiple lines followed by @tags
* @param {string} name the name parameter
* @param {any} value the value parameter
*/`.slice(1);
const expected = `
/**
* Description may go
* over multiple lines followed by @tags
* @param {string} name the name parameter
* @param {any} value the value parameter
*/`.slice(1);
const parsed = parse(source);
const aligned = align()(parsed[0]);
const stringified = stringify(aligned);
expect(stringified).toEqual(expected);
});

105
node_modules/comment-parser/tests/e2e/issue-121.spec.js generated vendored Normal file

@@ -0,0 +1,105 @@
const { parse, inspect } = require('../../lib/index.cjs');
test('name cut off', () => {
const source = `
/**
* @param {{includeWhiteSpace: (boolean|undefined),
* ignoreElementOrder: (boolean|undefined)}} [options] The options.
*/`.slice(1);
const tagSource = [
{
number: 1,
source: ' * @param {{includeWhiteSpace: (boolean|undefined),',
tokens: {
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@param',
postTag: ' ',
type: '{{includeWhiteSpace: (boolean|undefined),',
postType: '',
name: '',
postName: '',
description: '',
end: '',
lineEnd: '',
},
},
{
number: 2,
source:
' * ignoreElementOrder: (boolean|undefined)}} [options] The options.',
tokens: {
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '',
postTag: '',
type: ' ignoreElementOrder: (boolean|undefined)}}',
postType: ' ',
name: '[options]',
postName: ' ',
description: 'The options.',
end: '',
lineEnd: '',
},
},
{
number: 3,
source: ' */',
tokens: {
start: ' ',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
type: '',
postType: '',
name: '',
postName: '',
description: '',
end: '*/',
lineEnd: '',
},
},
];
const parsed = parse(source);
// console.log(inspect(parsed[0]));
expect(parsed[0]).toMatchObject({
problems: [],
tags: [
{
tag: 'param',
type: '{includeWhiteSpace: (boolean|undefined),ignoreElementOrder: (boolean|undefined)}',
name: 'options',
optional: true,
description: 'The options.',
source: tagSource,
},
],
source: [
{
number: 0,
source: ' /**',
tokens: {
start: ' ',
delimiter: '/**',
postDelimiter: '',
tag: '',
postTag: '',
type: '',
postType: '',
name: '',
postName: '',
description: '',
end: '',
lineEnd: '',
},
},
...tagSource,
],
});
});

143
node_modules/comment-parser/tests/e2e/issue-129.spec.js generated vendored Normal file

@@ -0,0 +1,143 @@
const {
parse,
inspect,
stringify,
transforms: { align },
} = require('../../lib/index.cjs');
const tokens = {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
type: '',
postType: '',
name: '',
postName: '',
description: '',
end: '',
lineEnd: '',
};
test('carriage returns', () => {
const parsed = parse(`
/**
* description\r
* @param0 {param-type}\r
* @param1 {param-type} paramName param description\r
*/`);
const source = [
{
number: 1,
source: ' /**',
tokens: {
...tokens,
start: ' ',
delimiter: '/**',
},
},
{
number: 2,
source: ' * description\r',
tokens: {
...tokens,
start: ' ',
delimiter: '*',
postDelimiter: ' ',
description: 'description',
lineEnd: '\r',
},
},
{
number: 3,
source: ' * @param0 {param-type}\r',
tokens: {
...tokens,
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@param0',
postTag: ' ',
type: '{param-type}',
lineEnd: '\r',
},
},
{
number: 4,
source: ' * @param1 {param-type} paramName param description\r',
tokens: {
...tokens,
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@param1',
postTag: ' ',
type: '{param-type}',
postType: ' ',
name: 'paramName',
postName: ' ',
description: 'param description',
lineEnd: '\r',
},
},
{
number: 5,
source: ' */',
tokens: {
...tokens,
start: ' ',
end: '*/',
},
},
];
expect(parsed[0]).toMatchObject({
description: 'description',
problems: [],
source,
tags: [
{
tag: 'param0',
type: 'param-type',
name: '',
optional: false,
description: '',
source: [source[2]],
},
{
tag: 'param1',
type: 'param-type',
name: 'paramName',
optional: false,
description: 'param description',
source: [source[3], source[4]],
},
],
});
});
test('carriage returns with alignment', () => {
const source = `
/**\r
* Description may go\r
* over multiple lines followed by @tags\r
* @param {string} name the name parameter\r
* @param {any} value\r
*/\r`.slice(1);
const expected = `
/**\r
* Description may go\r
* over multiple lines followed by @tags\r
* @param {string} name the name parameter\r
* @param {any} value\r
*/\r`.slice(1);
const parsed = parse(source);
const aligned = align()(parsed[0]);
const stringified = stringify(aligned);
expect(stringified).toEqual(expected);
});
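
A short illustrative sketch of what these fixtures mean in practice (editor's addition, assuming the published entry point exports the same parse and stringify used above): carriage returns are captured in tokens.lineEnd rather than the description, and a plain stringify round-trip reproduces them.

const { parse, stringify } = require('comment-parser');

const source = '/**\r\n * @param {string} name\r\n */\r';
const [block] = parse(source);

// The '\r' is kept out of the description and stored in tokens.lineEnd instead.
console.log(block.source[0].tokens.lineEnd === '\r'); // true

// stringify reproduces the original text exactly, carriage returns included.
console.log(stringify(block) === source); // true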

362
node_modules/comment-parser/tests/e2e/issue-41.spec.js generated vendored Normal file

@@ -0,0 +1,362 @@
const { default: getParser } = require('../../lib/parser/index.cjs');
test('quoted name', () => {
const parsed = getParser()(`
/**
* @section "Brand Colors" - Here you can find all the brand colors...
*/`);
expect(parsed).toEqual([
{
description: '',
tags: [
{
tag: 'section',
name: 'Brand Colors',
type: '',
optional: false,
description: '- Here you can find all the brand colors...',
problems: [],
source: [
{
number: 2,
source:
' * @section "Brand Colors" - Here you can find all the brand colors...',
tokens: {
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@section',
postTag: ' ',
name: '"Brand Colors"',
postName: ' ',
type: '',
postType: '',
description: '- Here you can find all the brand colors...',
end: '',
lineEnd: '',
},
},
{
number: 3,
source: ' */',
tokens: {
start: ' ',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '*/',
lineEnd: '',
},
},
],
},
],
source: [
{
number: 1,
source: ' /**',
tokens: {
start: ' ',
delimiter: '/**',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '',
lineEnd: '',
},
},
{
number: 2,
source:
' * @section "Brand Colors" - Here you can find all the brand colors...',
tokens: {
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@section',
postTag: ' ',
name: '"Brand Colors"',
postName: ' ',
type: '',
postType: '',
description: '- Here you can find all the brand colors...',
end: '',
lineEnd: '',
},
},
{
number: 3,
source: ' */',
tokens: {
start: ' ',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '*/',
lineEnd: '',
},
},
],
problems: [],
},
]);
});
test('optional name', () => {
const parsed = getParser()(`
/**
* @section [Brand Colors] - Here you can find all the brand colors...
*/`);
expect(parsed).toEqual([
{
description: '',
tags: [
{
tag: 'section',
name: 'Brand Colors',
type: '',
optional: true,
description: '- Here you can find all the brand colors...',
problems: [],
source: [
{
number: 2,
source:
' * @section [Brand Colors] - Here you can find all the brand colors...',
tokens: {
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@section',
postTag: ' ',
name: '[Brand Colors]',
postName: ' ',
type: '',
postType: '',
description: '- Here you can find all the brand colors...',
end: '',
lineEnd: '',
},
},
{
number: 3,
source: ' */',
tokens: {
start: ' ',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '*/',
lineEnd: '',
},
},
],
},
],
source: [
{
number: 1,
source: ' /**',
tokens: {
start: ' ',
delimiter: '/**',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '',
lineEnd: '',
},
},
{
number: 2,
source:
' * @section [Brand Colors] - Here you can find all the brand colors...',
tokens: {
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@section',
postTag: ' ',
name: '[Brand Colors]',
postName: ' ',
type: '',
postType: '',
description: '- Here you can find all the brand colors...',
end: '',
lineEnd: '',
},
},
{
number: 3,
source: ' */',
tokens: {
start: ' ',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '*/',
lineEnd: '',
},
},
],
problems: [],
},
]);
});
test('inconsistent quotes', () => {
const parsed = getParser()(`
/**
* @section "Brand Colors - Here you can find all the brand colors...
*/`);
expect(parsed).toEqual([
{
description: '',
tags: [
{
tag: 'section',
name: '"Brand',
type: '',
optional: false,
description: 'Colors - Here you can find all the brand colors...',
problems: [],
source: [
{
number: 2,
source:
' * @section "Brand Colors - Here you can find all the brand colors...',
tokens: {
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@section',
postTag: ' ',
name: '"Brand',
postName: ' ',
type: '',
postType: '',
description:
'Colors - Here you can find all the brand colors...',
end: '',
lineEnd: '',
},
},
{
number: 3,
source: ' */',
tokens: {
start: ' ',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '*/',
lineEnd: '',
},
},
],
},
],
source: [
{
number: 1,
source: ' /**',
tokens: {
start: ' ',
delimiter: '/**',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '',
lineEnd: '',
},
},
{
number: 2,
source:
' * @section "Brand Colors - Here you can find all the brand colors...',
tokens: {
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@section',
postTag: ' ',
name: '"Brand',
postName: ' ',
type: '',
postType: '',
description: 'Colors - Here you can find all the brand colors...',
end: '',
lineEnd: '',
},
},
{
number: 3,
source: ' */',
tokens: {
start: ' ',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '*/',
lineEnd: '',
},
},
],
problems: [],
},
]);
});
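
Restating the expectations above as a consumer-side sketch (illustrative only, using the default parser): quotes are stripped from the name, brackets would additionally mark it optional, and an unpaired quote falls back to plain whitespace splitting.

const { parse } = require('comment-parser');

const [block] = parse(`
/**
 * @section "Brand Colors" - Here you can find all the brand colors...
 */`);

const tag = block.tags[0];
console.log(tag.name);     // 'Brand Colors'  -- quotes stripped
console.log(tag.optional); // false           -- [Brand Colors] would flip this to true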

581
node_modules/comment-parser/tests/e2e/issue-61.spec.js generated vendored Normal file

@@ -0,0 +1,581 @@
const { default: getParser } = require('../../lib/parser/index.cjs');
test('fenced description', () => {
const parsed = getParser({ spacing: 'preserve' })(`
/**
* @example "" \`\`\`ts
@transient()
class Foo { }
\`\`\`
*/`);
const source = [
{
number: 1,
source: ' /**',
tokens: {
start: ' ',
delimiter: '/**',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '',
lineEnd: '',
},
},
{
number: 2,
source: ' * @example "" ```ts',
tokens: {
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@example',
postTag: ' ',
name: '""',
postName: ' ',
type: '',
postType: '',
description: '```ts',
end: '',
lineEnd: '',
},
},
{
number: 3,
source: '@transient()',
tokens: {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '@transient()',
end: '',
lineEnd: '',
},
},
{
number: 4,
source: 'class Foo { }',
tokens: {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: 'class Foo { }',
end: '',
lineEnd: '',
},
},
{
number: 5,
source: '```',
tokens: {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '```',
end: '',
lineEnd: '',
},
},
{
number: 6,
source: ' */',
tokens: {
start: ' ',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '*/',
lineEnd: '',
},
},
];
expect(parsed).toEqual([
{
description: '',
tags: [
{
tag: 'example',
name: '',
type: '',
optional: false,
description: '```ts\n@transient()\nclass Foo { }\n```',
problems: [],
source: source.slice(1),
},
],
source,
problems: [],
},
]);
});
test('fenced one-liner', () => {
const parsed = getParser({ spacing: 'preserve' })(
'/** @example "" ```ts @transient() class Foo { } ```*/'
);
const source = [
{
number: 0,
source: '/** @example "" ```ts @transient() class Foo { } ```*/',
tokens: {
start: '',
delimiter: '/**',
postDelimiter: ' ',
tag: '@example',
postTag: ' ',
name: '""',
postName: ' ',
type: '',
postType: '',
description: '```ts @transient() class Foo { } ```',
end: '*/',
lineEnd: '',
},
},
];
expect(parsed).toEqual([
{
description: '',
tags: [
{
tag: 'example',
name: '',
type: '',
optional: false,
description: '```ts @transient() class Foo { } ```',
problems: [],
source,
},
],
source,
problems: [],
},
]);
});
test('multiple fences', () => {
const parsed = getParser({ spacing: 'preserve' })(`
/**
* @example "" \`\`\`ts
@one
\`\`\`
text
\`\`\`
@two
\`\`\`
*/`);
const source = [
{
number: 1,
source: ' /**',
tokens: {
start: ' ',
delimiter: '/**',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '',
lineEnd: '',
},
},
{
number: 2,
source: ' * @example "" ```ts',
tokens: {
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@example',
postTag: ' ',
name: '""',
postName: ' ',
type: '',
postType: '',
description: '```ts',
end: '',
lineEnd: '',
},
},
{
number: 3,
source: '@one',
tokens: {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '@one',
end: '',
lineEnd: '',
},
},
{
number: 4,
source: '```',
tokens: {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '```',
end: '',
lineEnd: '',
},
},
{
number: 5,
source: 'text',
tokens: {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: 'text',
end: '',
lineEnd: '',
},
},
{
number: 6,
source: '```',
tokens: {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '```',
end: '',
lineEnd: '',
},
},
{
number: 7,
source: '@two',
tokens: {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '@two',
end: '',
lineEnd: '',
},
},
{
number: 8,
source: '```',
tokens: {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '```',
end: '',
lineEnd: '',
},
},
{
number: 9,
source: ' */',
tokens: {
start: ' ',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '*/',
lineEnd: '',
},
},
];
expect(parsed).toEqual([
{
description: '',
tags: [
{
tag: 'example',
name: '',
type: '',
optional: false,
description: '```ts\n@one\n```\ntext\n```\n@two\n```',
source: source.slice(1),
problems: [],
},
],
source,
problems: [],
},
]);
});
test('custom fences', () => {
const parsed = getParser({ spacing: 'preserve', fence: '###' })(`
/**
* @example "" ###ts
@one
###
text
###
@two
###
*/`);
const source = [
{
number: 1,
source: ' /**',
tokens: {
start: ' ',
delimiter: '/**',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '',
lineEnd: '',
},
},
{
number: 2,
source: ' * @example "" ###ts',
tokens: {
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@example',
postTag: ' ',
name: '""',
postName: ' ',
type: '',
postType: '',
description: '###ts',
end: '',
lineEnd: '',
},
},
{
number: 3,
source: '@one',
tokens: {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '@one',
end: '',
lineEnd: '',
},
},
{
number: 4,
source: '###',
tokens: {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '###',
end: '',
lineEnd: '',
},
},
{
number: 5,
source: 'text',
tokens: {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: 'text',
end: '',
lineEnd: '',
},
},
{
number: 6,
source: '###',
tokens: {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '###',
end: '',
lineEnd: '',
},
},
{
number: 7,
source: '@two',
tokens: {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '@two',
end: '',
lineEnd: '',
},
},
{
number: 8,
source: '###',
tokens: {
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '###',
end: '',
lineEnd: '',
},
},
{
number: 9,
source: ' */',
tokens: {
start: ' ',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '*/',
lineEnd: '',
},
},
];
expect(parsed).toEqual([
{
description: '',
tags: [
{
tag: 'example',
name: '',
type: '',
optional: false,
description: '###ts\n@one\n###\ntext\n###\n@two\n###',
source: source.slice(1),
problems: [],
},
],
source,
problems: [],
},
]);
});
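
For orientation, a hedged sketch of the same behaviour through the public options (the default fence is the triple backtick, so only spacing needs to be passed): lines inside a fenced region stay part of the @example description even when they start with @.

const { parse } = require('comment-parser');

const [block] = parse(`
/**
 * @example "" \`\`\`ts
@transient()
class Foo { }
\`\`\`
 */`, { spacing: 'preserve' });

console.log(block.tags.length); // 1 -- '@transient()' did not start a new tag
console.log(block.tags[0].description.includes('@transient()')); // true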

1840
node_modules/comment-parser/tests/e2e/parse.spec.js generated vendored Normal file

File diff suppressed because it is too large


@@ -0,0 +1,13 @@
const { parse, stringify } = require('../../lib/index.cjs');
test('preserve formatting', () => {
const source = `
/**
* @my-tag {my.type} my-name description line 1
description line 2
* description line 3
*/`;
const parsed = parse(source);
const out = stringify(parsed[0]);
expect(out).toBe(source.slice(1));
});


@@ -0,0 +1,32 @@
const {
parse,
stringify,
transforms: { flow, indent, align },
} = require('../../lib/index.cjs');
test('align + indent', () => {
const source = `
/**
* Description may go
* over multiple lines followed by @tags
*
* @my-tag {my.type} my-name description line 1
description line 2
* description line 3
*/`;
const expected = `
/**
* Description may go
* over multiple lines followed by @tags
*
* @my-tag {my.type} my-name description line 1
description line 2
* description line 3
*/`;
const parsed = parse(source);
const transform = flow(indent(4), align());
const out = stringify(transform(parsed[0]));
expect(out).toBe(expected.slice(1));
});


@@ -0,0 +1,169 @@
import getParser from '../../src/parser/block-parser.js';
import { Line } from '../../src/primitives.js';
import { seedTokens } from '../../src/util.js';
let source: Line[];
beforeEach(() => {
source = [
{
number: 1,
source: ' /**',
tokens: seedTokens({
start: ' ',
delimiter: '/**',
postDelimiter: '',
description: '',
end: '',
}),
},
{
number: 2,
source: ' * description 0',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
description: 'description 0',
end: '',
}),
},
{
number: 3,
source: ' *',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: '',
description: '',
end: '',
}),
},
{
number: 4,
source: ' * description 1',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
description: 'description 1',
end: '',
}),
},
{
number: 5,
source: ' *',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: '',
description: '',
end: '',
}),
},
{
number: 6,
source: ' * @param {string} value value description 0',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
description: '@param {string} value value description 0',
end: '',
}),
},
{
number: 7,
source: ' ```',
tokens: seedTokens({
start: ' ',
delimiter: '',
postDelimiter: '',
description: '```',
end: '',
}),
},
{
number: 8,
source: ' @sample code',
tokens: seedTokens({
start: ' ',
delimiter: '',
postDelimiter: '',
description: '@sample code',
end: '',
}),
},
{
number: 9,
source: ' ```',
tokens: seedTokens({
start: ' ',
delimiter: '',
postDelimiter: '',
description: '```',
end: '',
}),
},
{
number: 10,
source: ' * value description 1',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
description: 'value description 1',
end: '',
}),
},
{
number: 11,
source: ' */',
tokens: seedTokens({
start: ' ',
delimiter: '',
postDelimiter: '',
description: '',
end: '*/',
}),
},
];
});
test('standard fences', () => {
const parser = getParser();
const groups: Line[][] = parser(source);
expect(groups.length).toBe(2);
expect(groups).toEqual([source.slice(0, 5), source.slice(5)]);
});
test('custom fence', () => {
source = source.map((line) => {
line.tokens.description = line.tokens.description.replace('```', '###');
return line;
});
const parser = getParser({ fence: '###' });
const groups: Line[][] = parser(source);
expect(groups.length).toBe(2);
expect(groups).toEqual([source.slice(0, 5), source.slice(5)]);
});
test('fence function', () => {
source = source.map((line) => {
line.tokens.description = line.tokens.description.replace('```', '###');
return line;
});
function isFenced(source: string) {
return source.split('###').length % 2 === 0;
}
const parser = getParser({ fence: isFenced });
const groups: Line[][] = parser(source);
expect(groups.length).toBe(2);
expect(groups).toEqual([source.slice(0, 5), source.slice(5)]);
});

43
node_modules/comment-parser/tests/unit/inspect.spec.ts generated vendored Normal file

@@ -0,0 +1,43 @@
import getParser from '../../src/parser/index.js';
import inspect from '../../src/stringifier/inspect.js';
import { seedBlock } from '../../src/util.js';
test('multiple lines', () => {
const source = `
/**
* Description may go\r\r
* over few lines followed by @tags\r
* @param {string} name name parameter
* @param {any} value value of any type
*/`.slice(1);
const parsed = getParser()(source);
const expected = `
|line|start|delimiter|postDelimiter|tag |postTag|name |postName|type |postType|description |end|CR |
|----|-----|---------|-------------|------|-------|-----|--------|--------|--------|--------------------------------|---|---|
| 0|{2} |/** | | | | | | | | | | |
| 1|{3} |* |{1} | | | | | | |Description may go | |{2}|
| 2|{3} |* |{1} | | | | | | |over few lines followed by @tags| |{1}|
| 3|{3} |* |{1} |@param|{1} |name |{1} |{string}|{1} |name parameter | | |
| 4|{3} |* |{1} |@param|{1} |value|{1} |{any} |{1} |value of any type | | |
| 5|{3} | | | | | | | | | |*/ | |`;
expect(inspect(parsed[0])).toEqual(expected.slice(1));
});
test('single line', () => {
const source = '/** @param {string} name name parameter */';
const parsed = getParser({ startLine: 12345 })(source);
const expected = `
|line |start|delimiter|postDelimiter|tag |postTag|name|postName|type |postType|description |end|CR|
|-----|-----|---------|-------------|------|-------|----|--------|--------|--------|---------------|---|--|
|12345| |/** |{1} |@param|{1} |name|{1} |{string}|{1} |name parameter |*/ | |`;
expect(inspect(parsed[0])).toEqual(expected.slice(1));
});
test('empty', () => {
const expected = '';
expect(inspect(seedBlock())).toEqual(expected.slice(1));
});
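
The commented-out inspect calls scattered through the e2e specs above produce exactly the token table this unit test pins down; a one-line usage sketch (assuming the same public entry point):

const { parse, inspect } = require('comment-parser');

// Prints a token table like the expected strings above: one row per source line,
// one column per token (start, delimiter, tag, type, name, description, end, CR).
console.log(inspect(parse('/** @param {string} name name parameter */')[0]));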

298
node_modules/comment-parser/tests/unit/parser.spec.ts generated vendored Normal file

@@ -0,0 +1,298 @@
import getParser from '../../src/parser/index.js';
import { seedTokens } from '../../src/util.js';
test('block with tags', () => {
const parsed = getParser()(`
/**
* Description may go\x20
* over few lines followed by @tags
* @param {string} name name parameter
*
* @param {any} value value of any type
*/`);
expect(parsed).toEqual([
{
description: 'Description may go over few lines followed by @tags',
tags: [
{
tag: 'param',
name: 'name',
type: 'string',
optional: false,
description: 'name parameter',
problems: [],
source: [
{
number: 4,
source: ' * @param {string} name name parameter',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@param',
postTag: ' ',
name: 'name',
postName: ' ',
type: '{string}',
postType: ' ',
description: 'name parameter',
}),
},
{
number: 5,
source: ' *',
tokens: seedTokens({
start: ' ',
delimiter: '*',
}),
},
],
},
{
tag: 'param',
name: 'value',
type: 'any',
optional: false,
description: 'value of any type',
problems: [],
source: [
{
number: 6,
source: ' * @param {any} value value of any type',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@param',
postTag: ' ',
name: 'value',
postName: ' ',
type: '{any}',
postType: ' ',
description: 'value of any type',
}),
},
{
number: 7,
source: ' */',
tokens: seedTokens({
start: ' ',
end: '*/',
}),
},
],
},
],
source: [
{
number: 1,
source: ' /**',
tokens: seedTokens({
start: ' ',
delimiter: '/**',
}),
},
{
number: 2,
source: ' * Description may go ',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
description: 'Description may go ',
}),
},
{
number: 3,
source: ' * over few lines followed by @tags',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
description: 'over few lines followed by @tags',
}),
},
{
number: 4,
source: ' * @param {string} name name parameter',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@param',
postTag: ' ',
name: 'name',
postName: ' ',
type: '{string}',
postType: ' ',
description: 'name parameter',
end: '',
}),
},
{
number: 5,
source: ' *',
tokens: seedTokens({
start: ' ',
delimiter: '*',
}),
},
{
number: 6,
source: ' * @param {any} value value of any type',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '@param',
postTag: ' ',
name: 'value',
postName: ' ',
type: '{any}',
postType: ' ',
description: 'value of any type',
}),
},
{
number: 7,
source: ' */',
tokens: seedTokens({
start: ' ',
end: '*/',
}),
},
],
problems: [],
},
]);
});
test('no source cloning', () => {
const parsed = getParser()(`
/**
* Description may go\x20
* over few lines followed by @tags
* @param {string} name name parameter
*
* @param {any} value value of any type
*/`);
expect(parsed[0].tags[0].source[0] === parsed[0].source[3]).toBe(true);
});
test('empty multi-line block', () => {
const parsed = getParser()(`
/**
*
*/`);
expect(parsed).toEqual([
{
description: '',
tags: [],
source: [
{
number: 1,
source: ' /**',
tokens: {
delimiter: '/**',
description: '',
end: '',
lineEnd: '',
name: '',
postDelimiter: '',
postName: '',
postTag: '',
postType: '',
start: ' ',
tag: '',
type: '',
},
},
{
number: 2,
source: ' *',
tokens: {
delimiter: '*',
description: '',
end: '',
lineEnd: '',
name: '',
postDelimiter: '',
postName: '',
postTag: '',
postType: '',
start: ' ',
tag: '',
type: '',
},
},
{
number: 3,
source: ' */',
tokens: {
delimiter: '',
description: '',
end: '*/',
lineEnd: '',
name: '',
postDelimiter: '',
postName: '',
postTag: '',
postType: '',
start: ' ',
tag: '',
type: '',
},
},
],
problems: [],
},
]);
});
test('empty one-line block', () => {
const parsed = getParser()(`
/** */`);
expect(parsed).toEqual([
{
description: '',
tags: [],
source: [
{
number: 1,
source: ' /** */',
tokens: {
delimiter: '/**',
description: '',
end: '*/',
lineEnd: '',
name: '',
postDelimiter: ' ',
postName: '',
postTag: '',
postType: '',
start: ' ',
tag: '',
type: '',
},
},
],
problems: [],
},
]);
});
test.each([
['one-star', '/*\n*\n*/'],
['three-star', '/***\n*\n*/'],
['one-star oneliner', '/* */'],
['three-star oneliner', '/*** */'],
])('skip block - %s', (name, source) => {
expect(getParser()(source)).toEqual([]);
});
test.each([
['negative', -1],
['float', 1.5],
])('invalid start line - %s', (name, startLine) => {
expect(() => getParser({ startLine })).toThrow('Invalid startLine');
});
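
In consumer terms, a small sketch of the skip and validation cases above (assuming the public parse forwards its options to this parser, as the e2e specs suggest): only /** ... */ blocks are collected, and a negative or fractional startLine is rejected immediately.

const { parse } = require('comment-parser');

console.log(parse('/* plain comment */').length); // 0 -- skipped
console.log(parse('/*** triple-star */').length); // 0 -- skipped
console.log(parse('/** doc block */').length);    // 1

try {
  parse('/** doc block */', { startLine: -1 });
} catch (e) {
  console.log(e.message); // contains 'Invalid startLine' (per the test expectation above)
}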


@@ -0,0 +1,438 @@
import getParser, { Parser } from '../../src/parser/source-parser.js';
import { Line } from '../../src/primitives.js';
import { splitLines, seedBlock, seedTokens } from '../../src/util.js';
let _parse: Parser;
const nulls = (n: number): null[] => Array(n).fill(null);
const parse = (source: string): Array<Line[] | null> =>
splitLines(source).map(_parse);
beforeEach(() => {
_parse = getParser();
});
test('multi-line block', () => {
const parsed = parse(`
/**
* description 0
*
* description 1
*
* @param {string} value value description 0
\`\`\`
@sample code
\`\`\`
* description 1
*/`);
const block = [
{
number: 1,
source: ' /**',
tokens: seedTokens({
start: ' ',
delimiter: '/**',
postDelimiter: '',
description: '',
end: '',
}),
},
{
number: 2,
source: ' * description 0',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
description: 'description 0',
end: '',
}),
},
{
number: 3,
source: ' *',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: '',
description: '',
end: '',
}),
},
{
number: 4,
source: ' * description 1',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
description: 'description 1',
end: '',
}),
},
{
number: 5,
source: ' *',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: '',
description: '',
end: '',
}),
},
{
number: 6,
source: ' * @param {string} value value description 0',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
description: '@param {string} value value description 0',
end: '',
}),
},
{
number: 7,
source: ' ```',
tokens: seedTokens({
start: ' ',
delimiter: '',
postDelimiter: '',
description: '```',
end: '',
}),
},
{
number: 8,
source: ' @sample code',
tokens: seedTokens({
start: ' ',
delimiter: '',
postDelimiter: '',
description: '@sample code',
end: '',
}),
},
{
number: 9,
source: ' ```',
tokens: seedTokens({
start: ' ',
delimiter: '',
postDelimiter: '',
description: '```',
end: '',
}),
},
{
number: 10,
source: ' * description 1',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
description: 'description 1',
end: '',
}),
},
{
number: 11,
source: ' */',
tokens: seedTokens({
start: ' ',
delimiter: '',
postDelimiter: '',
description: '',
end: '*/',
}),
},
];
expect(parsed).toEqual([...nulls(11), block]);
});
test('one-line block', () => {
const parsed = parse(`
/** description */
`);
const block = [
{
number: 1,
source: ' /** description */',
tokens: seedTokens({
start: ' ',
delimiter: '/**',
postDelimiter: ' ',
description: 'description ',
end: '*/',
}),
},
];
expect(parsed).toEqual([null, block, null]);
});
test('multiple blocks', () => {
const parsed = parse(`
/** description 0 */
/** description 1 */
`);
const block0 = [
{
number: 1,
source: ' /** description 0 */',
tokens: seedTokens({
start: ' ',
delimiter: '/**',
postDelimiter: ' ',
description: 'description 0 ',
end: '*/',
}),
},
];
const block1 = [
{
number: 3,
source: ' /** description 1 */',
tokens: seedTokens({
start: ' ',
delimiter: '/**',
postDelimiter: ' ',
description: 'description 1 ',
end: '*/',
}),
},
];
expect(parsed).toEqual([null, block0, null, block1, null]);
});
test('start line number', () => {
const source = splitLines(`
/** description */`);
const parsed = source.map(getParser({ startLine: 5 }));
const block = [
{
number: 6,
source: ' /** description */',
tokens: seedTokens({
start: ' ',
delimiter: '/**',
postDelimiter: ' ',
description: 'description ',
end: '*/',
}),
},
];
expect(parsed).toEqual([null, block]);
});
test('carriage returns', () => {
const source = splitLines(
['/**', ' * description', ' *', ' */', ''].join('\r\n')
);
const parse = getParser();
const parsed = source.map(parse);
const block = [
{
number: 0,
source: '/**\r',
tokens: seedTokens({
delimiter: '/**',
lineEnd: '\r',
}),
},
{
number: 1,
source: ' * description\r',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
description: 'description',
lineEnd: '\r',
}),
},
{
number: 2,
source: ' *\r',
tokens: seedTokens({
start: ' ',
delimiter: '*',
lineEnd: '\r',
}),
},
{
number: 3,
source: ' */\r',
tokens: seedTokens({
start: ' ',
end: '*/',
lineEnd: '\r',
}),
},
];
expect(parsed).toEqual([...nulls(3), block, null]);
});
test('custom markers', () => {
_parse = getParser({
markers: {
start: '////',
nostart: '// ',
delim: '///',
end: '////',
},
});
const parsed = parse(`
////
/// description 0
///
/// description 1
///
/// @param {string} value value description 0
\`\`\`
@sample code
\`\`\`
/// description 1
////`);
const block = [
{
number: 1,
source: ' ////',
tokens: seedTokens({
start: ' ',
delimiter: '////',
postDelimiter: '',
description: '',
end: '',
}),
},
{
number: 2,
source: ' /// description 0',
tokens: seedTokens({
start: ' ',
delimiter: '///',
postDelimiter: ' ',
description: 'description 0',
end: '',
}),
},
{
number: 3,
source: ' ///',
tokens: seedTokens({
start: ' ',
delimiter: '///',
postDelimiter: '',
description: '',
end: '',
}),
},
{
number: 4,
source: ' /// description 1',
tokens: seedTokens({
start: ' ',
delimiter: '///',
postDelimiter: ' ',
description: 'description 1',
end: '',
}),
},
{
number: 5,
source: ' ///',
tokens: seedTokens({
start: ' ',
delimiter: '///',
postDelimiter: '',
description: '',
end: '',
}),
},
{
number: 6,
source: ' /// @param {string} value value description 0',
tokens: seedTokens({
start: ' ',
delimiter: '///',
postDelimiter: ' ',
description: '@param {string} value value description 0',
end: '',
}),
},
{
number: 7,
source: ' ```',
tokens: seedTokens({
start: ' ',
delimiter: '',
postDelimiter: '',
description: '```',
end: '',
}),
},
{
number: 8,
source: ' @sample code',
tokens: seedTokens({
start: ' ',
delimiter: '',
postDelimiter: '',
description: '@sample code',
end: '',
}),
},
{
number: 9,
source: ' ```',
tokens: seedTokens({
start: ' ',
delimiter: '',
postDelimiter: '',
description: '```',
end: '',
}),
},
{
number: 10,
source: ' /// description 1',
tokens: seedTokens({
start: ' ',
delimiter: '///',
postDelimiter: ' ',
description: 'description 1',
end: '',
}),
},
{
number: 11,
source: ' ////',
tokens: seedTokens({
start: ' ',
delimiter: '',
postDelimiter: '',
description: '',
end: '////',
}),
},
];
expect(parsed).toEqual([...nulls(11), block]);
});


@@ -0,0 +1,120 @@
import { getJoiner } from '../../src/parser/tokenizers/description.js';
import { Line } from '../../src/primitives.js';
import { seedTokens } from '../../src/util.js';
const source: Line[] = [
{
number: 1,
source: ' /**',
tokens: seedTokens({
start: ' ',
delimiter: '/**',
}),
},
{
number: 2,
source: ' * ',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
}),
},
{
number: 3,
source: ' * Description first line\twith\ttabs ',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
description: 'Description first line\twith\ttabs ',
}),
},
{
number: 4,
source: ' * second line ',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
description: 'second line ',
}),
},
{
number: 5,
source: ' * ',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
}),
},
{
number: 6,
source: ' * third line ',
tokens: seedTokens({
start: ' ',
delimiter: '*',
postDelimiter: ' ',
description: 'third line ',
}),
},
{
number: 7,
source: ' */',
tokens: seedTokens({
start: ' ',
end: '*/',
}),
},
];
test('compact', () => {
const joined = getJoiner('compact')(source);
expect(joined).toBe(
'Description first line\twith\ttabs second line third line'
);
});
test('preserve', () => {
const joined = getJoiner('preserve')(source);
expect(joined).toBe(
' \n Description first line\twith\ttabs \n second line \n \n third line '
);
});
test('preserve - empty', () => {
const joined = getJoiner('preserve')([]);
expect(joined).toBe('');
});
test('preserve - no delimiter', () => {
const joined = getJoiner('preserve')([
{
number: 1,
source: '...',
tokens: seedTokens({
start: ' ',
delimiter: '',
postDelimiter: '',
description: 'line with no delimiter',
}),
},
]);
expect(joined).toBe(' line with no delimiter');
});
test('custom', () => {
const spacerFn = (source: Line[]) =>
source
.map(({ tokens: { description } }) =>
description.replace(/\s+/g, ' ').trim().toUpperCase()
)
.filter((s) => s !== '')
.join(' ');
const joined = getJoiner(spacerFn)(source);
expect(joined).toBe(
'DESCRIPTION FIRST LINE WITH TABS SECOND LINE THIRD LINE'
);
});
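
The joiner signature tested here is the same shape the public spacing option accepts (per the notes in examples.js above); a hedged sketch of supplying one:

const { parse } = require('comment-parser');

// A custom joiner: trim every line's description, drop blanks, join with ' | '.
const pipeJoiner = (lines) =>
  lines
    .map(({ tokens }) => tokens.description.trim())
    .filter((d) => d !== '')
    .join(' | ');

const [block] = parse(`
/**
 * first line
 * second line
 */`, { spacing: pipeJoiner });

console.log(block.description); // 'first line | second line'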


@@ -0,0 +1,227 @@
import descriptionTokenizer from '../../src/parser/tokenizers/description.js';
import { seedSpec, seedTokens } from '../../src/util.js';
const sourceSingle = [
{
number: 1,
source: '...',
tokens: seedTokens({ description: ' one two ' }),
},
];
const sourceMultiple = [
{
number: 1,
source: '...',
tokens: seedTokens({ description: 'one two ' }),
},
{
number: 2,
source: '...',
tokens: seedTokens({ description: '' }),
},
{
number: 3,
source: '...',
tokens: seedTokens({ description: ' three four' }),
},
{
number: 4,
source: '...',
tokens: seedTokens({ description: '' }),
},
];
test('compact - single line', () => {
const tokenize = descriptionTokenizer('compact');
const input = seedSpec({ source: sourceSingle });
const output = seedSpec({ source: sourceSingle, description: 'one two' });
expect(tokenize(input)).toEqual(output);
});
test('compact - multiple lines', () => {
const tokenize = descriptionTokenizer('compact');
const input = seedSpec({ source: sourceMultiple });
const output = seedSpec({
source: sourceMultiple,
description: 'one two three four',
});
expect(tokenize(input)).toEqual(output);
});
test('preserve - multiple lines', () => {
const tokenize = descriptionTokenizer('preserve');
const input = seedSpec({ source: sourceMultiple });
const output = seedSpec({
source: sourceMultiple,
description: 'one two \n\n three four\n',
});
expect(tokenize(input)).toEqual(output);
});
test('preserve - one-liner', () => {
const tokenize = descriptionTokenizer('preserve');
const input = seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
delimiter: '/**',
postDelimiter: ' ',
description: 'description',
end: '*/',
}),
},
],
});
const output = seedSpec({
description: 'description',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
delimiter: '/**',
postDelimiter: ' ',
description: 'description',
end: '*/',
}),
},
],
});
expect(tokenize(input)).toEqual(output);
});
test('preserve - leading empty lines', () => {
const source = [
{
number: 1,
source: '...',
tokens: seedTokens({ delimiter: '/**' }),
},
{
number: 2,
source: '...',
tokens: seedTokens(),
},
{
number: 3,
source: '...',
tokens: seedTokens({ description: ' line 1 ' }),
},
{
number: 4,
source: '...',
tokens: seedTokens({ description: ' line 2 ' }),
},
{
number: 5,
source: '...',
tokens: seedTokens({ description: '' }),
},
];
const tokenize = descriptionTokenizer('preserve');
const input = seedSpec({ source });
const output = seedSpec({
source,
description: '\n line 1 \n line 2 \n',
});
expect(tokenize(input)).toEqual(output);
});
test('preserve - leading type lines', () => {
const source = [
{
number: 1,
source: '...',
tokens: seedTokens({ delimiter: '/**' }),
},
{
number: 2,
source: '...',
tokens: seedTokens(),
},
{
number: 3,
source: '...',
tokens: seedTokens({ type: '{function(' }),
},
{
number: 4,
source: '...',
tokens: seedTokens({ type: ' number' }),
},
{
number: 5,
source: '...',
tokens: seedTokens({
type: ')}',
postType: ' ',
description: 'line 1 ',
}),
},
{
number: 6,
source: '...',
tokens: seedTokens({ description: ' line 2 ' }),
},
{
number: 7,
source: '...',
tokens: seedTokens({ description: '' }),
},
];
const tokenize = descriptionTokenizer('preserve');
const input = seedSpec({ source });
const output = seedSpec({
source,
description: 'line 1 \n line 2 \n',
});
expect(tokenize(input)).toEqual(output);
});
test('custom joiner - single line', () => {
const tokenize = descriptionTokenizer((lines) => {
return lines
.reduce((str, { tokens: { description } }) => {
const trimmed = description.trim();
if (!trimmed) {
return str;
}
return str + ' ' + trimmed;
}, '')
.slice(1);
});
const input = seedSpec({ source: sourceSingle });
const output = seedSpec({ source: sourceSingle, description: 'one two' });
expect(tokenize(input)).toEqual(output);
});
test('custom joiner - multiple lines', () => {
const tokenize = descriptionTokenizer((lines) => {
return lines
.reduce((str, { tokens: { description } }) => {
const trimmed = description.trim();
if (!trimmed) {
return str;
}
return str + ' ' + trimmed;
}, '')
.slice(1);
});
const input = seedSpec({ source: sourceMultiple });
const output = seedSpec({
source: sourceMultiple,
description: 'one two three four',
});
expect(tokenize(input)).toEqual(output);
});


@@ -0,0 +1,736 @@
import nameTokenizer from '../../src/parser/tokenizers/name.js';
import { seedTokens, seedSpec } from '../../src/util.js';
const tokenize = nameTokenizer();
test('single word', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({ description: 'param param description 0' }),
},
],
})
)
).toEqual(
seedSpec({
name: 'param',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
name: 'param',
postName: ' ',
description: 'param description 0',
}),
},
],
})
);
});
test('dash-delimited', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({ description: 'param-param description 0' }),
},
],
})
)
).toEqual(
seedSpec({
name: 'param-param',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
name: 'param-param',
postName: ' ',
description: 'description 0',
}),
},
],
})
);
});
test('quoted', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({ description: '"param param" description 0' }),
},
],
})
)
).toEqual(
seedSpec({
name: 'param param',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
name: '"param param"',
postName: ' ',
description: 'description 0',
}),
},
],
})
);
});
test('inconsistent quotes', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({ description: '"param param description 0' }),
},
],
})
)
).toEqual(
seedSpec({
name: '"param',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
name: '"param',
postName: ' ',
description: 'param description 0',
}),
},
],
})
);
});
test('optional', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({ description: '[param] param description' }),
},
],
})
)
).toEqual(
seedSpec({
name: 'param',
optional: true,
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
name: '[param]',
postName: ' ',
description: 'param description',
}),
},
],
})
);
});
test('optional with default', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[param=value] param description',
}),
},
],
})
)
).toEqual(
seedSpec({
name: 'param',
optional: true,
default: 'value',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
name: '[param=value]',
postName: ' ',
description: 'param description',
}),
},
],
})
);
});
test('quoted default', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[param="value"] param description',
}),
},
],
})
)
).toEqual(
seedSpec({
name: 'param',
optional: true,
default: '"value"',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
name: '[param="value"]',
postName: ' ',
description: 'param description',
}),
},
],
})
);
});
test('loosely quoted default', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[param="value] param description',
}),
},
],
})
)
).toEqual(
seedSpec({
name: 'param',
optional: true,
default: '"value',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
name: '[param="value]',
postName: ' ',
description: 'param description',
}),
},
],
})
);
});
test('quoted default with =', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[param="value=1"] param description',
}),
},
],
})
)
).toEqual(
seedSpec({
name: 'param',
optional: true,
default: '"value=1"',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
name: '[param="value=1"]',
postName: ' ',
description: 'param description',
}),
},
],
})
);
});
test('non-alphanumeric', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '$param description',
}),
},
],
})
)
).toEqual(
seedSpec({
name: '$param',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
name: '$param',
postName: ' ',
description: 'description',
}),
},
],
})
);
});
test('spread notation', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '...params description',
}),
},
],
})
)
).toEqual(
seedSpec({
name: '...params',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
name: '...params',
postName: ' ',
description: 'description',
}),
},
],
})
);
});
test('optional spread notation', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[...params] description',
}),
},
],
})
)
).toEqual(
seedSpec({
name: '...params',
optional: true,
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
name: '[...params]',
postName: ' ',
description: 'description',
}),
},
],
})
);
});
test('optional multiple words', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[param name] param description',
}),
},
],
})
)
).toEqual(
seedSpec({
name: 'param name',
optional: true,
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
name: '[param name]',
postName: ' ',
description: 'param description',
}),
},
],
})
);
});
test('name spacing', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[ param = value ] param description',
}),
},
],
})
)
).toEqual(
seedSpec({
name: 'param',
optional: true,
default: 'value',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
name: '[ param = value ]',
postName: ' ',
description: 'param description',
}),
},
],
})
);
});
test('inconsistent brackets', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[param param description',
}),
},
],
})
)
).toEqual(
seedSpec({
problems: [
{
code: 'spec:name:unpaired-brackets',
line: 1,
critical: true,
message: 'unpaired brackets',
},
],
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[param param description',
}),
},
],
})
);
});
test('empty name', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[=value] param description',
}),
},
],
})
)
).toEqual(
seedSpec({
problems: [
{
code: 'spec:name:empty-name',
line: 1,
critical: true,
message: 'empty name',
},
],
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[=value] param description',
}),
},
],
})
);
});
test('empty default value', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[param=] param description',
}),
},
],
})
)
).toEqual(
seedSpec({
problems: [
{
code: 'spec:name:empty-default',
line: 1,
critical: true,
message: 'empty default value',
},
],
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[param=] param description',
}),
},
],
})
);
});
test('empty', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[] param description',
}),
},
],
})
)
).toEqual(
seedSpec({
problems: [
{
code: 'spec:name:empty-name',
line: 1,
critical: true,
message: 'empty name',
},
],
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[] param description',
}),
},
],
})
);
});
test('default value syntax', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[param=value=value] param description',
}),
},
],
})
)
).toEqual(
seedSpec({
problems: [
{
code: 'spec:name:invalid-default',
line: 1,
critical: true,
message: 'invalid default value syntax',
},
],
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[param=value=value] param description',
}),
},
],
})
);
});
test('default with arrow', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '[param = value => value] param description',
}),
},
],
})
)
).toEqual(
seedSpec({
name: 'param',
optional: true,
default: 'value => value',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
name: '[param = value => value]',
postName: ' ',
description: 'param description',
}),
},
],
})
);
});
test('after multiline {type}', () => {
const sourceIn = [
{
number: 0,
source: '...',
tokens: seedTokens({
tag: '@param',
postTag: ' ',
type: '{function(',
}),
},
{
number: 1,
source: '...',
tokens: seedTokens({ type: ' number' }),
},
{
number: 2,
source: '...',
tokens: seedTokens({
type: ')}',
postType: ' ',
description: 'paramname description text',
}),
},
];
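// Expected output mirrors the input, except the last line gets the name
// split out of its description.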
const sourceOut = JSON.parse(JSON.stringify(sourceIn));
Object.assign(sourceOut[2].tokens, {
name: 'paramname',
postName: ' ',
description: 'description text',
});
expect(tokenize(seedSpec({ source: sourceIn }))).toEqual(
seedSpec({
name: 'paramname',
source: sourceOut,
})
);
});

View File

@ -0,0 +1,184 @@
import descriptionTokenizer from '../../src/parser/tokenizers/description.js';
import nameTokenizer from '../../src/parser/tokenizers/name.js';
import tagTokenizer from '../../src/parser/tokenizers/tag.js';
import typeTokenizer from '../../src/parser/tokenizers/type.js';
import getParser from '../../src/parser/spec-parser.js';
import { seedTokens, seedSpec } from '../../src/util.js';
import { Spec, Problem } from '../../src/primitives.js';
const parse = getParser({
tokenizers: [
tagTokenizer(),
typeTokenizer(),
nameTokenizer(),
descriptionTokenizer(),
],
});
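// Helper: builds a tokenizer stub that leaves the spec untouched except for
// appending a single problem; used below to check how the parser collects
// non-critical problems and stops on the first critical one.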
const tokenizer = (message: string, critical = false) => {
const problem: Problem = {
code: 'custom',
line: 1,
message,
critical,
};
return (spec: Spec) => ({
...spec,
problems: [...spec.problems, problem],
});
};
test('all tokens', () => {
const parsed = parse([
{
number: 1,
source: '...',
tokens: seedTokens({
description: '@param {type} [name=value] description',
}),
},
]);
expect(parsed).toEqual(
seedSpec({
tag: 'param',
type: 'type',
name: 'name',
default: 'value',
optional: true,
description: 'description',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
tag: '@param',
postTag: ' ',
type: '{type}',
postType: ' ',
name: '[name=value]',
postName: ' ',
description: 'description',
}),
},
],
})
);
});
test('quotes in description', () => {
const parsed = parse([
{
number: 1,
source: '...',
tokens: seedTokens({
description:
'@param {type} [name=value] description with "quoted" word',
}),
},
]);
expect(parsed).toEqual(
seedSpec({
tag: 'param',
type: 'type',
name: 'name',
default: 'value',
optional: true,
description: 'description with "quoted" word',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
tag: '@param',
postTag: ' ',
type: '{type}',
postType: ' ',
name: '[name=value]',
postName: ' ',
description: 'description with "quoted" word',
}),
},
],
})
);
});
test('collect non-critical errors', () => {
const parse = getParser({
tokenizers: [tokenizer('warning 1'), tokenizer('warning 2')],
});
const parsed = parse([
{
number: 1,
source: '...',
tokens: seedTokens({ description: 'description' }),
},
]);
expect(parsed).toEqual({
tag: '',
name: '',
optional: false,
type: '',
description: '',
problems: [
{
code: 'custom',
line: 1,
message: 'warning 1',
critical: false,
},
{
code: 'custom',
line: 1,
message: 'warning 2',
critical: false,
},
],
source: [
{
number: 1,
source: '...',
tokens: seedTokens({ description: 'description' }),
},
],
});
});
test('stop on critical error', () => {
const parse = getParser({
tokenizers: [tokenizer('error 1', true), tokenizer('warning 2')],
});
const parsed = parse([
{
number: 1,
source: '...',
tokens: seedTokens({ description: 'description' }),
},
]);
expect(parsed).toEqual({
tag: '',
name: '',
optional: false,
type: '',
description: '',
problems: [
{
code: 'custom',
line: 1,
message: 'error 1',
critical: true,
},
],
source: [
{
number: 1,
source: '...',
tokens: seedTokens({ description: 'description' }),
},
],
});
});

View File

@ -0,0 +1,112 @@
import tagTokenizer from '../../src/parser/tokenizers/tag.js';
import { seedTokens, seedSpec } from '../../src/util.js';
const tokenize = tagTokenizer();
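// The tag tokenizer splits a leading "@tag" off the description token and
// records a critical problem when the "@" prefix is missing (see tests below).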
test('ok', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '@param {string} value value description 0',
}),
},
],
})
)
).toEqual(
seedSpec({
tag: 'param',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
tag: '@param',
postTag: ' ',
description: '{string} value value description 0',
}),
},
],
})
);
});
test('require @', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 42,
source: '...',
tokens: seedTokens({
description: 'param {string} value value description 0',
}),
},
],
})
)
).toEqual(
seedSpec({
problems: [
{
code: 'spec:tag:prefix',
message: 'tag should start with "@" symbol',
critical: true,
line: 42,
},
],
source: [
{
number: 42,
source: '...',
tokens: seedTokens({
description: 'param {string} value value description 0',
}),
},
],
})
);
});
test.each([
['@+tag', '+tag'],
['@-tag', '-tag'],
['@.tag', '.tag'],
])('loose tag - %s', (token, tag) => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: token + ' name description',
}),
},
],
})
)
).toEqual(
seedSpec({
tag,
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
tag: token,
postTag: ' ',
description: 'name description',
}),
},
],
})
);
});

View File

@ -0,0 +1,265 @@
import typeTokenizer, { Joiner } from '../../src/parser/tokenizers/type.js';
import { seedTokens, seedSpec } from '../../src/util.js';
const tokenize = typeTokenizer();
test('ok', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '{string} param param description 0',
}),
},
],
})
)
).toEqual(
seedSpec({
type: 'string',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
type: '{string}',
postType: ' ',
description: 'param param description 0',
}),
},
],
})
);
});
test('inconsistent curlies', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '{string param param description 0',
}),
},
],
})
)
).toEqual(
seedSpec({
problems: [
{
code: 'spec:type:unpaired-curlies',
line: 1,
message: 'unpaired curlies',
critical: true,
},
],
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '{string param param description 0',
}),
},
],
})
);
});
test('object notation', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '{{a: 1}} param description',
}),
},
],
})
)
).toEqual(
seedSpec({
type: '{a: 1}',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
type: '{{a: 1}}',
postType: ' ',
description: 'param description',
}),
},
],
})
);
});
test('omit', () => {
expect(
tokenize(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: 'string param param description 0',
}),
},
],
})
)
).toEqual(
seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: 'string param param description 0',
}),
},
],
})
);
});
test('multiline', () => {
const spec = seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '{function(',
}),
},
{
number: 2,
source: '...',
tokens: seedTokens({
postDelimiter: ' ',
description: 'number)} function type',
}),
},
{
number: 3,
source: '...',
tokens: seedTokens(),
},
{
number: 4,
source: '...',
tokens: seedTokens({
end: '*/',
}),
},
],
});
const tokenized = tokenize(spec);
const expected = seedSpec({
type: 'function(number)',
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
type: '{function(',
}),
},
{
number: 2,
source: '...',
tokens: seedTokens({
type: ' number)}',
postType: ' ',
description: 'function type',
}),
},
{
number: 3,
source: '...',
tokens: seedTokens(),
},
{
number: 4,
source: '...',
tokens: seedTokens({
end: '*/',
}),
},
],
});
expect(tokenized).toEqual(expected);
});
test.each([
['default', undefined, 'function(number,string)'],
['preserve', 'preserve', 'function(\n number,\n string\n)'],
['compact', 'compact', 'function(number,string)'],
[
'custom',
(t: string[]) => t.map((x: string) => x.trim()).join(''),
'function(number,string)',
],
])('spacing - %s', (name, spacing, type) => {
const tokenize =
spacing === 'preserve' ||
spacing === 'compact' ||
typeof spacing === 'function'
? typeTokenizer(spacing)
: typeTokenizer();
const spec = seedSpec({
source: [
{
number: 1,
source: '...',
tokens: seedTokens({
description: '{function(',
}),
},
{
number: 2,
source: '...',
tokens: seedTokens({
postDelimiter: ' ',
description: 'number,',
}),
},
{
number: 2,
source: '...',
tokens: seedTokens({
postDelimiter: ' ',
description: 'string',
}),
},
{
number: 3,
source: '...',
tokens: seedTokens({
description: ')} function type',
}),
},
],
});
const tokenized = tokenize(spec);
expect(tokenized.type).toEqual(type);
});

View File

@ -0,0 +1,169 @@
import getStringifier from '../../src/stringifier/index.js';
const source = [
{
number: 1,
source: ' /**',
tokens: {
start: ' ',
delimiter: '/**',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '',
lineEnd: '',
},
},
{
number: 2,
source: ' * Description may go',
tokens: {
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: 'Description may go',
end: '',
lineEnd: '',
},
},
{
number: 3,
source: ' * over multiple lines followed by @tags',
tokens: {
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: 'over multiple lines followed by @tags',
end: '',
lineEnd: '',
},
},
{
number: 4,
source: ' *',
tokens: {
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '',
lineEnd: '',
},
},
{
number: 5,
source: '* @my-tag {my.type} my-name description line 1',
tokens: {
start: '',
delimiter: '*',
postDelimiter: ' ',
tag: '@my-tag',
postTag: ' ',
name: 'my-name',
postName: ' ',
type: '{my.type}',
postType: ' ',
description: 'description line 1',
end: '',
lineEnd: '',
},
},
{
number: 6,
source: ' description line 2',
tokens: {
start: ' ',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: 'description line 2',
end: '',
lineEnd: '',
},
},
{
number: 7,
source: ' * description line 3',
tokens: {
start: ' ',
delimiter: '*',
postDelimiter: ' ',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: 'description line 3',
end: '',
lineEnd: '',
},
},
{
number: 8,
source: ' */',
tokens: {
start: ' ',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '*/',
lineEnd: '',
},
},
];
const block = {
description: '',
tags: [],
source,
problems: [],
};
test('default', () => {
const s = getStringifier()(block);
const expected = `
/**
* Description may go
* over multiple lines followed by @tags
*
* @my-tag {my.type} my-name description line 1
description line 2
* description line 3
*/`;
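// slice(1) drops the leading newline introduced by the template literal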
expect(s).toBe(expected.slice(1));
});

View File

@ -0,0 +1,165 @@
import align from '../../src/transforms/align.js';
import getParser, { Parser } from '../../src/parser/index.js';
import getStringifier, { Stringifier } from '../../src/stringifier/index.js';
let parse: Parser;
let stringify: Stringifier;
beforeEach(() => {
parse = getParser();
stringify = getStringifier();
});
test('multiline', () => {
const source = `
/**
* Description may go
* over multiple lines followed by @tags
*
* @some-tag {some-type} some-name description line 1
* @another-tag {another-type} another-name description line 1
description line 2
* description line 3
*/`;
const expected = `
/**
* Description may go
* over multiple lines followed by @tags
*
* @some-tag {some-type} some-name description line 1
* @another-tag {another-type} another-name description line 1
description line 2
* description line 3
*/`.slice(1);
const parsed = parse(source);
const aligned = align()(parsed[0]);
const out = stringify(aligned);
// console.log(inspect(aligned));
expect(out).toBe(expected);
});
test('one-liner', () => {
const source = ` /** @tag {type} name description */`;
const parsed = parse(source);
const out = stringify(align()(parsed[0]));
expect(out).toBe(source);
});
test('same line open', () => {
const source = `
/** @tag {type} name description
*/`.slice(1);
const parsed = parse(source);
const out = stringify(align()(parsed[0]));
expect(out).toBe(source);
});
test('same line close', () => {
const source = `
/**
* @tag {type} name description */`;
const expected = `
/**
* @tag {type} name description */`.slice(1);
const parsed = parse(source);
const aligned = align()(parsed[0]);
const out = stringify(aligned);
expect(out).toBe(expected);
});
test('spec source referencing', () => {
const parsed = parse(`/** @tag {type} name Description */`);
const block = align()(parsed[0]);
expect(block.tags[0].source[0] === block.source[0]).toBe(true);
});
test('block source cloning', () => {
const parsed = parse(`/** @tag {type} name Description */`);
const block = align()(parsed[0]);
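// Mutating the original parsed block must not leak into the aligned clone.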
parsed[0].source[0].tokens.description = 'test';
expect(block.source[0].tokens.description).toBe('Description ');
});
test('ignore right whitespace', () => {
const source = `
/**
* Description may go
* over multiple lines followed by @tags
* @private
* @param {string} name
* @param {any} value the value parameter
*
*/`.slice(1);
const expected = `
/**
* Description may go
* over multiple lines followed by @tags
* @private
* @param {string} name
* @param {any} value the value parameter
*
*/`.slice(1);
const parsed = parse(source);
const aligned = align()(parsed[0]);
const stringified = stringify(aligned);
expect(stringified).toEqual(expected);
});
test('collapse postDelimiter', () => {
const source = `
/**
* Description may go
* over multiple lines followed by @tags
* @param {string} name the name parameter
* @param {any} value the value parameter
*/`.slice(1);
const expected = `
/**
* Description may go
* over multiple lines followed by @tags
* @param {string} name the name parameter
* @param {any} value the value parameter
*/`.slice(1);
const parsed = parse(source);
const aligned = align()(parsed[0]);
const stringified = stringify(aligned);
expect(stringified).toEqual(expected);
});
test('keep carriage returns', () => {
const source = `
/**\r\r
* Description may go\r
* over multiple lines followed by @tags\r
* @param {string} name the name parameter\r
* @param {any} value\r
*/\r`.slice(1);
const expected = `
/**\r\r
* Description may go\r
* over multiple lines followed by @tags\r
* @param {string} name the name parameter\r
* @param {any} value\r
*/\r`.slice(1);
const parsed = parse(source);
const aligned = align()(parsed[0]);
const stringified = stringify(aligned);
expect(stringified).toEqual(expected);
});

View File

@ -0,0 +1,70 @@
import crlf, { Ending } from '../../src/transforms/crlf.js';
import getParser, { Parser } from '../../src/parser/index.js';
import getStringifier, { Stringifier } from '../../src/stringifier/index.js';
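// Test cases: [description, target line ending, source block, expected block].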
const tests = [
[
'no CR',
'CRLF',
`
/**
* description
*
*/`,
`
/**\r
* description\r
*\r
*/\r`,
],
[
'mixed',
'CRLF',
`
/**
* description
*\r
*/`,
`
/**\r
* description\r
*\r
*/\r`,
],
[
'no CR',
'LF',
`
/**
* description
*
*/`,
`
/**
* description
*
*/`,
],
[
'mixed',
'LF',
`
/**
* description
*\r
*/`,
`
/**
* description
*
*/`,
],
];
test.each(tests)('CRLF - %s to %s', (name, mode, source, expected) => {
expected = expected.slice(1);
const parsed = getParser()(source);
const normalized = crlf(mode as Ending)(parsed[0]);
const out = getStringifier()(normalized);
expect(out).toBe(expected);
});

View File

@ -0,0 +1,95 @@
import indent from '../../src/transforms/indent.js';
import getParser from '../../src/parser/index.js';
import getStringifier from '../../src/stringifier/index.js';
test('push', () => {
const source = `
/**
* Description may go
* over multiple lines followed by @tags
*
* @my-tag {my.type} my-name description line 1
description line 2
* description line 3
*/`;
const expected = `
/**
* Description may go
* over multiple lines followed by @tags
*
* @my-tag {my.type} my-name description line 1
description line 2
* description line 3
*/`;
const parsed = getParser()(source);
const out = getStringifier()(indent(4)(parsed[0]));
expect(out).toBe(expected.slice(1));
});
test('pull', () => {
const source = `
/**
* Description may go
* over multiple lines followed by @tags
*
* @my-tag {my.type} my-name description line 1
description line 2
* description line 3
*/`;
const expected = `
/**
* Description may go
* over multiple lines followed by @tags
*
* @my-tag {my.type} my-name description line 1
description line 2
* description line 3
*/`;
const parsed = getParser()(source);
const out = getStringifier()(indent(2)(parsed[0]));
expect(out).toBe(expected.slice(1));
});
test('force pull', () => {
const source = `
/**
* Description may go
* over multiple lines followed by @tags
*
* @my-tag {my.type} my-name description line 1
description line 2
* description line 3
*/`;
const expected = `
/**
* Description may go
* over multiple lines followed by @tags
*
* @my-tag {my.type} my-name description line 1
description line 2
* description line 3
*/`;
const parsed = getParser()(source);
const indented = indent(0)(parsed[0]);
const out = getStringifier()(indented);
expect(out).toBe(expected.slice(1));
});
test('spec source referencing', () => {
const parsed = getParser()(`/** @tag {type} name Description */`);
const block = indent(0)(parsed[0]);
expect(block.tags[0].source[0] === block.source[0]).toBe(true);
});
test('block source cloning', () => {
const parsed = getParser()(`/** @tag {type} name Description */`);
const block = indent(0)(parsed[0]);
parsed[0].source[0].tokens.description = 'test';
expect(block.source[0].tokens.description).toBe('Description ');
});

View File

@ -0,0 +1,21 @@
import { flow } from '../../src/transforms/index.js';
import { seedBlock } from '../../src/util.js';
import { Block } from '../../src/primitives.js';
const t0 = (b: Block): Block => ({ ...b, description: b.description + ' t0' });
const t1 = (b: Block): Block => ({ ...b, description: b.description + ' t1' });
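// flow() composes transforms left to right: t0 is applied first, then t1.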
test('multiple', () => {
const block = seedBlock({ description: 'test' });
expect(flow(t0, t1)(block).description).toBe('test t0 t1');
});
test('one', () => {
const block = seedBlock({ description: 'test' });
expect(flow(t0)(block).description).toBe('test t0');
});
test('none', () => {
const block = seedBlock({ description: 'test' });
expect(flow()(block).description).toBe('test');
});

View File

@ -0,0 +1,107 @@
import { seedTokens, rewireSource, rewireSpecs } from '../../src/util.js';
test('source to spec', () => {
const block = {
description: '',
tags: [
{
tag: 'my-tag',
name: '',
type: '',
optional: false,
description: '',
problems: [],
source: [
{
number: 2,
source: '...changed in spec...',
tokens: seedTokens({ name: '...changed in spec...' }),
},
],
},
],
source: [
{
number: 1,
source: 'source line 1',
tokens: seedTokens(),
},
{
number: 2,
source: 'source line 2',
tokens: seedTokens({ name: 'source' }),
},
],
problems: [],
};
// source is unsynced
expect(block.source[1] === block.tags[0].source[0]).toBe(false);
rewireSource(block);
// source is referenced
expect(block.source[1] === block.tags[0].source[0]).toBe(true);
// non-tag line stays unchanged
expect(block.source[0].source).toEqual('source line 1');
// tag-holding source line stays unchanged
expect(block.source[1].source).toEqual('source line 2');
expect(block.source[1].tokens.name).toEqual('source');
// tag source inherits block source
expect(block.tags[0].source[0].source).toEqual('source line 2');
expect(block.tags[0].source[0].tokens.name).toEqual('source');
});
test('spec to source', () => {
const block = {
description: '',
tags: [
{
tag: 'my-tag',
name: '',
type: '',
optional: false,
description: '',
problems: [],
source: [
{
number: 2,
source: '...changed in spec...',
tokens: seedTokens({ name: '...changed in spec...' }),
},
],
},
],
source: [
{
number: 1,
source: 'source line 1',
tokens: seedTokens(),
},
{
number: 2,
source: 'source line 2',
tokens: seedTokens({ name: 'source' }),
},
],
problems: [],
};
// source is unsynced
expect(block.source[1] === block.tags[0].source[0]).toBe(false);
rewireSpecs(block);
// source is referenced
expect(block.source[1] === block.tags[0].source[0]).toBe(true);
// non-tag line stays unchanged
expect(block.source[0].source).toEqual('source line 1');
// tag-holding source line inherits spec source
expect(block.source[1].source).toEqual('...changed in spec...');
expect(block.source[1].tokens.name).toEqual('...changed in spec...');
// tag source inherits block source
expect(block.tags[0].source[0].source).toEqual('...changed in spec...');
expect(block.tags[0].source[0].tokens.name).toEqual('...changed in spec...');
});

130
node_modules/comment-parser/tests/unit/util.spec.ts generated vendored Normal file
View File

@ -0,0 +1,130 @@
import {
hasCR,
isSpace,
seedTokens,
seedBlock,
splitLines,
splitSpace,
seedSpec,
} from '../../src/util.js';
test.each([
['beginning', '\r to end', false],
['middle', 'has \r in middle', false],
['ending', 'only at end \r', true],
['none', 'no carriage returns', false],
])('carriage returns - %s', (name, source, boolResult) => {
expect(hasCR(source)).toEqual(boolResult);
});
test.each([
['win', 'a\r\nb\r\nc', ['a\r', 'b\r', 'c']],
['unix', 'a\nb\nc', ['a', 'b', 'c']],
['mixed', 'a\nb\r\nc', ['a', 'b\r', 'c']],
['none', 'abc', ['abc']],
])('splitLines - %s', (name, source, parsed) =>
expect(splitLines(source)).toEqual(parsed)
);
test.each([
['pre', ' abc', [' ', 'abc']],
['pre', 'abc ', ['', 'abc ']],
['pre+post', ' abc ', [' ', 'abc ']],
['none', 'abc', ['', 'abc']],
])('splitSpace - %s', (name, source, parsed) =>
expect(splitSpace(source)).toEqual(parsed)
);
test.each([
['space', ' ', true],
['spaces', ' ', true],
['tab', '\t', true],
['tabs', '\t\t', true],
['line end', '\n', true],
['line ends', '\n\n', true],
['line return', '\r', true],
['line returns', '\r\r', true],
['mixed space', '\n\r\t', true],
['mixed', '\naba', false],
['alphanumeric', '1abcd34', false],
['symbols', '*', false],
['empty', '', false],
])('isSpace - %s', (name, source, result) =>
expect(isSpace(source)).toBe(result)
);
test('seedTokens defaults', () => {
expect(seedTokens()).toEqual({
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: '',
end: '',
lineEnd: '',
});
});
test('seedTokens overrides', () => {
expect(seedTokens({ description: 'abc' })).toEqual({
start: '',
delimiter: '',
postDelimiter: '',
tag: '',
postTag: '',
name: '',
postName: '',
type: '',
postType: '',
description: 'abc',
end: '',
lineEnd: '',
});
});
test('seedBlock defaults', () => {
expect(seedBlock()).toEqual({
description: '',
tags: [],
source: [],
problems: [],
});
});
test('seedBlock overrides', () => {
expect(seedBlock({ description: 'abc' })).toEqual({
description: 'abc',
tags: [],
source: [],
problems: [],
});
});
test('seedSpec defaults', () => {
expect(seedSpec()).toEqual({
tag: '',
name: '',
type: '',
optional: false,
description: '',
problems: [],
source: [],
});
});
test('seedSpec overrides', () => {
expect(seedSpec({ description: 'abc' })).toEqual({
tag: '',
name: '',
type: '',
optional: false,
description: 'abc',
problems: [],
source: [],
});
});