refactor: convert to ESM + update all dependencies (#120)

This commit is contained in:
Samuel Attard 2024-09-23 10:52:13 -07:00 committed by GitHub
Parent 2c5b009baa
Commit 43bc6bc93b
No known key found for this signature
GPG key ID: B5690EEEBB952194
14 changed files with 1627 additions and 879 deletions
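At a glance, the conversion follows the standard pattern for an ESM-only Node 20+ package: package.json gains "type": "module", built-ins are imported through the node: prefix, relative imports spell out their .js extension, and __dirname gives way to import.meta.dirname (available from Node 20.11 / 21.2, matching the new engines range). A minimal sketch of the import style the converted sources use; the console.log line is illustrative:

import path from 'node:path';              // built-ins via the node: prefix
import { parseDocs } from './index.js';    // explicit .js extension, even in .ts sources,
                                           // because tsc copies specifiers through verbatim
const distDir = path.join(import.meta.dirname, 'dist'); // replaces path.join(__dirname, 'dist')
console.log(distDir);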

View file

@@ -10,14 +10,6 @@ workflows:
jobs:
- node/test:
name: test-<< matrix.executor >>-<< matrix.node-version >>
pre-steps:
- when:
condition:
and:
- equal: [ node/macos, << matrix.executor >> ]
- equal: [ '14.16', << matrix.node-version >> ]
steps:
- node/install-rosetta
test-steps:
- run: yarn prettier:check
- run: yarn build
@@ -31,11 +23,8 @@ workflows:
- node/macos
- node/windows
node-version:
- '20.9'
- '18.17'
- '16.20'
# Stay below 14.17.0 or nvm tries to download arm64 artifacts which don't exist
- '14.16'
- '22.9'
- '20.11'
- cfa/release:
requires:
- test

1
.gitattributes Vendored Normal file
View file

@@ -0,0 +1 @@
* text=auto eol=lf

1
.husky/pre-commit Normal file
View file

@@ -0,0 +1 @@
yarn lint-staged

View file

@@ -1,8 +1,13 @@
const path = require('path');
import path from 'node:path';
import { createDefaultEsmPreset } from 'ts-jest';
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
testPathIgnorePatterns: ['node_modules', path.resolve(__dirname, 'dist')],
setupFiles: ['<rootDir>/jest.setup.js'],
};
/** @type {import('ts-jest').JestConfigWithTsJest} **/
export default {
moduleNameMapper: {
'^(\\.{1,2}/.*)\\.js$': '$1',
},
testPathIgnorePatterns: ['node_modules', path.resolve(import.meta.dirname, 'dist')],
...createDefaultEsmPreset({
tsconfig: 'tsconfig.json',
})
};
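The Jest config is now itself an ES module built on ts-jest's ESM preset. The moduleNameMapper entry strips the .js extension from relative specifiers so imports written as './markdown-helpers.js' resolve to the .ts sources during tests, and the test script later in this commit runs Jest with --experimental-vm-modules, which Jest's native ESM support still requires. A small sketch of a spec that relies on that mapping; the assertion is illustrative:

import { expect } from 'chai';
import { safelyJoinTokens } from './markdown-helpers.js'; // mapped back to markdown-helpers.ts by moduleNameMapper

describe('esm test setup', () => {
  it('resolves .js specifiers against the TypeScript sources', () => {
    expect(typeof safelyJoinTokens).to.equal('function');
  });
});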

View file

@@ -3,17 +3,19 @@
"version": "0.0.0-development",
"description": "Parse Electron documentation into a machine readable JSON file",
"main": "dist/index.js",
"type": "module",
"author": "Samuel Attard",
"license": "MIT",
"engines": {
"node": "^14.15.0 || ^16.10.0 || >=18.0.0"
"node": "^20.11.0 || >= 21.2.0"
},
"scripts": {
"build": "tsc",
"prepublishOnly": "npx yarn build",
"prettier:check": "prettier --list-different \"src/**/*.{ts,tsx}\"",
"prettier:write": "prettier --write \"src/**/*.{ts,tsx}\"",
"test": "jest"
"test": "cross-env NODE_OPTIONS=\"--experimental-vm-modules\" jest",
"prepare": "husky"
},
"bin": {
"electron-docs-linter": "./dist/bin.js",
@@ -24,27 +26,30 @@
"!dist/__tests__"
],
"devDependencies": {
"@types/chai": "^4.2.5",
"@types/fs-extra": "^8.0.1",
"@types/jest": "^24.0.23",
"@types/lodash.camelcase": "^4.3.6",
"@types/minimist": "^1.2.0",
"@types/chai": "^4.3.19",
"@types/jest": "^29.5.13",
"@types/lodash.camelcase": "^4.3.9",
"@types/node": "^22.5.5",
"@types/pretty-ms": "^5.0.1",
"jest": "^29.3.1",
"prettier": "^1.19.1",
"ts-jest": "^29.0.3",
"typescript": "^4.9.3"
"cross-env": "^7.0.3",
"husky": "^9.1.6",
"jest": "^30.0.0-alpha.6",
"lint-staged": "^15.2.10",
"prettier": "^3.3.3",
"ts-jest": "^29.2.5",
"typescript": "^5.6.2"
},
"dependencies": {
"@types/markdown-it": "^12.0.0",
"chai": "^4.2.0",
"chalk": "^3.0.0",
"fs-extra": "^8.1.0",
"@types/markdown-it": "^14.1.2",
"chai": "^5.1.1",
"chalk": "^5.3.0",
"lodash.camelcase": "^4.3.0",
"markdown-it": "^12.0.0",
"minimist": "^1.2.0",
"ora": "^4.0.3",
"pretty-ms": "^5.1.0"
"markdown-it": "^14.1.0",
"ora": "^8.1.0",
"pretty-ms": "^9.1.0"
},
"lint-staged": {
"*.ts": "prettier --write"
},
"repository": {
"type": "git",

View file

@@ -1,9 +1,9 @@
import { expect } from 'chai';
import * as fs from 'fs-extra';
import fs from 'node:fs';
import MarkdownIt from 'markdown-it';
import Token from 'markdown-it/lib/token';
import { Token } from 'markdown-it';
import * as path from 'path';
import toCamelCase = require('lodash.camelcase');
import toCamelCase from 'lodash.camelcase';
import {
ParsedDocumentation,
@@ -13,7 +13,7 @@ import {
ModuleDocumentationContainer,
ClassDocumentationContainer,
ElementDocumentationContainer,
} from './ParsedDocumentation';
} from './ParsedDocumentation.js';
import {
findNextList,
convertListToTypedKeys,
@@ -28,15 +28,15 @@ import {
findContentAfterHeadingClose,
HeadingContent,
getContentBeforeFirstHeadingMatching,
} from './markdown-helpers';
import { WEBSITE_BASE_DOCS_URL, REPO_BASE_DOCS_URL } from './constants';
import { extendError } from './helpers';
} from './markdown-helpers.js';
import { WEBSITE_BASE_DOCS_URL, REPO_BASE_DOCS_URL } from './constants.js';
import { extendError } from './helpers.js';
import {
parseMethodBlocks,
_headingToMethodBlock,
parsePropertyBlocks,
parseEventBlocks,
} from './block-parsers';
} from './block-parsers.js';
export class DocsParser {
constructor(
@@ -108,7 +108,7 @@ export class DocsParser {
groups = getContentBeforeConstructor(tokens);
} else {
// FIXME: Make it so that we don't need this magic FIXME for the electron breaking-changes document
groups = getContentBeforeFirstHeadingMatching(tokens, heading =>
groups = getContentBeforeFirstHeadingMatching(tokens, (heading) =>
['Events', 'Methods', 'Properties', '`FIXME` comments'].includes(heading.trim()),
);
}
@@ -156,7 +156,7 @@ export class DocsParser {
| ClassDocumentationContainer
| ElementDocumentationContainer
)[] = [];
const contents = await fs.readFile(filePath, 'utf8');
const contents = await fs.promises.readFile(filePath, 'utf8');
const md = new MarkdownIt({ html: true });
const allTokens = md.parse(contents, {});
@@ -182,7 +182,7 @@
if (isClass) {
// Instance name will be taken either from an example in a method declaration or the camel
// case version of the class name
const levelFourHeader = headingsAndContent(tokens).find(h => h.level === 4);
const levelFourHeader = headingsAndContent(tokens).find((h) => h.level === 4);
const instanceName = levelFourHeader
? (levelFourHeader.heading.split('`')[1] || '').split('.')[0] ||
toCamelCase(container.name)
@@ -262,7 +262,7 @@
}
private async parseStructure(filePath: string): Promise<StructureDocumentationContainer> {
const contents = await fs.readFile(filePath, 'utf8');
const contents = await fs.promises.readFile(filePath, 'utf8');
const md = new MarkdownIt({ html: true });
const tokens = md.parse(contents, {});
@@ -279,7 +279,7 @@
return {
type: 'Structure',
...baseInfos[0].container,
properties: consumeTypedKeysList(convertListToTypedKeys(list!)).map(typedKey => ({
properties: consumeTypedKeysList(convertListToTypedKeys(list!)).map((typedKey) => ({
name: typedKey.key,
description: typedKey.description,
required: typedKey.required,
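fs-extra drops out of the parser in favour of the built-in promise API on node:fs, and every relative import now carries its .js extension. A condensed sketch of the read-and-parse step as it looks after the change; the helper name is illustrative:

import fs from 'node:fs';
import MarkdownIt from 'markdown-it';

async function tokensForFile(filePath: string) {
  const contents = await fs.promises.readFile(filePath, 'utf8'); // was fs-extra's readFile
  const md = new MarkdownIt({ html: true });
  return md.parse(contents, {}); // markdown-it token stream consumed by the heading/list helpers
}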

View file

@@ -172,7 +172,7 @@ export class ParsedDocumentation {
}
public getJSON(): ParsedDocumentationResult {
return this.repr.filter(container => {
return this.repr.filter((container) => {
if (container.type !== 'Module') return true;
return container.events.length + container.methods.length + container.properties.length > 0;

View file

@@ -57,7 +57,7 @@ describe('markdown-helpers', () => {
});
describe('snapshots', () => {
const fixtureDir = path.resolve(__dirname, 'fixtures');
const fixtureDir = path.resolve(import.meta.dirname, 'fixtures');
for (const markdownFixture of fs.readdirSync(fixtureDir)) {
if (!markdownFixture.endsWith('.md')) continue;

View file

@@ -1,22 +1,44 @@
#!/usr/bin/env node
import * as fs from 'fs-extra';
import minimist from 'minimist';
import chalk from 'chalk';
import fs from 'node:fs';
import { parseArgs } from 'node:util';
import ora from 'ora';
import * as path from 'path';
import pretty from 'pretty-ms';
import { parseDocs } from '.';
import chalk from 'chalk';
import { parseDocs } from './index.js';
const args = minimist(process.argv, {
default: {
packageMode: 'single',
const {
values: { outDir, dir, useReadme, moduleVersion, help, packageMode },
} = parseArgs({
options: {
packageMode: {
type: 'string',
default: 'single',
},
dir: {
type: 'string',
},
outDir: {
type: 'string',
},
useReadme: {
type: 'boolean',
},
moduleVersion: {
type: 'string',
},
help: {
type: 'boolean',
default: false,
},
},
});
const { dir, outDir, useReadme, packageMode, moduleVersion, help } = args;
if (!['single', 'multi'].includes(packageMode)) {
let safePackageMode = packageMode as 'single' | 'multi' | string;
if (safePackageMode !== 'single' && safePackageMode !== 'multi') {
console.error(chalk.red('packageMode must be one of "single" and "multi"'));
process.exit(1);
}
@@ -41,7 +63,7 @@ if (typeof moduleVersion !== 'string') {
}
const resolvedDir = path.isAbsolute(dir) ? dir : path.resolve(process.cwd(), dir);
if (!fs.pathExistsSync(resolvedDir)) {
if (!fs.existsSync(resolvedDir)) {
runner.fail(`${chalk.red('Resolved directory does not exist:')} ${chalk.cyan(resolvedDir)}`);
process.exit(1);
}
@@ -57,17 +79,18 @@ runner.text = chalk.cyan(`Generating API in directory: ${chalk.yellow(`"${resolv
const start = Date.now();
fs.mkdirp(resolvedOutDir).then(() =>
fs.promises.mkdir(resolvedOutDir, { recursive: true }).then(() =>
parseDocs({
useReadme: useReadme ? true : false,
baseDirectory: resolvedDir,
moduleVersion,
packageMode,
packageMode: safePackageMode,
})
.then(data =>
fs.writeJson(path.resolve(resolvedOutDir, './electron-api.json'), data, {
spaces: 2,
}),
.then((data) =>
fs.promises.writeFile(
path.resolve(resolvedOutDir, './electron-api.json'),
JSON.stringify(data, null, 2),
),
)
.then(() =>
runner.succeed(
@@ -76,7 +99,7 @@ fs.mkdirp(resolvedOutDir).then(() =>
)} took ${chalk.cyan(pretty(Date.now() - start))}`,
),
)
.catch(err => {
.catch((err) => {
runner.fail();
console.error(err);
process.exit(1);
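minimist is replaced by parseArgs from node:util, so argument parsing no longer needs a third-party dependency: options are declared up front with a type, their parsed values come back on the values object, and unrecognised flags are rejected because parseArgs is strict by default. A standalone sketch, with an illustrative invocation in the first comment:

// e.g. node dist/bin.js --dir ../electron/docs --outDir ./out --moduleVersion 1.2.3
import { parseArgs } from 'node:util';

const { values } = parseArgs({
  options: {
    dir: { type: 'string' },
    outDir: { type: 'string' },
    moduleVersion: { type: 'string' },
    useReadme: { type: 'boolean' },
    packageMode: { type: 'string', default: 'single' },
    help: { type: 'boolean', default: false },
  },
});
console.log(values.dir, values.packageMode); // string | undefined and string, typed by the option declarations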

View file

@@ -1,5 +1,5 @@
import { expect } from 'chai';
import Token from 'markdown-it/lib/token';
import { Token } from 'markdown-it';
import {
parseHeadingTags,
@@ -14,12 +14,12 @@ import {
StripReturnTypeBehavior,
consumeTypedKeysList,
slugifyHeading,
} from './markdown-helpers';
} from './markdown-helpers.js';
import {
MethodDocumentationBlock,
PropertyDocumentationBlock,
EventDocumentationBlock,
} from './ParsedDocumentation';
} from './ParsedDocumentation.js';
type GuessedParam = {
name: string;
@@ -110,7 +110,7 @@ export const _headingToMethodBlock = (
null,
`Method ${heading.heading} has at least one parameter but no parameter type list`,
);
parameters = consumeTypedKeysList(convertListToTypedKeys(list)).map(typedKey => ({
parameters = consumeTypedKeysList(convertListToTypedKeys(list)).map((typedKey) => ({
name: typedKey.key,
description: typedKey.description,
required: typedKey.required,
@@ -191,13 +191,11 @@ export const _headingToEventBlock = (heading: HeadingContent): EventDocumentatio
let parameters: EventDocumentationBlock['parameters'] = [];
if (
safelyJoinTokens(findContentAfterHeadingClose(heading.content))
.trim()
.startsWith('Returns:')
safelyJoinTokens(findContentAfterHeadingClose(heading.content)).trim().startsWith('Returns:')
) {
const list = findNextList(heading.content);
if (list) {
parameters = consumeTypedKeysList(convertListToTypedKeys(list)).map(typedKey => ({
parameters = consumeTypedKeysList(convertListToTypedKeys(list)).map((typedKey) => ({
name: typedKey.key,
description: typedKey.description,
...typedKey.type,
@@ -219,7 +217,7 @@ export const _headingToEventBlock = (heading: HeadingContent): EventDocumentatio
export const parseMethodBlocks = (tokens: Token[] | null): MethodDocumentationBlock[] => {
if (!tokens) return [];
return headingsAndContent(tokens).map(heading => _headingToMethodBlock(heading)!);
return headingsAndContent(tokens).map((heading) => _headingToMethodBlock(heading)!);
};
export const parsePropertyBlocks = (tokens: Token[] | null): PropertyDocumentationBlock[] => {

View file

@@ -1,6 +1,6 @@
import * as fs from 'fs-extra';
import fs from 'node:fs';
import * as path from 'path';
import { DocsParser } from './DocsParser';
import { DocsParser } from './DocsParser.js';
type ParseOptions = {
baseDirectory: string;
@@ -43,10 +43,10 @@ export async function parseDocs(options: ParseOptions) {
async function getAllMarkdownFiles(inDir: string) {
const allMarkdownFiles: string[] = [];
const children = await fs.readdir(inDir);
const children = await fs.promises.readdir(inDir);
for (const child of children) {
const childPath = path.resolve(inDir, child);
const stats = await fs.stat(childPath);
const stats = await fs.promises.stat(childPath);
if (path.extname(childPath) === '.md' && stats.isFile()) {
allMarkdownFiles.push(childPath);
}
@@ -55,4 +55,4 @@ async function getAllMarkdownFiles(inDir: string) {
return allMarkdownFiles;
}
export * from './ParsedDocumentation';
export * from './ParsedDocumentation.js';

View file

@@ -1,5 +1,5 @@
import { expect } from 'chai';
import Token from 'markdown-it/lib/token';
import { Token } from 'markdown-it';
import {
TypeInformation,
PropertyDocumentationBlock,
@@ -7,7 +7,7 @@ import {
PossibleStringValue,
DocumentationTag,
ProcessBlock,
} from './ParsedDocumentation';
} from './ParsedDocumentation.js';
const tagMap = {
macOS: DocumentationTag.OS_MACOS,
@@ -35,7 +35,7 @@ export const parseHeadingTags = (tags: string | null): DocumentationTag[] => {
parsedTags.push(match[1] as keyof typeof tagMap);
}
return parsedTags.map(value => {
return parsedTags.map((value) => {
if (tagMap[value]) return tagMap[value];
throw new Error(
@@ -45,7 +45,7 @@ export const parseHeadingTags = (tags: string | null): DocumentationTag[] => {
};
export const findNextList = (tokens: Token[]) => {
const start = tokens.findIndex(t => t.type === 'bullet_list_open');
const start = tokens.findIndex((t) => t.type === 'bullet_list_open');
if (start === -1) return null;
let opened = 1;
let end = -1;
@@ -63,7 +63,7 @@ export const findNextList = (tokens: Token[]) => {
};
export const findFirstHeading = (tokens: Token[]) => {
const open = tokens.findIndex(token => token.type === 'heading_open');
const open = tokens.findIndex((token) => token.type === 'heading_open');
expect(open).to.not.equal(-1, "expected to find a heading token but couldn't");
expect(tokens).to.have.lengthOf.at.least(open + 2);
expect(tokens[open + 2].type).to.equal('heading_close');
@@ -89,16 +89,16 @@ export const findContentAfterList = (tokens: Token[], returnAllOnNoList = false)
}
if (start === -1) {
if (!returnAllOnNoList) return [];
start = tokens.findIndex(t => t.type === 'heading_close');
start = tokens.findIndex((t) => t.type === 'heading_close');
}
const end = tokens.slice(start).findIndex(t => t.type === 'heading_open');
const end = tokens.slice(start).findIndex((t) => t.type === 'heading_open');
if (end === -1) return tokens.slice(start + 1);
return tokens.slice(start + 1, end);
};
export const findContentAfterHeadingClose = (tokens: Token[]) => {
const start = tokens.findIndex(t => t.type === 'heading_close');
const end = tokens.slice(start).findIndex(t => t.type === 'heading_open');
const start = tokens.findIndex((t) => t.type === 'heading_close');
const end = tokens.slice(start).findIndex((t) => t.type === 'heading_open');
if (end === -1) return tokens.slice(start + 1);
return tokens.slice(start + 1, end);
};
@@ -118,14 +118,14 @@ export const headingsAndContent = (tokens: Token[]): HeadingContent[] => {
const headingTokens = tokens.slice(
start + 1,
start +
tokens.slice(start).findIndex(t => t.type === 'heading_close' && t.level === token.level),
tokens.slice(start).findIndex((t) => t.type === 'heading_close' && t.level === token.level),
);
const startLevel = parseInt(token.tag.replace('h', ''), 10);
const content = tokens.slice(start + headingTokens.length);
const contentEnd = content.findIndex(
t => t.type === 'heading_open' && parseInt(t.tag.replace('h', ''), 10) <= startLevel,
(t) => t.type === 'heading_open' && parseInt(t.tag.replace('h', ''), 10) <= startLevel,
);
groups.push({
@@ -140,7 +140,7 @@ export const headingsAndContent = (tokens: Token[]): HeadingContent[] => {
};
const getConstructorHeaderInGroups = (groups: HeadingContent[]) => {
return groups.find(group => group.heading.startsWith('`new ') && group.level === 3);
return groups.find((group) => group.heading.startsWith('`new ') && group.level === 3);
};
export const findConstructorHeader = (tokens: Token[]) => {
@@ -165,7 +165,7 @@ export const getContentBeforeFirstHeadingMatching = (
return groups.slice(
0,
groups.findIndex(g => matcher(g.heading)),
groups.findIndex((g) => matcher(g.heading)),
);
};
@@ -175,7 +175,7 @@ export const findContentInsideHeader = (
expectedLevel: number,
) => {
const group = headingsAndContent(tokens).find(
g => g.heading === expectedHeader && g.level === expectedLevel,
(g) => g.heading === expectedHeader && g.level === expectedLevel,
);
if (!group) return null;
return group.content;
@@ -205,7 +205,7 @@ export const safelySeparateTypeStringOn = (typeString: string, targetChar: strin
}
}
types.push(current);
return types.map(t => t.trim()).filter(t => !!t);
return types.map((t) => t.trim()).filter((t) => !!t);
};
export const getTopLevelMultiTypes = (typeString: string) => {
@@ -286,7 +286,7 @@ export const rawTypeToTypeInformation = (
index === multiTypes.length - 1 && !wasBracketWrapped && collection ? '[]' : ''
}`,
)
.map(multiType => rawTypeToTypeInformation(multiType, relatedDescription, subTypedKeys)),
.map((multiType) => rawTypeToTypeInformation(multiType, relatedDescription, subTypedKeys)),
};
}
@@ -296,7 +296,7 @@ export const rawTypeToTypeInformation = (
type: 'Function',
parameters:
subTypedKeys && !subTypedKeys.consumed
? consumeTypedKeysList(subTypedKeys).map<MethodParameterDocumentation>(typedKey => ({
? consumeTypedKeysList(subTypedKeys).map<MethodParameterDocumentation>((typedKey) => ({
name: typedKey.key,
description: typedKey.description,
required: typedKey.required,
@@ -311,7 +311,7 @@ export const rawTypeToTypeInformation = (
type: 'Object',
properties:
subTypedKeys && !subTypedKeys.consumed
? consumeTypedKeysList(subTypedKeys).map<PropertyDocumentationBlock>(typedKey => ({
? consumeTypedKeysList(subTypedKeys).map<PropertyDocumentationBlock>((typedKey) => ({
name: typedKey.key,
description: typedKey.description,
required: typedKey.required,
@@ -326,13 +326,13 @@ export const rawTypeToTypeInformation = (
type: 'String',
possibleValues:
subTypedKeys && !subTypedKeys.consumed
? consumeTypedKeysList(subTypedKeys).map<PossibleStringValue>(typedKey => ({
? consumeTypedKeysList(subTypedKeys).map<PossibleStringValue>((typedKey) => ({
value: typedKey.key,
description: typedKey.description,
}))
: relatedDescription
? extractStringEnum(relatedDescription)
: null,
? extractStringEnum(relatedDescription)
: null,
};
}
@@ -340,21 +340,23 @@ export const rawTypeToTypeInformation = (
if (genericTypeMatch) {
const genericTypeString = genericTypeMatch.outerType;
const innerTypes = getTopLevelOrderedTypes(genericTypeMatch.genericType)
.map(t => rawTypeToTypeInformation(t.trim(), '', null))
.map(info => {
.map((t) => rawTypeToTypeInformation(t.trim(), '', null))
.map((info) => {
if (info.type === 'Object') {
return {
...info,
type: 'Object',
properties:
subTypedKeys && !subTypedKeys.consumed
? consumeTypedKeysList(subTypedKeys).map<PropertyDocumentationBlock>(typedKey => ({
name: typedKey.key,
description: typedKey.description,
required: typedKey.required,
additionalTags: typedKey.additionalTags,
...typedKey.type,
}))
? consumeTypedKeysList(subTypedKeys).map<PropertyDocumentationBlock>(
(typedKey) => ({
name: typedKey.key,
description: typedKey.description,
required: typedKey.required,
additionalTags: typedKey.additionalTags,
...typedKey.type,
}),
)
: [],
};
}
@@ -393,7 +395,7 @@ export const rawTypeToTypeInformation = (
collection,
type: 'Event',
eventProperties: consumeTypedKeysList(subTypedKeys).map<PropertyDocumentationBlock>(
typedKey => ({
(typedKey) => ({
name: typedKey.key,
description: typedKey.description,
required: typedKey.required,
@@ -417,12 +419,14 @@ export const rawTypeToTypeInformation = (
// If no param types are provided in the <A, B, C> syntax then we should fallback to the normal one
genericProvidedParams.length === 0
? subTypedKeys && !subTypedKeys.consumed
? consumeTypedKeysList(subTypedKeys).map<MethodParameterDocumentation>(typedKey => ({
name: typedKey.key,
description: typedKey.description,
required: typedKey.required,
...typedKey.type,
}))
? consumeTypedKeysList(subTypedKeys).map<MethodParameterDocumentation>(
(typedKey) => ({
name: typedKey.key,
description: typedKey.description,
required: typedKey.required,
...typedKey.type,
}),
)
: []
: (genericProvidedParams as MethodParameterDocumentation[]),
returns: innerTypes[innerTypes.length - 1],
@@ -617,7 +621,7 @@ export const extractStringEnum = (description: string): PossibleStringValue[] |
return null;
}
return state.values.map(value => ({
return state.values.map((value) => ({
value,
description: '',
}));
@@ -983,7 +987,7 @@ export const findProcess = (tokens: Token[]): ProcessBlock => {
renderer: false,
utility: false,
exported: !ptks.some(
ptk => ptk.type === 'text' && ptk.content.startsWith('This class is not exported'),
(ptk) => ptk.type === 'text' && ptk.content.startsWith('This class is not exported'),
),
};
for (const ptk of ptks) {

View file

@@ -1,12 +1,17 @@
{
"compilerOptions": {
"module": "commonjs",
"target": "es6",
"module": "es2022",
"target": "es2022",
"moduleResolution": "Node",
"outDir": "dist",
"lib": [
"es6",
"es7"
],
"types": [
"node",
"jest"
],
"declaration": true,
"sourceMap": true,
"rootDir": "src",

2223
yarn.lock

The diff for this file is not shown because it is too large.