commit db2a715058613ceaf8beccb3b7f28cb246bb3790
parent a05b0a92733ba52df65d86c27663c34b1174d10c
Author: Nullptrderef <nullptrderef@proton.me>
Date: Sat, 29 Jun 2024 18:09:13 +0200
feat(wip): typesafety
Diffstat:
6 files changed, 422 insertions(+), 280 deletions(-)
diff --git a/.prettierrc b/.prettierrc
@@ -0,0 +1,6 @@
+{
+ "trailingComma": "all",
+ "tabWidth": 2,
+ "semi": true,
+ "singleQuote": true
+}
diff --git a/contrib/type-extractor/extract-types.mjs b/contrib/type-extractor/extract-types.mjs
@@ -1,280 +0,0 @@
-// Usage: $0 <path to documentation root>
-
-import fsSync, { promises as fs } from "fs";
-import ts from "typescript";
-import * as path from "path";
-
-const ignoredExports = ["PublishedAgeRestrictionBaseKey"];
-
-/**
- * @param {string} file
- */
-const runFileJob = async (file) => {
- // TODO: idk why this was async, im sure i had a reason
- let workingFile = file;
- const tsDefs = file.match(/[\t ]*\.\. ts\:def\:\: [a-zA-Z][a-zA-Z0-9_]*/g);
- /** @type string[] */
- const defines = [];
- let dtsOutput = "";
- if (tsDefs)
- for (const def of tsDefs) {
- if (!def) {
- console.warn("No matches in ", file);
- break;
- }
- workingFile = workingFile.substring(workingFile.indexOf(def));
- let [defMatch, indentation, defName] = def.match(
- /([\t ])*\.\. ts\:def\:\: ([a-zA-Z][a-zA-Z0-9_]*) *\n?/
- );
-
- if (ignoredExports.includes(defName)) continue;
-
- // Extract the ts def
- indentation = indentation ?? "";
- workingFile = workingFile.substring(defMatch.length);
- const workingFileLines = workingFile.split("\n");
- let tsMatch = "";
- while (workingFileLines[0]?.trim() === "") workingFileLines.shift();
- while (
- (workingFileLines[0]?.trim() === "" ||
- (workingFileLines[0] &&
- new RegExp("^" + "[ \\t]".repeat(indentation.length + 2)).test(
- workingFileLines[0]
- ))) &&
- !workingFileLines[0]?.trim()?.startsWith(".. ts:def::")
- ) {
- if (workingFileLines[0].length > indentation.length + 2)
- workingFileLines[0] = workingFileLines[0].substring(
- indentation.length + 2
- );
- tsMatch += workingFileLines.shift() + "\n";
- }
- workingFile = workingFileLines.join("\n");
-
- // Convert comments to JSDocs
- tsMatch = tsMatch
- .replace(/([ \t]*\/\/.*\n?)+/g, (match) => {
- match = match
- .split("\n")
- .map((v) => v.replace(/[ \t]+\/\/ ?/, "").trim())
- .join("\n")
- .trim();
- if (match.includes("\n"))
- match = `/**
-${match
- .split("\n")
- .map((v) => (v.trimStart().startsWith("//") ? v.replace("//", "") : v))
- .map((v) => " *" + (v.startsWith(" ") ? "" : " ") + v)
- .join("\n")
- .replace(/\*\//g, "*/")}
- */
-`;
- else
- match = `/**
- * ${(match.trimStart().startsWith("//") ? match.replace("//", "") : match)
- .trim()
- .replace(/\*\//g, "*/")}
- */
-`;
- return match;
- })
- .trim();
-
- defines.push(defName);
- dtsOutput += tsMatch + "\n";
- }
-
- if (defines.length === 0) return null; // nothing to give back, just exit
-
- // Now, find the unknown imports
-
- dtsOutput += `
-export { ${defines.join(", ")} };
-`;
-
- // Job is done, return
- return {
- defines,
- dtsOutput,
- };
-};
-
-(async () => {
- const genDocsForDirs = ["core/"].map((v) => path.resolve(process.argv[2], v));
- const genDocsForFiles = (
- await Promise.all(
- genDocsForDirs.map(async (dir) =>
- (await fs.readdir(dir)).map((file) => path.join(dir, file))
- )
- )
- ).flat();
- const output = path.resolve(
- process.env.TYPE_OUTPUT ?? process.env.TMP ?? process.env.TEMP ?? "/tmp",
- "net.taler.docs.ts-extracted"
- );
- const tsDocOutput = path.join(output, "dts");
- // const zodOutput = path.join(output, "zod"); // TODO: this would be cool to have in future
-
- if (fsSync.existsSync(tsDocOutput))
- await fs.rm(tsDocOutput, { recursive: true });
- await fs.mkdir(tsDocOutput, {
- recursive: true,
- });
- // if (fsSync.existsSync(zodOutput)) await fs.rm(zodOutput, { recursive: true });
- // await fs.mkdir(zodOutput, {
- // recursive: true,
- // });
- const jobResults = (
- await Promise.all(
- genDocsForFiles.map(async (filepath) => ({
- source: filepath,
- output: path.join(
- tsDocOutput,
- path.basename(filepath).replace(".rst", ".ts")
- ),
- result: await runFileJob(await fs.readFile(filepath, "utf-8")),
- }))
- )
- ).filter((v) => v.result !== null);
- // Polyfilling!!!
- // TODO: Extract these to standalone .rst files!
- jobResults.push({
- source: "/tmp/net.taler.docs.extracted/_forced_polyfill",
- output: path.join(tsDocOutput, "post-polyfill.ts"),
- // This polyfill overwrites any object defined elsewhere
- result: await runFileJob(`
-.. ts:def:: Integer
- // An integer value.
- // @integer
- type Integer = number;
-`),
- });
- jobResults.unshift({
- source: "/tmp/net.taler.docs.extracted/_polyfill",
- output: path.join(tsDocOutput, "polyfill.ts"),
- // This polyfill can be overwritten by the actual docs; it's contents will be outputted but ignored by the import resolver if overwritten
- result: await runFileJob(`
-.. ts:def:: PaytoHash
- // A Binary Object
- type PaytoHash = string;
-.. ts:def:: AgeCommitmentHash
- // A Binary Object
- type AgeCommitmentHash = string;
-.. ts:def:: TALER_RefreshCommitmentP
- // A Binary Object
- type TALER_RefreshCommitmentP = string;
-.. ts:def:: WireTransferIdentifierRawP
- // A Binary Object
- type WireTransferIdentifierRawP = string;
-.. ts:def:: Base32
- // Binary data is generally encoded using Crockford’s variant of Base32 (https://www.crockford.com/wrmg/base32.html), except that “U” is not excluded but also decodes to “V” to make OCR easy. We will still simply use the JSON type “base32” and the term “Crockford Base32” in the text to refer to the resulting encoding.
- type Base32 = string;
-.. ts:def:: ExtensionManifest
- // Mostly undocumented object; see {@link https://docs.taler.net/design-documents/006-extensions.html#extensionmanifest-object} for what it likely is?
- interface ExtensionManifest {
- // The criticality of the extension MUST be provided. It has the same
- // semantics as "critical" has for extensions in X.509:
- // - if "true", the client must "understand" the extension before
- // proceeding,
- // - if "false", clients can safely skip extensions they do not
- // understand.
- // (see https://datatracker.ietf.org/doc/html/rfc5280#section-4.2)
- critical: boolean;
-
- // The version information MUST be provided in Taler's protocol version
- // ranges notation, see
- // https://docs.taler.net/core/api-common.html#protocol-version-ranges
- version: LibtoolVersion;
-
- // Optional configuration object, defined by the feature itself
- config?: object;
- }
-.. ts:def:: WadId
- // https://docs.taler.net/core/api-common.html#wadid
- type WadId = string;
-.. ts:def:: ContractChoice
- // Untyped in documentation https://docs.taler.net/design-documents/046-mumimo-contracts.html#tsref-type-ContractChoice
- type ContractChoice = any;
-`),
- });
- // Resolve Inputs
- /** @type {Record<string,string>} */
- const fileByExport = {};
- jobResults.forEach((result) => {
- // these are processed intentionally in-order; the last items in jobResults will take priority over the first; polyfill will always take peak priority
- result.result.defines.forEach(
- (define) => (fileByExport[define] = result.output)
- );
- });
- await Promise.all(
- jobResults.map((result) => {
- // now that the table is populated, lets resolve imports
- const src = result.result.dtsOutput;
-
- /** @type {string[]} */
- const toBeImported = [];
-
- const sourceFile = ts.createSourceFile(path.basename(result.output), src);
-
- /**
- * @param {ts.Node} node
- */
- const astWalker = (node) => {
- if (node.kind === ts.SyntaxKind.TypeReference) {
- /** @type {ts.TypeReferenceNode} */
- const typeRefNode = node;
- const identifier = typeRefNode.typeName.escapedText;
- if (!result.result.defines.includes(identifier))
- toBeImported.push(identifier);
- }
- ts.forEachChild(node, astWalker);
- };
- astWalker(sourceFile);
- result.result.dtsOutput = `${toBeImported
- .filter((v, i, a) => a.indexOf(v) === i)
- .map((v) => {
- if (fileByExport[v])
- return `import { ${v} } from ${JSON.stringify(
- "./" + path.basename(fileByExport[v])
- )}`;
- else if (["String", "Boolean"].includes(v))
- console.warn(
- `In file ${
- result.source
- }: Please use ${v.toLocaleLowerCase()} instead of ${v}`
- );
- console.warn("Could not find reference to", v);
- return "// WARN: UNKNOWN REF: " + JSON.stringify(v);
- })
- .join("\n")}
-${result.result.dtsOutput}`;
- })
- );
- // Write outputs
- await Promise.all(
- jobResults.map(async ({ output, result }) => {
- await fs.writeFile(output, result.dtsOutput);
- })
- );
- // Write the index.ts file
- /** @type {Record<string,string[]>} */
- const exportsByFile = {};
- for (const [exported, file] of Object.entries(fileByExport)) {
- exportsByFile[file] = exportsByFile[file] ?? [];
- exportsByFile[file].push(exported);
- }
- await fs.writeFile(
- path.join(tsDocOutput, "main.ts"),
- Object.entries(exportsByFile)
- .map(
- ([file, exports]) =>
- // We could use export * from, but then we'd get class conflicts if 2 separate files declare the same type - including if our polyfill overwrites or gets overwritten
- `export { ${exports.join(", ")} } from ${JSON.stringify(
- "./" + path.basename(file) // TODO: use path.relative
- )};`
- )
- .join("")
- );
-
- // TODO: call tsc on all our stuff, ensure it validates
-})();
diff --git a/contrib/type-extractor/package.json b/contrib/type-extractor/package.json
@@ -11,5 +11,8 @@
"license": "ISC",
"dependencies": {
"typescript": "^5.4.5"
+ },
+ "devDependencies": {
+ "@types/node": "^20.14.9"
}
}
diff --git a/contrib/type-extractor/pnpm-lock.yaml b/contrib/type-extractor/pnpm-lock.yaml
@@ -11,14 +11,30 @@ importers:
typescript:
specifier: ^5.4.5
version: 5.4.5
+ devDependencies:
+ '@types/node':
+ specifier: ^20.14.9
+ version: 20.14.9
packages:
+ '@types/node@20.14.9':
+ resolution: {integrity: sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==}
+
typescript@5.4.5:
resolution: {integrity: sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==}
engines: {node: '>=14.17'}
hasBin: true
+ undici-types@5.26.5:
+ resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==}
+
snapshots:
+ '@types/node@20.14.9':
+ dependencies:
+ undici-types: 5.26.5
+
typescript@5.4.5: {}
+
+ undici-types@5.26.5: {}
diff --git a/contrib/type-extractor/src/extract-types.ts b/contrib/type-extractor/src/extract-types.ts
@@ -0,0 +1,288 @@
+// Usage: $0 <path to documentation root>
+
+import fsSync, { promises as fs } from 'fs';
+import ts, { ScriptTarget } from 'typescript';
+import * as path from 'path';
+
+const ignoredExports = ['PublishedAgeRestrictionBaseKey'];
+
+/**
+ * @param {string} file
+ */
+const runFileJob = (file: string) => {
+ // TODO: determine why this was originally async; it no longer needs to be.
+ let workingFile = file;
+ const tsDefs = file.match(/[\t ]*\.\. ts\:def\:\: [a-zA-Z][a-zA-Z0-9_]*/g);
+ const defines: string[] = [];
+ let dtsOutput = '';
+ if (tsDefs)
+ for (const def of tsDefs) {
+ if (!def) {
+ console.warn('No matches in ', file);
+ break;
+ }
+ workingFile = workingFile.substring(workingFile.indexOf(def));
+ let [defMatch, indentation, defName] =
+ def.match(/([\t ])*\.\. ts\:def\:\: ([a-zA-Z][a-zA-Z0-9_]*) *\n?/) ??
+ [];
+
+ if (
+ !defMatch ||
+ !indentation ||
+ !defName ||
+ ignoredExports.includes(defName)
+ )
+ continue;
+
+ // Extract the ts def
+ indentation = indentation ?? '';
+ workingFile = workingFile.substring(defMatch.length);
+ const workingFileLines = workingFile.split('\n');
+ let tsMatch = '';
+ while (workingFileLines[0]?.trim() === '') workingFileLines.shift();
+ while (
+ (workingFileLines[0]?.trim() === '' ||
+ (workingFileLines[0] &&
+ new RegExp('^' + '[ \\t]'.repeat(indentation.length + 2)).test(
+ workingFileLines[0],
+ ))) &&
+ !workingFileLines[0]?.trim()?.startsWith('.. ts:def::')
+ ) {
+ if (workingFileLines[0].length > indentation.length + 2)
+ workingFileLines[0] = workingFileLines[0].substring(
+ indentation.length + 2,
+ );
+ tsMatch += workingFileLines.shift() + '\n';
+ }
+ workingFile = workingFileLines.join('\n');
+
+ // Convert comments to JSDocs
+ tsMatch = tsMatch
+ .replace(/([ \t]*\/\/.*\n?)+/g, (match) => {
+ match = match
+ .split('\n')
+ .map((v) => v.replace(/[ \t]+\/\/ ?/, '').trim())
+ .join('\n')
+ .trim();
+ if (match.includes('\n'))
+ match = `/**
+${match
+ .split('\n')
+ .map((v) => (v.trimStart().startsWith('//') ? v.replace('//', '') : v))
+ .map((v) => ' *' + (v.startsWith(' ') ? '' : ' ') + v)
+ .join('\n')
+ .replace(/\*\//g, '*/')}
+ */
+`;
+ else
+ match = `/**
+ * ${(match.trimStart().startsWith('//') ? match.replace('//', '') : match)
+ .trim()
+ .replace(/\*\//g, '*/')}
+ */
+`;
+ return match;
+ })
+ .trim();
+
+ defines.push(defName);
+ dtsOutput += tsMatch + '\n';
+ }
+
+ if (defines.length === 0) return null; // nothing to give back, just exit
+
+ // Now, find the unknown imports
+
+ dtsOutput += `
+export { ${defines.join(', ')} };
+`;
+
+ // Job is done, return
+ return {
+ defines,
+ dtsOutput,
+ };
+};
+
+(async () => {
+ const genDocsForDirs = ['core/'].map((v) => path.resolve(process.argv[2], v));
+ const genDocsForFiles = (
+ await Promise.all(
+ genDocsForDirs.map(async (dir) =>
+ (await fs.readdir(dir)).map((file) => path.join(dir, file)),
+ ),
+ )
+ ).flat();
+ const output = path.resolve(
+ process.env.TYPE_OUTPUT ?? process.env.TMP ?? process.env.TEMP ?? '/tmp',
+ 'net.taler.docs.ts-extracted',
+ );
+ const tsDocOutput = path.join(output, 'dts');
+ // const zodOutput = path.join(output, "zod"); // TODO: this would be cool to have in future
+
+ if (fsSync.existsSync(tsDocOutput))
+ await fs.rm(tsDocOutput, { recursive: true });
+ await fs.mkdir(tsDocOutput, {
+ recursive: true,
+ });
+ // if (fsSync.existsSync(zodOutput)) await fs.rm(zodOutput, { recursive: true });
+ // await fs.mkdir(zodOutput, {
+ // recursive: true,
+ // });
+ const jobResults = (
+ await Promise.all(
+ genDocsForFiles.map(async (filepath) => ({
+ source: filepath,
+ output: path.join(
+ tsDocOutput,
+ path.basename(filepath).replace('.rst', '.ts'),
+ ),
+ result: runFileJob(await fs.readFile(filepath, 'utf-8'))!,
+ })),
+ )
+ ).filter((v) => v.result !== null);
+ // Polyfilling!!!
+ // TODO: Extract these to standalone .rst files!
+ jobResults.push({
+ source: '/tmp/net.taler.docs.extracted/_forced_polyfill',
+ output: path.join(tsDocOutput, 'post-polyfill.ts'),
+ // This polyfill overwrites any object defined elsewhere
+ result: runFileJob(`
+.. ts:def:: Integer
+ // An integer value.
+ // @integer
+ type Integer = number;
+`)!,
+ });
+ jobResults.unshift({
+ source: '/tmp/net.taler.docs.extracted/_polyfill',
+ output: path.join(tsDocOutput, 'polyfill.ts'),
+ // This polyfill can be overwritten by the actual docs; its contents will be output but ignored by the import resolver if overwritten
+ result: runFileJob(`
+.. ts:def:: PaytoHash
+ // A Binary Object
+ type PaytoHash = string;
+.. ts:def:: AgeCommitmentHash
+ // A Binary Object
+ type AgeCommitmentHash = string;
+.. ts:def:: TALER_RefreshCommitmentP
+ // A Binary Object
+ type TALER_RefreshCommitmentP = string;
+.. ts:def:: WireTransferIdentifierRawP
+ // A Binary Object
+ type WireTransferIdentifierRawP = string;
+.. ts:def:: Base32
+ // Binary data is generally encoded using Crockford’s variant of Base32 (https://www.crockford.com/wrmg/base32.html), except that “U” is not excluded but also decodes to “V” to make OCR easy. We will still simply use the JSON type “base32” and the term “Crockford Base32” in the text to refer to the resulting encoding.
+ type Base32 = string;
+.. ts:def:: ExtensionManifest
+ // Mostly undocumented object; see {@link https://docs.taler.net/design-documents/006-extensions.html#extensionmanifest-object} for what it likely is?
+ interface ExtensionManifest {
+ // The criticality of the extension MUST be provided. It has the same
+ // semantics as "critical" has for extensions in X.509:
+ // - if "true", the client must "understand" the extension before
+ // proceeding,
+ // - if "false", clients can safely skip extensions they do not
+ // understand.
+ // (see https://datatracker.ietf.org/doc/html/rfc5280#section-4.2)
+ critical: boolean;
+
+ // The version information MUST be provided in Taler's protocol version
+ // ranges notation, see
+ // https://docs.taler.net/core/api-common.html#protocol-version-ranges
+ version: LibtoolVersion;
+
+ // Optional configuration object, defined by the feature itself
+ config?: object;
+ }
+.. ts:def:: WadId
+ // https://docs.taler.net/core/api-common.html#wadid
+ type WadId = string;
+.. ts:def:: ContractChoice
+ // Untyped in documentation https://docs.taler.net/design-documents/046-mumimo-contracts.html#tsref-type-ContractChoice
+ type ContractChoice = any;
+`)!,
+ });
+ // Resolve Inputs
+ const fileByExport: Record<string, string> = {};
+ jobResults.forEach((result) => {
+ // these are processed intentionally in-order; the last items in jobResults will take priority over the first; polyfill will always take top priority
+ result.result.defines.forEach(
+ (define) => (fileByExport[define] = result.output),
+ );
+ });
+ await Promise.all(
+ jobResults.map((result) => {
+ // now that the table is populated, let's resolve imports
+ const src = result.result.dtsOutput;
+
+ const toBeImported: Array<string> = [];
+
+ const sourceFile = ts.createSourceFile(
+ path.basename(result.output),
+ src,
+ {
+ languageVersion: ScriptTarget.ESNext,
+ },
+ );
+
+ const astWalker = (node: ts.Node) => {
+ if (node.kind === ts.SyntaxKind.TypeReference) {
+ const typeRefNode = node as ts.TypeReferenceNode;
+ const { typeName } = typeRefNode;
+ const identifier =
+ 'escapedText' in typeName
+ ? typeName.escapedText
+ : typeName.getText();
+ if (!result.result.defines.includes(`${identifier}`))
+ toBeImported.push(`${identifier}`);
+ }
+ ts.forEachChild(node, astWalker);
+ };
+ astWalker(sourceFile);
+ result.result.dtsOutput = `${toBeImported
+ .filter((v, i, a) => a.indexOf(v) === i)
+ .map((v) => {
+ if (fileByExport[v])
+ return `import { ${v} } from ${JSON.stringify(
+ './' + path.basename(fileByExport[v]),
+ )}`;
+ else if (['String', 'Boolean'].includes(v))
+ console.warn(
+ `In file ${
+ result.source
+ }: Please use ${v.toLocaleLowerCase()} instead of ${v}`,
+ );
+ console.warn('Could not find reference to', v);
+ return '// WARN: UNKNOWN REF: ' + JSON.stringify(v);
+ })
+ .join('\n')}
+${result.result.dtsOutput}`;
+ }),
+ );
+ // Write outputs
+ await Promise.all(
+ jobResults.map(async ({ output, result }) => {
+ await fs.writeFile(output, result.dtsOutput);
+ }),
+ );
+ // Write the index.ts file
+ const exportsByFile: Record<string, string[]> = {};
+ for (const [exported, file] of Object.entries(fileByExport)) {
+ exportsByFile[file] = exportsByFile[file] ?? [];
+ exportsByFile[file].push(exported);
+ }
+ await fs.writeFile(
+ path.join(tsDocOutput, 'main.ts'),
+ Object.entries(exportsByFile)
+ .map(
+ ([file, exports]) =>
+ // We could use export * from, but then we'd get class conflicts if 2 separate files declare the same type - including if our polyfill overwrites or gets overwritten
+ `export { ${exports.join(', ')} } from ${JSON.stringify(
+ './' + path.basename(file), // TODO: use path.relative
+ )};`,
+ )
+ .join(''),
+ );
+
+ // TODO: call tsc on all our stuff, ensure it validates
+})();
diff --git a/contrib/type-extractor/tsconfig.json b/contrib/type-extractor/tsconfig.json
@@ -0,0 +1,109 @@
+{
+ "compilerOptions": {
+ /* Visit https://aka.ms/tsconfig to read more about this file */
+
+ /* Projects */
+ // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
+ // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
+ // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
+ // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
+ // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
+ // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
+
+ /* Language and Environment */
+ "target": "ESNext" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
+ // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
+ // "jsx": "preserve", /* Specify what JSX code is generated. */
+ // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */
+ // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
+ // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
+ // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
+ // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
+ // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
+ // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
+ // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
+ // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
+
+ /* Modules */
+ "module": "NodeNext" /* Specify what module code is generated. */,
+ // "rootDir": "./", /* Specify the root folder within your source files. */
+ "moduleResolution": "NodeNext" /* Specify how TypeScript looks up a file from a given module specifier. */,
+ // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
+ // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
+ // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
+ // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
+ // "types": [], /* Specify type package names to be included without being referenced in a source file. */
+ // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
+ // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
+ // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */
+ // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
+ // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
+ // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
+ // "resolveJsonModule": true, /* Enable importing .json files. */
+ // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
+ // "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */
+
+ /* JavaScript Support */
+ // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
+ // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
+ // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
+
+ /* Emit */
+ // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
+ // "declarationMap": true, /* Create sourcemaps for d.ts files. */
+ // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
+ // "sourceMap": true, /* Create source map files for emitted JavaScript files. */
+ // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
+ // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
+ // "outDir": "./", /* Specify an output folder for all emitted files. */
+ // "removeComments": true, /* Disable emitting comments. */
+ // "noEmit": true, /* Disable emitting files from a compilation. */
+ // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
+ // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */
+ // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
+ // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
+ // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
+ // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
+ // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
+ // "newLine": "crlf", /* Set the newline character for emitting files. */
+ // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
+ // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
+ // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
+ // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
+ // "declarationDir": "./", /* Specify the output directory for generated declaration files. */
+ // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
+
+ /* Interop Constraints */
+ // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
+ // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */
+ // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
+ "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */,
+ // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
+ "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */,
+
+ /* Type Checking */
+ "strict": true /* Enable all strict type-checking options. */,
+ // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
+ // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
+ // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
+ // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
+ // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
+ // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
+ // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
+ // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
+ // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
+ // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
+ // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
+ // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
+ // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
+ // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
+ // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
+ // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
+ // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
+ // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
+
+ /* Completeness */
+ // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
+ "skipLibCheck": true /* Skip type checking all .d.ts files. */
+ }
+}