taler-docs

Documentation for GNU Taler components, APIs and protocols

commit cdd751d8cfda63ecd850a7007396f4257cbfe1f5
parent 966d1e292f0fe5478c17519c25144b2c437a14a1
Author: Nullptrderef <nullptrderef@proton.me>
Date:   Sun, 26 May 2024 01:40:39 +0200

move to pkg, do import resolution

Diffstat:
 D contrib/extract-types.mjs                | 128 -------------------------------------------------------------------------------
 A contrib/type-extractor/.gitignore        |   2 ++
 A contrib/type-extractor/extract-types.mjs | 253 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 A contrib/type-extractor/package.json      |  15 +++++++++++++++
 A contrib/type-extractor/pnpm-lock.yaml    |  24 ++++++++++++++++++++++++
5 files changed, 294 insertions(+), 128 deletions(-)
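
For orientation before the diff: the script scans the RST docs for ".. ts:def::" directives and emits one .ts file per .rst file, converting "//" comments to JSDoc and appending an export list. A hypothetical input/output pair (the "Amount" definition and file names are invented for illustration, not taken from the commit):

    .. ts:def:: Amount
      // An amount, e.g. "EUR:1.50".
      type Amount = string;

would become, in something like /tmp/net.taler.docs.ts-extracted/dts/api-example.ts:

    /**
     * An amount, e.g. "EUR:1.50".
     */
    type Amount = string;

    export { Amount };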

diff --git a/contrib/extract-types.mjs b/contrib/extract-types.mjs
@@ -1,128 +0,0 @@
-import fsSync, { promises as fs } from "fs";
-import * as path from "path";
-
-/**
- * @param {string} file
- */
-const runFileJob = async (file) => {
-  let workingFile = file;
-  const tsDefs = file.match(/[\t ]*\.\. ts\:def\:\: [a-zA-Z][a-zA-Z0-9_]*/g);
-  /** @type string[] */
-  const defines = [];
-  /** @type string[] */
-  const imports = [];
-  let dtsOutput = "";
-  if (tsDefs)
-    for (const def of tsDefs) {
-      if (!def) {
-        console.warn("No matches in ", file);
-        break;
-      }
-      workingFile = workingFile.substring(workingFile.indexOf(def));
-      let [defMatch, indentation, defName] = def.match(
-        /([\t ])*\.\. ts\:def\:\: ([a-zA-Z][a-zA-Z0-9_]*) *\n?/
-      );
-
-      // Extract the ts def
-      indentation = indentation ?? "";
-      workingFile = workingFile.substring(defMatch.length);
-      const workingFileLines = workingFile.split("\n");
-      let tsMatch = "";
-      while (workingFileLines[0]?.trim() === "") workingFileLines.shift();
-      while (
-        workingFileLines[0]?.trim() === "" ||
-        (workingFileLines[0] &&
-          new RegExp("^" + "[ \\t]".repeat(indentation.length + 2)).test(
-            workingFileLines[0]
-          ))
-      ) {
-        if (workingFileLines[0].length > indentation.length + 2)
-          workingFileLines[0] = workingFileLines[0].substring(
-            indentation.length + 2
-          );
-        tsMatch += workingFileLines.shift() + "\n";
-      }
-      workingFile = workingFileLines.join("\n");
-
-      // Convert comments to JSDocs
-      tsMatch = tsMatch
-        .replace(/([ \t]*\/\/.*\n?)+/g, (match) => {
-          match = match
-            .split("\n")
-            .map((v) => v.replace(/[ \t]+\/\/ ?/, "").trim())
-            .join("\n")
-            .trim();
-          if (match.includes("\n"))
-            match = `/**
-${match
-  .split("\n")
-  .map((v) => (v.trimStart().startsWith("//") ? v.replace("//", "") : v))
-  .map((v) => " *" + (v.startsWith(" ") ? "" : " ") + v)
-  .join("\n")
-  .replace(/\*\//g, "*​/")}
- */
-`;
-          else
-            match = `/**
- * ${(match.trimStart().startsWith("//") ? match.replace("//", "") : match)
-   .trim()
-   .replace(/\*\//g, "*​/")}
- */
-`;
-          return match;
-        })
-        .trim();
-
-      defines.push(defName);
-      dtsOutput += tsMatch + "\n";
-    }
-
-  // Now, find the unknown imports
-
-  return {
-    defines,
-    dtsOutput,
-  };
-};
-
-(async () => {
-  const genDocsForDirs = ["core/"].map((v) => path.resolve(v));
-  const genDocsForFiles = (
-    await Promise.all(
-      genDocsForDirs.map(async (dir) =>
-        (await fs.readdir(dir)).map((file) => path.join(dir, file))
-      )
-    )
-  ).flat();
-  const output = path.resolve(
-    process.env.TYPE_OUTPUT ?? process.env.TMP ?? process.env.TEMP ?? "/tmp",
-    "net.taler.docs.ts-extracted"
-  );
-  const tsDocOutput = path.join(output, "dts");
-  const zodOutput = path.join(output, "zod");
-
-  if (fsSync.existsSync(tsDocOutput))
-    await fs.rm(tsDocOutput, { recursive: true });
-  await fs.mkdir(tsDocOutput, {
-    recursive: true,
-  });
-  if (fsSync.existsSync(zodOutput)) await fs.rm(zodOutput, { recursive: true });
-  await fs.mkdir(zodOutput, {
-    recursive: true,
-  });
-  const jobResults = await Promise.all(
-    genDocsForFiles.map(async (filepath) => ({
-      source: filepath,
-      output: path.join(
-        tsDocOutput,
-        path.basename(filepath).replace(".rst", ".ts")
-      ),
-      result: await runFileJob(await fs.readFile(filepath, "utf-8")),
-    }))
-  );
-  await Promise.all(
-    jobResults.map(async ({ output, result }) => {
-      await fs.writeFile(output, result.dtsOutput);
-    })
-  );
-})();
diff --git a/contrib/type-extractor/.gitignore b/contrib/type-extractor/.gitignore
@@ -0,0 +1 @@
+node_modules
\ No newline at end of file
diff --git a/contrib/type-extractor/extract-types.mjs b/contrib/type-extractor/extract-types.mjs
@@ -0,0 +1,253 @@
+import fsSync, { promises as fs } from "fs";
+import ts from "typescript";
+import * as path from "path";
+
+const ignoredExports = [];
+
+/**
+ * @param {string} file
+ */
+const runFileJob = async (file) => {
+  // TODO: idk why this was async, im sure i had a reason
+  let workingFile = file;
+  const tsDefs = file.match(/[\t ]*\.\. ts\:def\:\: [a-zA-Z][a-zA-Z0-9_]*/g);
+  /** @type string[] */
+  const defines = [];
+  /** @type string[] */
+  const imports = [];
+  let dtsOutput = "";
+  if (tsDefs)
+    for (const def of tsDefs) {
+      if (!def) {
+        console.warn("No matches in ", file);
+        break;
+      }
+      workingFile = workingFile.substring(workingFile.indexOf(def));
+      let [defMatch, indentation, defName] = def.match(
+        /([\t ])*\.\. ts\:def\:\: ([a-zA-Z][a-zA-Z0-9_]*) *\n?/
+      );
+
+      if (ignoredExports.includes(defName)) continue;
+
+      // Extract the ts def
+      indentation = indentation ?? "";
+      workingFile = workingFile.substring(defMatch.length);
+      const workingFileLines = workingFile.split("\n");
+      let tsMatch = "";
+      while (workingFileLines[0]?.trim() === "") workingFileLines.shift();
+      while (
+        (workingFileLines[0]?.trim() === "" ||
+          (workingFileLines[0] &&
+            new RegExp("^" + "[ \\t]".repeat(indentation.length + 2)).test(
+              workingFileLines[0]
+            ))) &&
+        !workingFileLines[0]?.trim()?.startsWith(".. ts:def::")
+      ) {
+        if (workingFileLines[0].length > indentation.length + 2)
+          workingFileLines[0] = workingFileLines[0].substring(
+            indentation.length + 2
+          );
+        tsMatch += workingFileLines.shift() + "\n";
+      }
+      workingFile = workingFileLines.join("\n");
+
+      // Convert comments to JSDocs
+      tsMatch = tsMatch
+        .replace(/([ \t]*\/\/.*\n?)+/g, (match) => {
+          match = match
+            .split("\n")
+            .map((v) => v.replace(/[ \t]+\/\/ ?/, "").trim())
+            .join("\n")
+            .trim();
+          if (match.includes("\n"))
+            match = `/**
+${match
+  .split("\n")
+  .map((v) => (v.trimStart().startsWith("//") ? v.replace("//", "") : v))
+  .map((v) => " *" + (v.startsWith(" ") ? "" : " ") + v)
+  .join("\n")
+  .replace(/\*\//g, "*​/")}
+ */
+`;
+          else
+            match = `/**
+ * ${(match.trimStart().startsWith("//") ? match.replace("//", "") : match)
+   .trim()
+   .replace(/\*\//g, "*​/")}
+ */
+`;
+          return match;
+        })
+        .trim();
+
+      defines.push(defName);
+      dtsOutput += tsMatch + "\n";
+    }
+
+  if (defines.length === 0) return null; // nothing to give back, just exit
+
+  // Now, find the unknown imports
+
+  dtsOutput += `
+export { ${defines.join(", ")} };
+`;
+
+  // Job is done, return
+  return {
+    defines,
+    dtsOutput,
+  };
+};
+
+(async () => {
+  const genDocsForDirs = ["core/"].map((v) => path.resolve(process.argv[2], v));
+  const genDocsForFiles = (
+    await Promise.all(
+      genDocsForDirs.map(async (dir) =>
+        (await fs.readdir(dir)).map((file) => path.join(dir, file))
+      )
+    )
+  ).flat();
+  const output = path.resolve(
+    process.env.TYPE_OUTPUT ?? process.env.TMP ?? process.env.TEMP ?? "/tmp",
+    "net.taler.docs.ts-extracted"
+  );
+  const tsDocOutput = path.join(output, "dts");
+  const zodOutput = path.join(output, "zod");
+
+  if (fsSync.existsSync(tsDocOutput))
+    await fs.rm(tsDocOutput, { recursive: true });
+  await fs.mkdir(tsDocOutput, {
+    recursive: true,
+  });
+  if (fsSync.existsSync(zodOutput)) await fs.rm(zodOutput, { recursive: true });
+  await fs.mkdir(zodOutput, {
+    recursive: true,
+  });
+  const jobResults = (
+    await Promise.all(
+      genDocsForFiles.map(async (filepath) => ({
+        source: filepath,
+        output: path.join(
+          tsDocOutput,
+          path.basename(filepath).replace(".rst", ".ts")
+        ),
+        result: await runFileJob(await fs.readFile(filepath, "utf-8")),
+      }))
+    )
+  ).filter((v) => v.result !== null);
+  // Polyfilling!!!
+  jobResults.push({
+    source: "/tmp/net.taler.docs.extracted/_forced_polyfill",
+    output: path.join(tsDocOutput, "post-polyfill.ts"),
+    // This polyfill overwrites any object defined elsewhere
+    result: await runFileJob(`
+.. ts:def:: Integer
+  // An integer value.
+  // @integer
+  type Integer = number;
+`),
+  });
+  jobResults.unshift({
+    source: "/tmp/net.taler.docs.extracted/_polyfill",
+    output: path.join(tsDocOutput, "polyfill.ts"),
+    // This polyfill can be overwritten by the actual docs; it's contents will be outputted but ignored by the import resolver if overwritten
+    result: await runFileJob(`
+.. ts:def:: PaytoHash
+  // A Binary Object
+  type PaytoHash = string;
+.. ts:def:: AgeCommitmentHash
+  // A Binary Object
+  type AgeCommitmentHash = string;
+.. ts:def:: TALER_RefreshCommitmentP
+  // A Binary Object
+  type TALER_RefreshCommitmentP = string;
+.. ts:def:: WireTransferIdentifierRawP
+  // A Binary Object
+  type WireTransferIdentifierRawP = string;
+.. ts:def:: Base32
+  // Binary data is generally encoded using Crockford’s variant of Base32 (https://www.crockford.com/wrmg/base32.html), except that “U” is not excluded but also decodes to “V” to make OCR easy. We will still simply use the JSON type “base32” and the term “Crockford Base32” in the text to refer to the resulting encoding.
+  type Base32 = string;
+.. ts:def:: ExtensionManifest
+  // Mostly undocumented object; see {@link https://docs.taler.net/design-documents/006-extensions.html#extensionmanifest-object} for what it likely is?
+  interface ExtensionManifest {
+    // The criticality of the extension MUST be provided. It has the same
+    // semantics as "critical" has for extensions in X.509:
+    // - if "true", the client must "understand" the extension before
+    //   proceeding,
+    // - if "false", clients can safely skip extensions they do not
+    //   understand.
+    // (see https://datatracker.ietf.org/doc/html/rfc5280#section-4.2)
+    critical: boolean;

+    // The version information MUST be provided in Taler's protocol version
+    // ranges notation, see
+    // https://docs.taler.net/core/api-common.html#protocol-version-ranges
+    version: LibtoolVersion;

+    // Optional configuration object, defined by the feature itself
+    config?: object;
+  }
+.. ts:def:: WadId
+  // https://docs.taler.net/core/api-common.html#wadid
+  type WadId = string;
+.. ts:def:: ContractChoice
+  // Untyped in documentation https://docs.taler.net/design-documents/046-mumimo-contracts.html#tsref-type-ContractChoice
+  type ContractChoice = any;
+`),
+  });
+  // Resolve Inputs
+  /** @type {Record<string,string>} */
+  const exportsByFile = {};
+  jobResults.forEach((result) => {
+    // these are processed intentionally in-order; the last items in jobResults will take priority over the first; polyfill will always take peak priority
+    result.result.defines.forEach(
+      (define) => (exportsByFile[define] = result.output)
+    );
+  });
+  await Promise.all(
+    jobResults.map((result) => {
+      // now that the table is populated, lets resolve imports
+      const src = result.result.dtsOutput;
+
+      /** @type {string[]} */
+      const toBeImported = [];
+
+      const sourceFile = ts.createSourceFile(path.basename(result.output), src);
+
+      /**
+       * @param {ts.Node} node
+       */
+      const astWalker = (node) => {
+        if (node.kind === ts.SyntaxKind.TypeReference) {
+          /** @type {ts.TypeReferenceNode} */
+          const typeRefNode = node;
+          const identifier = typeRefNode.typeName.escapedText;
+          if (!result.result.defines.includes(identifier))
+            toBeImported.push(identifier);
+        }
+        ts.forEachChild(node, astWalker);
+      };
+      astWalker(sourceFile);
+      result.result.dtsOutput = `${toBeImported
+        .filter((v, i, a) => a.indexOf(v) === i)
+        .map((v) => {
+          if (exportsByFile[v])
+            return `import { ${v} } from ${JSON.stringify(
+              "./" + path.basename(exportsByFile[v])
+            )}`;
+          console.warn("Could not find reference to", v);
+          return "// WARN: UNKNOWN REF: " + JSON.stringify(v);
+        })
+        .join("\n")}
+${result.result.dtsOutput}`;
+    })
+  );
+  // Write outputs
+  await Promise.all(
+    jobResults.map(async ({ output, result }) => {
+      await fs.writeFile(output, result.dtsOutput);
+    })
+  );
+  // TODO: call tsc on all our stuff, ensure it validates
+})();
diff --git a/contrib/type-extractor/package.json b/contrib/type-extractor/package.json
@@ -0,0 +1,15 @@
+{
+  "name": "type-extractor",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "typescript": "^5.4.5"
+  }
+}
diff --git a/contrib/type-extractor/pnpm-lock.yaml b/contrib/type-extractor/pnpm-lock.yaml
@@ -0,0 +1,24 @@
+lockfileVersion: '9.0'
+
+settings:
+  autoInstallPeers: true
+  excludeLinksFromLockfile: false
+
+importers:
+
+  .:
+    dependencies:
+      typescript:
+        specifier: ^5.4.5
+        version: 5.4.5
+
+packages:
+
+  typescript@5.4.5:
+    resolution: {integrity: sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==}
+    engines: {node: '>=14.17'}
+    hasBin: true
+
+snapshots:
+
+  typescript@5.4.5: {}
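
The heart of the new import-resolution step is the TypeScript AST walk over each generated declaration file. A minimal, self-contained sketch of that idea follows; it is not the commit's code, and the file name, sample types, and localDefines set are invented for illustration:

    // Sketch: collect every referenced type name not defined locally,
    // using the TypeScript compiler API (run with: node sketch.mjs).
    import ts from "typescript";

    // "Amount" and "Timestamp" stand in for types exported by other files.
    const src = "interface Foo { amount: Amount; when: Timestamp; }";
    const localDefines = new Set(["Foo"]);
    const toBeImported = new Set();

    const sourceFile = ts.createSourceFile(
      "snippet.ts",
      src,
      ts.ScriptTarget.Latest,
      /* setParentNodes */ true
    );

    const walk = (node) => {
      // Every TypeReference node names a type this file depends on.
      if (ts.isTypeReferenceNode(node)) {
        const name = node.typeName.getText(sourceFile);
        if (!localDefines.has(name)) toBeImported.add(name);
      }
      ts.forEachChild(node, walk);
    };
    walk(sourceFile);

    console.log([...toBeImported]); // [ 'Amount', 'Timestamp' ]

Each name collected this way is then either rewritten as an import from the file recorded in exportsByFile or flagged with a "// WARN: UNKNOWN REF" comment; that fallback is why the two polyfill jobs exist, supplying definitions such as Integer and Base32 that the RST sources reference but never define. Note that the sketch passes ts.ScriptTarget.Latest explicitly, since createSourceFile expects a language-version argument.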