taler-docs

Documentation for GNU Taler components, APIs and protocols

commit e12f1ea1358a2330717c58784647ba43c5855e8a
parent db2a715058613ceaf8beccb3b7f28cb246bb3790
Author: Nullptrderef <nullptrderef@proton.me>
Date:   Sat, 29 Jun 2024 18:16:46 +0200

chore: a

Diffstat:
A contrib/type-extractor/dist/main.mjs        | 216 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
M contrib/type-extractor/package.json         |   5 +++--
M contrib/type-extractor/pnpm-lock.yaml       | 241 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
D contrib/type-extractor/src/extract-types.ts | 288 --------------------------------------------------------------------------------
A contrib/type-extractor/src/main.ts          | 288 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
5 files changed, 748 insertions(+), 290 deletions(-)

diff --git a/contrib/type-extractor/dist/main.mjs b/contrib/type-extractor/dist/main.mjs
@@ -0,0 +1,216 @@
+// src/main.ts
+import fsSync, { promises as fs } from "fs";
+import ts, { ScriptTarget } from "typescript";
+import * as path from "path";
+var ignoredExports = ["PublishedAgeRestrictionBaseKey"];
+var runFileJob = (file) => {
+  let workingFile = file;
+  const tsDefs = file.match(/[\t ]*\.\. ts\:def\:\: [a-zA-Z][a-zA-Z0-9_]*/g);
+  const defines = [];
+  let dtsOutput = "";
+  if (tsDefs)
+    for (const def of tsDefs) {
+      if (!def) {
+        console.warn("No matches in ", file);
+        break;
+      }
+      workingFile = workingFile.substring(workingFile.indexOf(def));
+      let [defMatch, indentation, defName] = def.match(/([\t ])*\.\. ts\:def\:\: ([a-zA-Z][a-zA-Z0-9_]*) *\n?/) ?? [];
+      if (!defMatch || !indentation || !defName || ignoredExports.includes(defName))
+        continue;
+      indentation = indentation ?? "";
+      workingFile = workingFile.substring(defMatch.length);
+      const workingFileLines = workingFile.split("\n");
+      let tsMatch = "";
+      while (workingFileLines[0]?.trim() === "") workingFileLines.shift();
+      while ((workingFileLines[0]?.trim() === "" || workingFileLines[0] && new RegExp("^" + "[ \\t]".repeat(indentation.length + 2)).test(
+        workingFileLines[0]
+      )) && !workingFileLines[0]?.trim()?.startsWith(".. ts:def::")) {
+        if (workingFileLines[0].length > indentation.length + 2)
+          workingFileLines[0] = workingFileLines[0].substring(
+            indentation.length + 2
+          );
+        tsMatch += workingFileLines.shift() + "\n";
+      }
+      workingFile = workingFileLines.join("\n");
+      tsMatch = tsMatch.replace(/([ \t]*\/\/.*\n?)+/g, (match) => {
+        match = match.split("\n").map((v) => v.replace(/[ \t]+\/\/ ?/, "").trim()).join("\n").trim();
+        if (match.includes("\n"))
+          match = `/**
+${match.split("\n").map((v) => v.trimStart().startsWith("//") ? v.replace("//", "") : v).map((v) => " *" + (v.startsWith(" ") ? "" : " ") + v).join("\n").replace(/\*\//g, "*\u200B/")}
+ */
+`;
+        else
+          match = `/**
+ * ${(match.trimStart().startsWith("//") ? match.replace("//", "") : match).trim().replace(/\*\//g, "*\u200B/")}
+ */
+`;
+        return match;
+      }).trim();
+      defines.push(defName);
+      dtsOutput += tsMatch + "\n";
+    }
+  if (defines.length === 0) {
+    console.warn(file, new Error("No Defines"));
+    return null;
+  }
+  dtsOutput += `
+export { ${defines.join(", ")} };
+`;
+  return {
+    defines,
+    dtsOutput
+  };
+};
+(async () => {
+  const genDocsForDirs = ["core/"].map((v) => path.resolve(process.argv[2], v));
+  const genDocsForFiles = (await Promise.all(
+    genDocsForDirs.map(
+      async (dir) => (await fs.readdir(dir)).map((file) => path.join(dir, file))
+    )
+  )).flat();
+  const output = path.resolve(
+    process.env.TYPE_OUTPUT ?? process.env.TMP ?? process.env.TEMP ?? "/tmp",
+    "net.taler.docs.ts-extracted"
+  );
+  const tsDocOutput = path.join(output, "dts");
+  if (fsSync.existsSync(tsDocOutput))
+    await fs.rm(tsDocOutput, { recursive: true });
+  await fs.mkdir(tsDocOutput, {
+    recursive: true
+  });
+  const jobResults = (await Promise.all(
+    genDocsForFiles.map(async (filepath) => ({
+      source: filepath,
+      output: path.join(
+        tsDocOutput,
+        path.basename(filepath).replace(".rst", ".ts")
+      ),
+      result: runFileJob(await fs.readFile(filepath, "utf-8"))
+    }))
+  )).filter((v) => v.result !== null);
+  jobResults.push({
+    source: "/tmp/net.taler.docs.extracted/_forced_polyfill",
+    output: path.join(tsDocOutput, "post-polyfill.ts"),
+    // This polyfill overwrites any object defined elsewhere
+    result: runFileJob(`
+.. ts:def:: Integer
+  // An integer value.
+  // @integer
+  type Integer = number;
+`)
+  });
+  jobResults.unshift({
+    source: "/tmp/net.taler.docs.extracted/_polyfill",
+    output: path.join(tsDocOutput, "polyfill.ts"),
+    // This polyfill can be overwritten by the actual docs; its contents will be output but ignored by the import resolver if overwritten
+    result: runFileJob(`
+.. ts:def:: PaytoHash
+  // A Binary Object
+  type PaytoHash = string;
+.. ts:def:: AgeCommitmentHash
+  // A Binary Object
+  type AgeCommitmentHash = string;
+.. ts:def:: TALER_RefreshCommitmentP
+  // A Binary Object
+  type TALER_RefreshCommitmentP = string;
+.. ts:def:: WireTransferIdentifierRawP
+  // A Binary Object
+  type WireTransferIdentifierRawP = string;
+.. ts:def:: Base32
+  // Binary data is generally encoded using Crockford\u2019s variant of Base32 (https://www.crockford.com/wrmg/base32.html), except that \u201CU\u201D is not excluded but also decodes to \u201CV\u201D to make OCR easy. We will still simply use the JSON type \u201Cbase32\u201D and the term \u201CCrockford Base32\u201D in the text to refer to the resulting encoding.
+  type Base32 = string;
+.. ts:def:: ExtensionManifest
+  // Mostly undocumented object; see {@link https://docs.taler.net/design-documents/006-extensions.html#extensionmanifest-object} for what it likely is?
+  interface ExtensionManifest {
+    // The criticality of the extension MUST be provided. It has the same
+    // semantics as "critical" has for extensions in X.509:
+    // - if "true", the client must "understand" the extension before
+    //   proceeding,
+    // - if "false", clients can safely skip extensions they do not
+    //   understand.
+    // (see https://datatracker.ietf.org/doc/html/rfc5280#section-4.2)
+    critical: boolean;
+
+    // The version information MUST be provided in Taler's protocol version
+    // ranges notation, see
+    // https://docs.taler.net/core/api-common.html#protocol-version-ranges
+    version: LibtoolVersion;
+
+    // Optional configuration object, defined by the feature itself
+    config?: object;
+  }
+.. ts:def:: WadId
+  // https://docs.taler.net/core/api-common.html#wadid
+  type WadId = string;
+.. ts:def:: ContractChoice
+  // Untyped in documentation https://docs.taler.net/design-documents/046-mumimo-contracts.html#tsref-type-ContractChoice
+  type ContractChoice = any;
+`)
+  });
+  const fileByExport = {};
+  jobResults.forEach((result) => {
+    result.result.defines.forEach(
+      (define) => fileByExport[define] = result.output
+    );
+  });
+  await Promise.all(
+    jobResults.map((result) => {
+      const src = result.result.dtsOutput;
+      const toBeImported = [];
+      const sourceFile = ts.createSourceFile(
+        path.basename(result.output),
+        src,
+        {
+          languageVersion: ScriptTarget.ESNext
+        }
+      );
+      const astWalker = (node) => {
+        if (node.kind === ts.SyntaxKind.TypeReference) {
+          const typeRefNode = node;
+          const { typeName } = typeRefNode;
+          const identifier = "escapedText" in typeName ? typeName.escapedText : typeName.getText();
+          if (!result.result.defines.includes(`${identifier}`))
+            toBeImported.push(`${identifier}`);
+        }
+        ts.forEachChild(node, astWalker);
+      };
+      astWalker(sourceFile);
+      result.result.dtsOutput = `${toBeImported.filter((v, i, a) => a.indexOf(v) === i).map((v) => {
+        if (fileByExport[v])
+          return `import { ${v} } from ${JSON.stringify(
+            "./" + path.basename(fileByExport[v])
+          )}`;
+        else if (["String", "Boolean"].includes(v))
+          console.warn(
+            `In file ${result.source}: Please use ${v.toLocaleLowerCase()} instead of ${v}`
+          );
+        console.warn("Could not find reference to", v);
+        return "// WARN: UNKNOWN REF: " + JSON.stringify(v);
+      }).join("\n")}
+${result.result.dtsOutput}`;
+    })
+  );
+  await Promise.all(
+    jobResults.map(async ({ output: output2, result }) => {
+      await fs.writeFile(output2, result.dtsOutput);
+    })
+  );
+  const exportsByFile = {};
+  for (const [exported, file] of Object.entries(fileByExport)) {
+    exportsByFile[file] = exportsByFile[file] ?? [];
+    exportsByFile[file].push(exported);
+  }
+  await fs.writeFile(
+    path.join(tsDocOutput, "main.ts"),
+    Object.entries(exportsByFile).map(
+      ([file, exports]) => (
+        // We could use export * from, but then we'd get class conflicts if 2 separate files declare the same type - including if our polyfill overwrites or gets overwritten
+        `export { ${exports.join(", ")} } from ${JSON.stringify(
+          "./" + path.basename(file)
+          // TODO: use path.relative
+        )};`
+      )
+    ).join("")
+  );
+})();
diff --git a/contrib/type-extractor/package.json b/contrib/type-extractor/package.json
@@ -4,7 +4,7 @@
   "description": "",
   "main": "index.js",
   "scripts": {
-    "test": "echo \"Error: no test specified\" && exit 1"
+    "start": "esbuild --bundle --packages=external --format=esm --platform=node --outfile=dist/main.mjs src/main.ts && node dist/main.mjs ../.."
   },
   "keywords": [],
   "author": "",
@@ -13,6 +13,7 @@
     "typescript": "^5.4.5"
   },
   "devDependencies": {
-    "@types/node": "^20.14.9"
+    "@types/node": "^20.14.9",
+    "esbuild": "^0.21.5"
   }
 }
diff --git a/contrib/type-extractor/pnpm-lock.yaml b/contrib/type-extractor/pnpm-lock.yaml
@@ -15,12 +15,158 @@ importers:
       '@types/node':
         specifier: ^20.14.9
         version: 20.14.9
+      esbuild:
+        specifier: ^0.21.5
+        version: 0.21.5
 
 packages:
 
+  '@esbuild/aix-ppc64@0.21.5':
+    resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==}
+    engines: {node: '>=12'}
+    cpu: [ppc64]
+    os: [aix]
+
+  '@esbuild/android-arm64@0.21.5':
+    resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==}
+    engines: {node: '>=12'}
+    cpu: [arm64]
+    os: [android]
+
+  '@esbuild/android-arm@0.21.5':
+    resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==}
+    engines: {node: '>=12'}
+    cpu: [arm]
+    os: [android]
+
+  '@esbuild/android-x64@0.21.5':
+    resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [android]
+
+  '@esbuild/darwin-arm64@0.21.5':
+    resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==}
+    engines: {node: '>=12'}
+    cpu: [arm64]
+    os: [darwin]
+
+  '@esbuild/darwin-x64@0.21.5':
+    resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [darwin]
+
+  '@esbuild/freebsd-arm64@0.21.5':
+    resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==}
+    engines: {node: '>=12'}
+    cpu: [arm64]
+    os: [freebsd]
+
+  '@esbuild/freebsd-x64@0.21.5':
+    resolution: {integrity: sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [freebsd]
+
+  '@esbuild/linux-arm64@0.21.5':
+    resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==}
+    engines: {node: '>=12'}
+    cpu: [arm64]
+    os: [linux]
+
+  '@esbuild/linux-arm@0.21.5':
+    resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==}
+    engines: {node: '>=12'}
+    cpu: [arm]
+    os: [linux]
+
+  '@esbuild/linux-ia32@0.21.5':
+    resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==}
+    engines: {node: '>=12'}
+    cpu: [ia32]
+    os: [linux]
+
+  '@esbuild/linux-loong64@0.21.5':
+    resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==}
+    engines: {node: '>=12'}
+    cpu: [loong64]
+    os: [linux]
+
+  '@esbuild/linux-mips64el@0.21.5':
+    resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==}
+    engines: {node: '>=12'}
+    cpu: [mips64el]
+    os: [linux]
+
+  '@esbuild/linux-ppc64@0.21.5':
+    resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==}
+    engines: {node: '>=12'}
+    cpu: [ppc64]
+    os: [linux]
+
+  '@esbuild/linux-riscv64@0.21.5':
+    resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==}
+    engines: {node: '>=12'}
+    cpu: [riscv64]
+    os: [linux]
+
+  '@esbuild/linux-s390x@0.21.5':
+    resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==}
+    engines: {node: '>=12'}
+    cpu: [s390x]
+    os: [linux]
+
+  '@esbuild/linux-x64@0.21.5':
+    resolution: {integrity: sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [linux]
+
+  '@esbuild/netbsd-x64@0.21.5':
+    resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [netbsd]
+
+  '@esbuild/openbsd-x64@0.21.5':
+    resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [openbsd]
+
+  '@esbuild/sunos-x64@0.21.5':
+    resolution: {integrity: sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [sunos]
+
+  '@esbuild/win32-arm64@0.21.5':
+    resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==}
+    engines: {node: '>=12'}
+    cpu: [arm64]
+    os: [win32]
+
+  '@esbuild/win32-ia32@0.21.5':
+    resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==}
+    engines: {node: '>=12'}
+    cpu: [ia32]
+    os: [win32]
+
+  '@esbuild/win32-x64@0.21.5':
+    resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==}
+    engines: {node: '>=12'}
+    cpu: [x64]
+    os: [win32]
+
   '@types/node@20.14.9':
     resolution: {integrity: sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==}
 
+  esbuild@0.21.5:
+    resolution: {integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==}
+    engines: {node: '>=12'}
+    hasBin: true
+
   typescript@5.4.5:
     resolution: {integrity: sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==}
     engines: {node: '>=14.17'}
@@ -31,10 +177,105 @@ packages:
 
 snapshots:
 
+  '@esbuild/aix-ppc64@0.21.5':
+    optional: true
+
+  '@esbuild/android-arm64@0.21.5':
+    optional: true
+
+  '@esbuild/android-arm@0.21.5':
+    optional: true
+
+  '@esbuild/android-x64@0.21.5':
+    optional: true
+
+  '@esbuild/darwin-arm64@0.21.5':
+    optional: true
+
+  '@esbuild/darwin-x64@0.21.5':
+    optional: true
+
+  '@esbuild/freebsd-arm64@0.21.5':
+    optional: true
+
+  '@esbuild/freebsd-x64@0.21.5':
+    optional: true
+
+  '@esbuild/linux-arm64@0.21.5':
+    optional: true
+
+  '@esbuild/linux-arm@0.21.5':
+    optional: true
+
+  '@esbuild/linux-ia32@0.21.5':
+    optional: true
+
+  '@esbuild/linux-loong64@0.21.5':
+    optional: true
+
+  '@esbuild/linux-mips64el@0.21.5':
+    optional: true
+
+  '@esbuild/linux-ppc64@0.21.5':
+    optional: true
+
+  '@esbuild/linux-riscv64@0.21.5':
+    optional: true
+
+  '@esbuild/linux-s390x@0.21.5':
+    optional: true
+
+  '@esbuild/linux-x64@0.21.5':
+    optional: true
+
+  '@esbuild/netbsd-x64@0.21.5':
+    optional: true
+
+  '@esbuild/openbsd-x64@0.21.5':
+    optional: true
+
+  '@esbuild/sunos-x64@0.21.5':
+    optional: true
+
+  '@esbuild/win32-arm64@0.21.5':
+    optional: true
+
+  '@esbuild/win32-ia32@0.21.5':
+    optional: true
+
+  '@esbuild/win32-x64@0.21.5':
+    optional: true
+
   '@types/node@20.14.9':
     dependencies:
       undici-types: 5.26.5
 
+  esbuild@0.21.5:
+    optionalDependencies:
+      '@esbuild/aix-ppc64': 0.21.5
+      '@esbuild/android-arm': 0.21.5
+      '@esbuild/android-arm64': 0.21.5
+      '@esbuild/android-x64': 0.21.5
+      '@esbuild/darwin-arm64': 0.21.5
+      '@esbuild/darwin-x64': 0.21.5
+      '@esbuild/freebsd-arm64': 0.21.5
+      '@esbuild/freebsd-x64': 0.21.5
+      '@esbuild/linux-arm': 0.21.5
+      '@esbuild/linux-arm64': 0.21.5
+      '@esbuild/linux-ia32': 0.21.5
+      '@esbuild/linux-loong64': 0.21.5
+      '@esbuild/linux-mips64el': 0.21.5
+      '@esbuild/linux-ppc64': 0.21.5
+      '@esbuild/linux-riscv64': 0.21.5
+      '@esbuild/linux-s390x': 0.21.5
+      '@esbuild/linux-x64': 0.21.5
+      '@esbuild/netbsd-x64': 0.21.5
+      '@esbuild/openbsd-x64': 0.21.5
+      '@esbuild/sunos-x64': 0.21.5
+      '@esbuild/win32-arm64': 0.21.5
+      '@esbuild/win32-ia32': 0.21.5
+      '@esbuild/win32-x64': 0.21.5
+
   typescript@5.4.5: {}
 
   undici-types@5.26.5: {}
diff --git a/contrib/type-extractor/src/extract-types.ts b/contrib/type-extractor/src/extract-types.ts
@@ -1,288 +0,0 @@
-// Usage: $0 <path to documentation root>
-
-import fsSync, { promises as fs } from 'fs';
-import ts, { ScriptTarget } from 'typescript';
-import * as path from 'path';
-
-const ignoredExports = ['PublishedAgeRestrictionBaseKey'];
-
-/**
- * @param {string} file
- */
-const runFileJob = (file: string) => {
-  // TODO: idk why this was async, im sure i had a reason
-  let workingFile = file;
-  const tsDefs = file.match(/[\t ]*\.\. ts\:def\:\: [a-zA-Z][a-zA-Z0-9_]*/g);
-  const defines: string[] = [];
-  let dtsOutput = '';
-  if (tsDefs)
-    for (const def of tsDefs) {
-      if (!def) {
-        console.warn('No matches in ', file);
-        break;
-      }
-      workingFile = workingFile.substring(workingFile.indexOf(def));
-      let [defMatch, indentation, defName] =
-        def.match(/([\t ])*\.\. ts\:def\:\: ([a-zA-Z][a-zA-Z0-9_]*) *\n?/) ??
-        [];
-
-      if (
-        !defMatch ||
-        !indentation ||
-        !defName ||
-        ignoredExports.includes(defName)
-      )
-        continue;
-
-      // Extract the ts def
-      indentation = indentation ?? '';
-      workingFile = workingFile.substring(defMatch.length);
-      const workingFileLines = workingFile.split('\n');
-      let tsMatch = '';
-      while (workingFileLines[0]?.trim() === '') workingFileLines.shift();
-      while (
-        (workingFileLines[0]?.trim() === '' ||
-          (workingFileLines[0] &&
-            new RegExp('^' + '[ \\t]'.repeat(indentation.length + 2)).test(
-              workingFileLines[0],
-            ))) &&
-        !workingFileLines[0]?.trim()?.startsWith('.. ts:def::')
-      ) {
-        if (workingFileLines[0].length > indentation.length + 2)
-          workingFileLines[0] = workingFileLines[0].substring(
-            indentation.length + 2,
-          );
-        tsMatch += workingFileLines.shift() + '\n';
-      }
-      workingFile = workingFileLines.join('\n');
-
-      // Convert comments to JSDocs
-      tsMatch = tsMatch
-        .replace(/([ \t]*\/\/.*\n?)+/g, (match) => {
-          match = match
-            .split('\n')
-            .map((v) => v.replace(/[ \t]+\/\/ ?/, '').trim())
-            .join('\n')
-            .trim();
-          if (match.includes('\n'))
-            match = `/**
-${match
-  .split('\n')
-  .map((v) => (v.trimStart().startsWith('//') ? v.replace('//', '') : v))
-  .map((v) => ' *' + (v.startsWith(' ') ? '' : ' ') + v)
-  .join('\n')
-  .replace(/\*\//g, '*​/')}
- */
-`;
-          else
-            match = `/**
- * ${(match.trimStart().startsWith('//') ? match.replace('//', '') : match)
-   .trim()
-   .replace(/\*\//g, '*​/')}
- */
-`;
-          return match;
-        })
-        .trim();
-
-      defines.push(defName);
-      dtsOutput += tsMatch + '\n';
-    }
-
-  if (defines.length === 0) return null; // nothing to give back, just exit
-
-  // Now, find the unknown imports
-
-  dtsOutput += `
-export { ${defines.join(', ')} };
-`;
-
-  // Job is done, return
-  return {
-    defines,
-    dtsOutput,
-  };
-};
-
-(async () => {
-  const genDocsForDirs = ['core/'].map((v) => path.resolve(process.argv[2], v));
-  const genDocsForFiles = (
-    await Promise.all(
-      genDocsForDirs.map(async (dir) =>
-        (await fs.readdir(dir)).map((file) => path.join(dir, file)),
-      ),
-    )
-  ).flat();
-  const output = path.resolve(
-    process.env.TYPE_OUTPUT ?? process.env.TMP ?? process.env.TEMP ?? '/tmp',
-    'net.taler.docs.ts-extracted',
-  );
-  const tsDocOutput = path.join(output, 'dts');
-  // const zodOutput = path.join(output, "zod"); // TODO: this would be cool to have in future
-
-  if (fsSync.existsSync(tsDocOutput))
-    await fs.rm(tsDocOutput, { recursive: true });
-  await fs.mkdir(tsDocOutput, {
-    recursive: true,
-  });
-  // if (fsSync.existsSync(zodOutput)) await fs.rm(zodOutput, { recursive: true });
-  // await fs.mkdir(zodOutput, {
-  //   recursive: true,
-  // });
-  const jobResults = (
-    await Promise.all(
-      genDocsForFiles.map(async (filepath) => ({
-        source: filepath,
-        output: path.join(
-          tsDocOutput,
-          path.basename(filepath).replace('.rst', '.ts'),
-        ),
-        result: runFileJob(await fs.readFile(filepath, 'utf-8'))!,
-      })),
-    )
-  ).filter((v) => v.result !== null);
-  // Polyfilling!!!
-  // TODO: Extract these to standalone .rst files!
-  jobResults.push({
-    source: '/tmp/net.taler.docs.extracted/_forced_polyfill',
-    output: path.join(tsDocOutput, 'post-polyfill.ts'),
-    // This polyfill overwrites any object defined elsewhere
-    result: runFileJob(`
-.. ts:def:: Integer
-  // An integer value.
-  // @integer
-  type Integer = number;
-`)!,
-  });
-  jobResults.unshift({
-    source: '/tmp/net.taler.docs.extracted/_polyfill',
-    output: path.join(tsDocOutput, 'polyfill.ts'),
-    // This polyfill can be overwritten by the actual docs; it's contents will be outputted but ignored by the import resolver if overwritten
-    result: runFileJob(`
-.. ts:def:: PaytoHash
-  // A Binary Object
-  type PaytoHash = string;
-.. ts:def:: AgeCommitmentHash
-  // A Binary Object
-  type AgeCommitmentHash = string;
-.. ts:def:: TALER_RefreshCommitmentP
-  // A Binary Object
-  type TALER_RefreshCommitmentP = string;
-.. ts:def:: WireTransferIdentifierRawP
-  // A Binary Object
-  type WireTransferIdentifierRawP = string;
-.. ts:def:: Base32
-  // Binary data is generally encoded using Crockford’s variant of Base32 (https://www.crockford.com/wrmg/base32.html), except that “U” is not excluded but also decodes to “V” to make OCR easy. We will still simply use the JSON type “base32” and the term “Crockford Base32” in the text to refer to the resulting encoding.
-  type Base32 = string;
-.. ts:def:: ExtensionManifest
-  // Mostly undocumented object; see {@link https://docs.taler.net/design-documents/006-extensions.html#extensionmanifest-object} for what it likely is?
-  interface ExtensionManifest {
-    // The criticality of the extension MUST be provided. It has the same
-    // semantics as "critical" has for extensions in X.509:
-    // - if "true", the client must "understand" the extension before
-    //   proceeding,
-    // - if "false", clients can safely skip extensions they do not
-    //   understand.
-    // (see https://datatracker.ietf.org/doc/html/rfc5280#section-4.2)
-    critical: boolean;
-
-    // The version information MUST be provided in Taler's protocol version
-    // ranges notation, see
-    // https://docs.taler.net/core/api-common.html#protocol-version-ranges
-    version: LibtoolVersion;
-
-    // Optional configuration object, defined by the feature itself
-    config?: object;
-  }
-.. ts:def:: WadId
-  // https://docs.taler.net/core/api-common.html#wadid
-  type WadId = string;
-.. ts:def:: ContractChoice
-  // Untyped in documentation https://docs.taler.net/design-documents/046-mumimo-contracts.html#tsref-type-ContractChoice
-  type ContractChoice = any;
-`)!,
-  });
-  // Resolve Inputs
-  const fileByExport: Record<string, string> = {};
-  jobResults.forEach((result) => {
-    // these are processed intentionally in-order; the last items in jobResults will take priority over the first; polyfill will always take peak priority
-    result.result.defines.forEach(
-      (define) => (fileByExport[define] = result.output),
-    );
-  });
-  await Promise.all(
-    jobResults.map((result) => {
-      // now that the table is populated, lets resolve imports
-      const src = result.result.dtsOutput;
-
-      const toBeImported: Array<string> = [];
-
-      const sourceFile = ts.createSourceFile(
-        path.basename(result.output),
-        src,
-        {
-          languageVersion: ScriptTarget.ESNext,
-        },
-      );
-
-      const astWalker = (node: ts.Node) => {
-        if (node.kind === ts.SyntaxKind.TypeReference) {
-          const typeRefNode = node as ts.TypeReferenceNode;
-          const { typeName } = typeRefNode;
-          const identifier =
-            'escapedText' in typeName
-              ? typeName.escapedText
-              : typeName.getText();
-          if (!result.result.defines.includes(`${identifier}`))
-            toBeImported.push(`${identifier}`);
-        }
-        ts.forEachChild(node, astWalker);
-      };
-      astWalker(sourceFile);
-      result.result.dtsOutput = `${toBeImported
-        .filter((v, i, a) => a.indexOf(v) === i)
-        .map((v) => {
-          if (fileByExport[v])
-            return `import { ${v} } from ${JSON.stringify(
-              './' + path.basename(fileByExport[v]),
-            )}`;
-          else if (['String', 'Boolean'].includes(v))
-            console.warn(
-              `In file ${
-                result.source
-              }: Please use ${v.toLocaleLowerCase()} instead of ${v}`,
-            );
-          console.warn('Could not find reference to', v);
-          return '// WARN: UNKNOWN REF: ' + JSON.stringify(v);
-        })
-        .join('\n')}
-${result.result.dtsOutput}`;
-    }),
-  );
-  // Write outputs
-  await Promise.all(
-    jobResults.map(async ({ output, result }) => {
-      await fs.writeFile(output, result.dtsOutput);
-    }),
-  );
-  // Write the index.ts file
-  const exportsByFile: Record<string, string[]> = {};
-  for (const [exported, file] of Object.entries(fileByExport)) {
-    exportsByFile[file] = exportsByFile[file] ?? [];
-    exportsByFile[file].push(exported);
-  }
-  await fs.writeFile(
-    path.join(tsDocOutput, 'main.ts'),
-    Object.entries(exportsByFile)
-      .map(
-        ([file, exports]) =>
-          // We could use export * from, but then we'd get class conflicts if 2 separate files declare the same type - including if our polyfill overwrites or gets overwritten
-          `export { ${exports.join(', ')} } from ${JSON.stringify(
-            './' + path.basename(file), // TODO: use path.relative
-          )};`,
-      )
-      .join(''),
-  );
-
-  // TODO: call tsc on all our stuff, ensure it validates
-})();
diff --git a/contrib/type-extractor/src/main.ts b/contrib/type-extractor/src/main.ts
@@ -0,0 +1,288 @@
+// Usage: $0 <path to documentation root>
+
+import fsSync, { promises as fs } from 'fs';
+import ts, { ScriptTarget } from 'typescript';
+import * as path from 'path';
+
+const ignoredExports = ['PublishedAgeRestrictionBaseKey'];
+
+/**
+ * @param {string} file
+ */
+const runFileJob = (file: string) => {
+  // TODO: idk why this was async, im sure i had a reason
+  let workingFile = file;
+  const tsDefs = file.match(/[\t ]*\.\. ts\:def\:\: [a-zA-Z][a-zA-Z0-9_]*/g);
+  const defines: string[] = [];
+  let dtsOutput = '';
+  if (tsDefs)
+    for (const def of tsDefs) {
+      if (!def) {
+        console.warn('No matches in ', file);
+        break;
+      }
+      workingFile = workingFile.substring(workingFile.indexOf(def));
+      let [defMatch, indentation, defName] =
+        def.match(/([\t ])*\.\. ts\:def\:\: ([a-zA-Z][a-zA-Z0-9_]*) *\n?/) ??
+        [];
+
+      if (
+        !defMatch ||
+        !indentation ||
+        !defName ||
+        ignoredExports.includes(defName)
+      )
+        continue;
+
+      // Extract the ts def
+      indentation = indentation ?? '';
+      workingFile = workingFile.substring(defMatch.length);
+      const workingFileLines = workingFile.split('\n');
+      let tsMatch = '';
+      while (workingFileLines[0]?.trim() === '') workingFileLines.shift();
+      while (
+        (workingFileLines[0]?.trim() === '' ||
+          (workingFileLines[0] &&
+            new RegExp('^' + '[ \\t]'.repeat(indentation.length + 2)).test(
+              workingFileLines[0],
+            ))) &&
+        !workingFileLines[0]?.trim()?.startsWith('.. ts:def::')
+      ) {
+        if (workingFileLines[0].length > indentation.length + 2)
+          workingFileLines[0] = workingFileLines[0].substring(
+            indentation.length + 2,
+          );
+        tsMatch += workingFileLines.shift() + '\n';
+      }
+      workingFile = workingFileLines.join('\n');
+
+      // Convert comments to JSDocs
+      tsMatch = tsMatch
+        .replace(/([ \t]*\/\/.*\n?)+/g, (match) => {
+          match = match
+            .split('\n')
+            .map((v) => v.replace(/[ \t]+\/\/ ?/, '').trim())
+            .join('\n')
+            .trim();
+          if (match.includes('\n'))
+            match = `/**
+${match
+  .split('\n')
+  .map((v) => (v.trimStart().startsWith('//') ? v.replace('//', '') : v))
+  .map((v) => ' *' + (v.startsWith(' ') ? '' : ' ') + v)
+  .join('\n')
+  .replace(/\*\//g, '*​/')}
+ */
+`;
+          else
+            match = `/**
+ * ${(match.trimStart().startsWith('//') ? match.replace('//', '') : match)
+   .trim()
+   .replace(/\*\//g, '*​/')}
+ */
+`;
+          return match;
+        })
+        .trim();
+
+      defines.push(defName);
+      dtsOutput += tsMatch + '\n';
+    }
+
+  if (defines.length === 0) return null; // nothing to give back, just exit
+
+  // Now, find the unknown imports
+
+  dtsOutput += `
+export { ${defines.join(', ')} };
+`;
+
+  // Job is done, return
+  return {
+    defines,
+    dtsOutput,
+  };
+};
+
+(async () => {
+  const genDocsForDirs = ['core/'].map((v) => path.resolve(process.argv[2], v));
+  const genDocsForFiles = (
+    await Promise.all(
+      genDocsForDirs.map(async (dir) =>
+        (await fs.readdir(dir)).map((file) => path.join(dir, file)),
+      ),
+    )
+  ).flat();
+  const output = path.resolve(
+    process.env.TYPE_OUTPUT ?? process.env.TMP ?? process.env.TEMP ?? '/tmp',
+    'net.taler.docs.ts-extracted',
+  );
+  const tsDocOutput = path.join(output, 'dts');
+  // const zodOutput = path.join(output, "zod"); // TODO: this would be cool to have in future
+
+  if (fsSync.existsSync(tsDocOutput))
+    await fs.rm(tsDocOutput, { recursive: true });
+  await fs.mkdir(tsDocOutput, {
+    recursive: true,
+  });
+  // if (fsSync.existsSync(zodOutput)) await fs.rm(zodOutput, { recursive: true });
+  // await fs.mkdir(zodOutput, {
+  //   recursive: true,
+  // });
+  const jobResults = (
+    await Promise.all(
+      genDocsForFiles.map(async (filepath) => ({
+        source: filepath,
+        output: path.join(
+          tsDocOutput,
+          path.basename(filepath).replace('.rst', '.ts'),
+        ),
+        result: runFileJob(await fs.readFile(filepath, 'utf-8'))!,
+      })),
+    )
+  ).filter((v) => v.result !== null);
+  // Polyfilling!!!
+  // TODO: Extract these to standalone .rst files!
+  jobResults.push({
+    source: '/tmp/net.taler.docs.extracted/_forced_polyfill',
+    output: path.join(tsDocOutput, 'post-polyfill.ts'),
+    // This polyfill overwrites any object defined elsewhere
+    result: runFileJob(`
+.. ts:def:: Integer
+  // An integer value.
+  // @integer
+  type Integer = number;
+`)!,
+  });
+  jobResults.unshift({
+    source: '/tmp/net.taler.docs.extracted/_polyfill',
+    output: path.join(tsDocOutput, 'polyfill.ts'),
+    // This polyfill can be overwritten by the actual docs; its contents will be output but ignored by the import resolver if overwritten
+    result: runFileJob(`
+.. ts:def:: PaytoHash
+  // A Binary Object
+  type PaytoHash = string;
+.. ts:def:: AgeCommitmentHash
+  // A Binary Object
+  type AgeCommitmentHash = string;
+.. ts:def:: TALER_RefreshCommitmentP
+  // A Binary Object
+  type TALER_RefreshCommitmentP = string;
+.. ts:def:: WireTransferIdentifierRawP
+  // A Binary Object
+  type WireTransferIdentifierRawP = string;
+.. ts:def:: Base32
+  // Binary data is generally encoded using Crockford’s variant of Base32 (https://www.crockford.com/wrmg/base32.html), except that “U” is not excluded but also decodes to “V” to make OCR easy. We will still simply use the JSON type “base32” and the term “Crockford Base32” in the text to refer to the resulting encoding.
+  type Base32 = string;
+.. ts:def:: ExtensionManifest
+  // Mostly undocumented object; see {@link https://docs.taler.net/design-documents/006-extensions.html#extensionmanifest-object} for what it likely is?
+  interface ExtensionManifest {
+    // The criticality of the extension MUST be provided. It has the same
+    // semantics as "critical" has for extensions in X.509:
+    // - if "true", the client must "understand" the extension before
+    //   proceeding,
+    // - if "false", clients can safely skip extensions they do not
+    //   understand.
+    // (see https://datatracker.ietf.org/doc/html/rfc5280#section-4.2)
+    critical: boolean;
+
+    // The version information MUST be provided in Taler's protocol version
+    // ranges notation, see
+    // https://docs.taler.net/core/api-common.html#protocol-version-ranges
+    version: LibtoolVersion;
+
+    // Optional configuration object, defined by the feature itself
+    config?: object;
+  }
+.. ts:def:: WadId
+  // https://docs.taler.net/core/api-common.html#wadid
+  type WadId = string;
+.. ts:def:: ContractChoice
+  // Untyped in documentation https://docs.taler.net/design-documents/046-mumimo-contracts.html#tsref-type-ContractChoice
+  type ContractChoice = any;
+`)!,
+  });
+  // Resolve Inputs
+  const fileByExport: Record<string, string> = {};
+  jobResults.forEach((result) => {
+    // these are processed intentionally in-order; the last items in jobResults will take priority over the first; polyfill will always take peak priority
+    result.result.defines.forEach(
+      (define) => (fileByExport[define] = result.output),
+    );
+  });
+  await Promise.all(
+    jobResults.map((result) => {
+      // now that the table is populated, let's resolve imports
+      const src = result.result.dtsOutput;
+
+      const toBeImported: Array<string> = [];
+
+      const sourceFile = ts.createSourceFile(
+        path.basename(result.output),
+        src,
+        {
+          languageVersion: ScriptTarget.ESNext,
+        },
+      );
+
+      const astWalker = (node: ts.Node) => {
+        if (node.kind === ts.SyntaxKind.TypeReference) {
+          const typeRefNode = node as ts.TypeReferenceNode;
+          const { typeName } = typeRefNode;
+          const identifier =
+            'escapedText' in typeName
+              ? typeName.escapedText
+              : typeName.getText();
+          if (!result.result.defines.includes(`${identifier}`))
+            toBeImported.push(`${identifier}`);
+        }
+        ts.forEachChild(node, astWalker);
+      };
+      astWalker(sourceFile);
+      result.result.dtsOutput = `${toBeImported
+        .filter((v, i, a) => a.indexOf(v) === i)
+        .map((v) => {
+          if (fileByExport[v])
+            return `import { ${v} } from ${JSON.stringify(
+              './' + path.basename(fileByExport[v]),
+            )}`;
+          else if (['String', 'Boolean'].includes(v))
+            console.warn(
+              `In file ${
+                result.source
+              }: Please use ${v.toLocaleLowerCase()} instead of ${v}`,
+            );
+          console.warn('Could not find reference to', v);
+          return '// WARN: UNKNOWN REF: ' + JSON.stringify(v);
+        })
+        .join('\n')}
+${result.result.dtsOutput}`;
+    }),
+  );
+  // Write outputs
+  await Promise.all(
+    jobResults.map(async ({ output, result }) => {
+      await fs.writeFile(output, result.dtsOutput);
+    }),
+  );
+  // Write the index.ts file
+  const exportsByFile: Record<string, string[]> = {};
+  for (const [exported, file] of Object.entries(fileByExport)) {
+    exportsByFile[file] = exportsByFile[file] ?? [];
+    exportsByFile[file].push(exported);
+  }
+  await fs.writeFile(
+    path.join(tsDocOutput, 'main.ts'),
+    Object.entries(exportsByFile)
+      .map(
+        ([file, exports]) =>
+          // We could use export * from, but then we'd get class conflicts if 2 separate files declare the same type - including if our polyfill overwrites or gets overwritten
+          `export { ${exports.join(', ')} } from ${JSON.stringify(
+            './' + path.basename(file), // TODO: use path.relative
+          )};`,
+      )
+      .join(''),
+  );
+
+  // TODO: call tsc on all our stuff, ensure it validates
+})();
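
Note: the import-resolution pass in src/main.ts above walks the TypeScript AST and records every TypeReference whose name the file does not define itself, then prepends import statements for those names. A minimal, self-contained sketch of that technique (the sample source text and file name here are invented for illustration; the real code additionally deduplicates names and maps them to generated files):

    import ts from "typescript";

    // Stand-in for a generated .d.ts fragment.
    const src = `interface Demo { amount: Amount; when: Timestamp; }`;
    const sourceFile = ts.createSourceFile("demo.ts", src, ts.ScriptTarget.ESNext);

    // Collect the name of every type referenced in the file.
    const refs: string[] = [];
    const walk = (node: ts.Node): void => {
      if (ts.isTypeReferenceNode(node)) refs.push(node.typeName.getText(sourceFile));
      ts.forEachChild(node, walk);
    };
    walk(sourceFile);

    console.log(refs); // ["Amount", "Timestamp"]

To run the extractor itself, the new "start" script bundles src/main.ts with esbuild into dist/main.mjs and executes it with "../.." (the repository root) as the documentation root; output is written under net.taler.docs.ts-extracted/dts in $TYPE_OUTPUT, falling back to $TMP, $TEMP, or /tmp.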