Diffstat (limited to 'contrib')
-rw-r--r--  contrib/ci/Containerfile                    24
-rwxr-xr-x  contrib/ci/jobs/0-build/build-docs.sh       18
-rwxr-xr-x  contrib/ci/jobs/0-build/job.sh               6
-rw-r--r--  contrib/type-extractor/.gitignore            1
-rw-r--r--  contrib/type-extractor/extract-types.mjs   279
-rw-r--r--  contrib/type-extractor/package.json         15
-rw-r--r--  contrib/type-extractor/pnpm-lock.yaml       24
7 files changed, 367 insertions, 0 deletions
diff --git a/contrib/ci/Containerfile b/contrib/ci/Containerfile
new file mode 100644
index 00000000..023d02de
--- /dev/null
+++ b/contrib/ci/Containerfile
@@ -0,0 +1,24 @@
+FROM docker.io/library/debian:bookworm-slim
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Install docs generation utils
+RUN apt-get update -yqq && \
+ apt-get install -yqq --no-install-recommends \
+ make \
+ graphviz \
+ python3-sphinx \
+ python3-recommonmark \
+ python3-texext \
+ python3-sphinx-book-theme \
+ texlive-latex-extra \
+ texlive-fonts-recommended \
+ tex-gyre \
+ dvipng \
+ latexmk \
+ && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /workdir
+
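+# Run the docs build job; CI is expected to mount the repository's contrib/ tree at /workdir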
+CMD ["bash", "/workdir/ci/jobs/0-build/job.sh"]
diff --git a/contrib/ci/jobs/0-build/build-docs.sh b/contrib/ci/jobs/0-build/build-docs.sh
new file mode 100755
index 00000000..43152df9
--- /dev/null
+++ b/contrib/ci/jobs/0-build/build-docs.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set -exuo pipefail
+
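+# Build the HTML and PDF renderings of the documentation with Sphinx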
+make html
+make latexpdf
+
+# Publish to docs.taler.net if on master branch
+if [[ ${CI_GIT_BRANCH:-} = "master" ]]; then
+ rm -rf /artifacts/docs_build
+
+ mkdir -p /artifacts/docs_build/docs/html/
+ mkdir -p /artifacts/docs_build/docs/pdf/
+
+ cp -r _build/html/* /artifacts/docs_build/docs/html/
+ cp -r _build/latex/*.pdf /artifacts/docs_build/docs/pdf/
+fi
diff --git a/contrib/ci/jobs/0-build/job.sh b/contrib/ci/jobs/0-build/job.sh
new file mode 100755
index 00000000..627aecf0
--- /dev/null
+++ b/contrib/ci/jobs/0-build/job.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -exuo pipefail
+
+job_dir=$(dirname "${BASH_SOURCE[0]}")
+
+"${job_dir}"/build-docs.sh
diff --git a/contrib/type-extractor/.gitignore b/contrib/type-extractor/.gitignore
new file mode 100644
index 00000000..b512c09d
--- /dev/null
+++ b/contrib/type-extractor/.gitignore
@@ -0,0 +1 @@
+node_modules
\ No newline at end of file
diff --git a/contrib/type-extractor/extract-types.mjs b/contrib/type-extractor/extract-types.mjs
new file mode 100644
index 00000000..86bccd9c
--- /dev/null
+++ b/contrib/type-extractor/extract-types.mjs
@@ -0,0 +1,279 @@
+import fsSync, { promises as fs } from "fs";
+import ts from "typescript";
+import * as path from "path";
+
+const ignoredExports = ["PublishedAgeRestrictionBaseKey"];
+
+/**
+ * @param {string} file
+ */
+const runFileJob = async (file) => {
+ // TODO: this function contains no awaits; check whether it still needs to be async.
+ let workingFile = file;
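+ // Scan the reST source for ".. ts:def:: Name" directives; each one
+ // introduces an indented TypeScript definition block beneath it.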
+ const tsDefs = file.match(/[\t ]*\.\. ts\:def\:\: [a-zA-Z][a-zA-Z0-9_]*/g);
+ /** @type string[] */
+ const defines = [];
+ let dtsOutput = "";
+ if (tsDefs)
+ for (const def of tsDefs) {
+ if (!def) {
+ console.warn("No matches in ", file);
+ break;
+ }
+ workingFile = workingFile.substring(workingFile.indexOf(def));
+ let [defMatch, indentation, defName] = def.match(
+ /([\t ]*)\.\. ts\:def\:\: ([a-zA-Z][a-zA-Z0-9_]*) *\n?/
+ );
+
+ if (ignoredExports.includes(defName)) continue;
+
+ // Extract the ts def
+ indentation = indentation ?? "";
+ workingFile = workingFile.substring(defMatch.length);
+ const workingFileLines = workingFile.split("\n");
+ let tsMatch = "";
+ while (workingFileLines[0]?.trim() === "") workingFileLines.shift();
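+ // Consume the directive body: blank lines plus lines indented at least
+ // two spaces past the directive, stopping at the next ts:def directive.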
+ while (
+ (workingFileLines[0]?.trim() === "" ||
+ (workingFileLines[0] &&
+ new RegExp("^" + "[ \\t]".repeat(indentation.length + 2)).test(
+ workingFileLines[0]
+ ))) &&
+ !workingFileLines[0]?.trim()?.startsWith(".. ts:def::")
+ ) {
+ if (workingFileLines[0].length > indentation.length + 2)
+ workingFileLines[0] = workingFileLines[0].substring(
+ indentation.length + 2
+ );
+ tsMatch += workingFileLines.shift() + "\n";
+ }
+ workingFile = workingFileLines.join("\n");
+
+ // Convert comments to JSDocs
+ tsMatch = tsMatch
+ .replace(/([ \t]*\/\/.*\n?)+/g, (match) => {
+ match = match
+ .split("\n")
+ .map((v) => v.replace(/[ \t]+\/\/ ?/, "").trim())
+ .join("\n")
+ .trim();
+ if (match.includes("\n"))
+ match = `/**
+${match
+ .split("\n")
+ .map((v) => (v.trimStart().startsWith("//") ? v.replace("//", "") : v))
+ .map((v) => " *" + (v.startsWith(" ") ? "" : " ") + v)
+ .join("\n")
+ .replace(/\*\//g, "*​/")}
+ */
+`;
+ else
+ match = `/**
+ * ${(match.trimStart().startsWith("//") ? match.replace("//", "") : match)
+ .trim()
+ .replace(/\*\//g, "*​/")}
+ */
+`;
+ return match;
+ })
+ .trim();
+
+ defines.push(defName);
+ dtsOutput += tsMatch + "\n";
+ }
+
+ if (defines.length === 0) return null; // nothing to give back, just exit
+
+ // Append an export statement for every extracted definition; unknown references are resolved into imports later by the caller.
+
+ dtsOutput += `
+export { ${defines.join(", ")} };
+`;
+
+ // Job is done, return
+ return {
+ defines,
+ dtsOutput,
+ };
+};
+
+(async () => {
+ const genDocsForDirs = ["core/"].map((v) => path.resolve(process.argv[2], v));
+ const genDocsForFiles = (
+ await Promise.all(
+ genDocsForDirs.map(async (dir) =>
+ (await fs.readdir(dir)).map((file) => path.join(dir, file))
+ )
+ )
+ ).flat();
+ const output = path.resolve(
+ process.env.TYPE_OUTPUT ?? process.env.TMP ?? process.env.TEMP ?? "/tmp",
+ "net.taler.docs.ts-extracted"
+ );
+ const tsDocOutput = path.join(output, "dts");
+ // const zodOutput = path.join(output, "zod"); // TODO: this would be cool to have in future
+
+ if (fsSync.existsSync(tsDocOutput))
+ await fs.rm(tsDocOutput, { recursive: true });
+ await fs.mkdir(tsDocOutput, {
+ recursive: true,
+ });
+ // if (fsSync.existsSync(zodOutput)) await fs.rm(zodOutput, { recursive: true });
+ // await fs.mkdir(zodOutput, {
+ // recursive: true,
+ // });
+ const jobResults = (
+ await Promise.all(
+ genDocsForFiles.map(async (filepath) => ({
+ source: filepath,
+ output: path.join(
+ tsDocOutput,
+ path.basename(filepath).replace(".rst", ".ts")
+ ),
+ result: await runFileJob(await fs.readFile(filepath, "utf-8")),
+ }))
+ )
+ ).filter((v) => v.result !== null);
+ // Polyfills: definitions the docs reference but never define.
+ // TODO: Extract these to standalone .rst files!
+ jobResults.push({
+ source: "/tmp/net.taler.docs.extracted/_forced_polyfill",
+ output: path.join(tsDocOutput, "post-polyfill.ts"),
+ // This polyfill overwrites any object defined elsewhere
+ result: await runFileJob(`
+.. ts:def:: Integer
+ // An integer value.
+ // @integer
+ type Integer = number;
+`),
+ });
+ jobResults.unshift({
+ source: "/tmp/net.taler.docs.extracted/_polyfill",
+ output: path.join(tsDocOutput, "polyfill.ts"),
+ // This polyfill can be overridden by the actual docs; its contents are still written out, but the import resolver ignores them when overridden.
+ result: await runFileJob(`
+.. ts:def:: PaytoHash
+ // A Binary Object
+ type PaytoHash = string;
+.. ts:def:: AgeCommitmentHash
+ // A Binary Object
+ type AgeCommitmentHash = string;
+.. ts:def:: TALER_RefreshCommitmentP
+ // A Binary Object
+ type TALER_RefreshCommitmentP = string;
+.. ts:def:: WireTransferIdentifierRawP
+ // A Binary Object
+ type WireTransferIdentifierRawP = string;
+.. ts:def:: Base32
+ // Binary data is generally encoded using Crockford’s variant of Base32 (https://www.crockford.com/wrmg/base32.html), except that “U” is not excluded but also decodes to “V” to make OCR easy. We will still simply use the JSON type “base32” and the term “Crockford Base32” in the text to refer to the resulting encoding.
+ type Base32 = string;
+.. ts:def:: ExtensionManifest
+ // Mostly undocumented object; see {@link https://docs.taler.net/design-documents/006-extensions.html#extensionmanifest-object} for what it likely is?
+ interface ExtensionManifest {
+ // The criticality of the extension MUST be provided. It has the same
+ // semantics as "critical" has for extensions in X.509:
+ // - if "true", the client must "understand" the extension before
+ // proceeding,
+ // - if "false", clients can safely skip extensions they do not
+ // understand.
+ // (see https://datatracker.ietf.org/doc/html/rfc5280#section-4.2)
+ critical: boolean;
+
+ // The version information MUST be provided in Taler's protocol version
+ // ranges notation, see
+ // https://docs.taler.net/core/api-common.html#protocol-version-ranges
+ version: LibtoolVersion;
+
+ // Optional configuration object, defined by the feature itself
+ config?: object;
+ }
+.. ts:def:: WadId
+ // https://docs.taler.net/core/api-common.html#wadid
+ type WadId = string;
+.. ts:def:: ContractChoice
+ // Untyped in documentation https://docs.taler.net/design-documents/046-mumimo-contracts.html#tsref-type-ContractChoice
+ type ContractChoice = any;
+`),
+ });
+ // Resolve Inputs
+ /** @type {Record<string,string>} */
+ const fileByExport = {};
+ jobResults.forEach((result) => {
+ // These are intentionally processed in order: later items in jobResults overwrite earlier ones in fileByExport, so the forced polyfill always takes top priority.
+ result.result.defines.forEach(
+ (define) => (fileByExport[define] = result.output)
+ );
+ });
+ await Promise.all(
+ jobResults.map((result) => {
+ // Now that the lookup table is populated, let's resolve imports.
+ const src = result.result.dtsOutput;
+
+ /** @type {string[]} */
+ const toBeImported = [];
+
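+ // Parse the generated declaration source and walk its AST: every
+ // TypeReference not defined locally must be imported from another file.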
+ const sourceFile = ts.createSourceFile(path.basename(result.output), src, ts.ScriptTarget.Latest);
+
+ /**
+ * @param {ts.Node} node
+ */
+ const astWalker = (node) => {
+ if (node.kind === ts.SyntaxKind.TypeReference) {
+ /** @type {ts.TypeReferenceNode} */
+ const typeRefNode = node;
+ const identifier = typeRefNode.typeName.escapedText;
+ if (!result.result.defines.includes(identifier))
+ toBeImported.push(identifier);
+ }
+ ts.forEachChild(node, astWalker);
+ };
+ astWalker(sourceFile);
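+ // Prepend one deduplicated import line per external reference.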
+ result.result.dtsOutput = `${toBeImported
+ .filter((v, i, a) => a.indexOf(v) === i)
+ .map((v) => {
+ if (fileByExport[v])
+ return `import { ${v} } from ${JSON.stringify(
+ "./" + path.basename(fileByExport[v])
+ )}`;
+ console.warn("Could not find reference to", v);
+ return "// WARN: UNKNOWN REF: " + JSON.stringify(v);
+ })
+ .join("\n")}
+${result.result.dtsOutput}`;
+ })
+ );
+ // Write outputs
+ await Promise.all(
+ jobResults.map(async ({ output, result }) => {
+ await fs.writeFile(output, result.dtsOutput);
+ })
+ );
+ // Write the main.ts entry point, re-exporting each definition from the file that ultimately defines it.
+ /** @type {Record<string,string[]>} */
+ const exportsByFile = {};
+ for (const [exported, file] of Object.entries(fileByExport)) {
+ exportsByFile[file] = exportsByFile[file] ?? [];
+ exportsByFile[file].push(exported);
+ }
+ await fs.writeFile(
+ path.join(tsDocOutput, "main.ts"),
+ Object.entries(exportsByFile)
+ .map(
+ ([file, exports]) =>
+ // We could use "export * from", but then we'd get name conflicts if two separate files declare the same type, including when our polyfill overrides or is overridden.
+ `export { ${exports.join(", ")} } from ${JSON.stringify(
+ "./" + path.basename(file) // TODO: use path.relative
+ )};`
+ )
+ .join("")
+ );
+
+ // TODO: call tsc on all our stuff, ensure it validates
+})();
diff --git a/contrib/type-extractor/package.json b/contrib/type-extractor/package.json
new file mode 100644
index 00000000..4fbf0020
--- /dev/null
+++ b/contrib/type-extractor/package.json
@@ -0,0 +1,15 @@
+{
+ "name": "type-extractor",
+ "version": "1.0.0",
+ "description": "",
+ "main": "index.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "keywords": [],
+ "author": "",
+ "license": "ISC",
+ "dependencies": {
+ "typescript": "^5.4.5"
+ }
+}
diff --git a/contrib/type-extractor/pnpm-lock.yaml b/contrib/type-extractor/pnpm-lock.yaml
new file mode 100644
index 00000000..76630473
--- /dev/null
+++ b/contrib/type-extractor/pnpm-lock.yaml
@@ -0,0 +1,24 @@
+lockfileVersion: '9.0'
+
+settings:
+ autoInstallPeers: true
+ excludeLinksFromLockfile: false
+
+importers:
+
+ .:
+ dependencies:
+ typescript:
+ specifier: ^5.4.5
+ version: 5.4.5
+
+packages:
+
+ typescript@5.4.5:
+ resolution: {integrity: sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==}
+ engines: {node: '>=14.17'}
+ hasBin: true
+
+snapshots:
+
+ typescript@5.4.5: {}