taler-docs

Documentation for GNU Taler components, APIs and protocols
Log | Files | Refs | README | LICENSE

main.ts (9613B)


      1 // Usage: $0 <path to documentation root>
      2 
      3 import fsSync, { promises as fs } from 'fs';
      4 import ts from 'typescript';
      5 import * as path from 'path';
      6 
      7 const ignoredExports = ['PublishedAgeRestrictionBaseKey'];
      8 
      9 /**
     10  * @param {string} file
     11  */
     12 const runFileJob = (file: string) => {
     13   // TODO: idk why this was async, im sure i had a reason
     14   let workingFile = file;
     15   const tsDefs = file.match(/[\t ]*\.\. ts\:def\:\: [a-zA-Z][a-zA-Z0-9_]*/g);
     16   const defines: string[] = [];
     17   let dtsOutput = '';
     18   if (!tsDefs) return null;
     19   for (const def of tsDefs) {
     20     if (!def) {
     21       console.warn('No matches in ', file);
     22       break;
     23     }
     24     workingFile = workingFile.substring(workingFile.indexOf(def));
     25     let [defMatch, indentation, defName] =
     26       def.match(/([\t ])*\.\. ts\:def\:\: ([a-zA-Z][a-zA-Z0-9_]*) *\n?/) ?? [];
     27 
     28     if (!defMatch || !defName || ignoredExports.includes(defName)) continue;
     29 
     30     // Extract the ts def
     31     indentation = indentation ?? '';
     32     workingFile = workingFile.substring(defMatch.length);
     33     const workingFileLines = workingFile.split('\n');
     34     let tsMatch = '';
     35     while (workingFileLines[0]?.trim() === '') workingFileLines.shift();
     36     while (
     37       (workingFileLines[0]?.trim() === '' ||
     38         (workingFileLines[0] &&
     39           new RegExp('^' + '[ \\t]'.repeat(indentation.length + 2)).test(
     40             workingFileLines[0],
     41           ))) &&
     42       !workingFileLines[0]?.trim()?.startsWith('.. ts:def::')
     43     ) {
     44       if (workingFileLines[0].length > indentation.length + 2)
     45         workingFileLines[0] = workingFileLines[0].substring(
     46           indentation.length + 2,
     47         );
     48       tsMatch += workingFileLines.shift() + '\n';
     49     }
     50     workingFile = workingFileLines.join('\n');
     51 
     52     // Convert comments to JSDocs
     53     tsMatch = tsMatch
     54       .replace(/([ \t]*\/\/.*\n?)+/g, (match) => {
     55         match = match
     56           .split('\n')
     57           .map((v) => v.replace(/[ \t]+\/\/ ?/, '').trim())
     58           .join('\n')
     59           .trim();
     60         if (match.includes('\n'))
     61           match = `/**
     62 ${match
     63   .split('\n')
     64   .map((v) => (v.trimStart().startsWith('//') ? v.replace('//', '') : v))
     65   .map((v) => ' *' + (v.startsWith(' ') ? '' : ' ') + v)
     66   .join('\n')
     67   .replace(/\*\//g, '*​/')}
     68  */
     69 `;
     70         else
     71           match = `/**
     72  * ${(match.trimStart().startsWith('//') ? match.replace('//', '') : match)
     73    .trim()
     74    .replace(/\*\//g, '*​/')}
     75  */
     76 `;
     77         return match;
     78       })
     79       .trim();
     80 
     81     defines.push(defName);
     82     dtsOutput += tsMatch + '\n';
     83   }
     84 
     85   if (defines.length === 0) return null; // nothing to give back, just exit
     86 
     87   // Now, find the unknown imports
     88 
     89   dtsOutput += `
     90 export { ${defines.join(', ')} };
     91 `;
     92 
     93   // Job is done, return
     94   return {
     95     defines,
     96     dtsOutput,
     97   };
     98 };
     99 
(async () => {
  // Directories (relative to the documentation root passed as argv[2]) whose
  // files are scanned for ts:def directives.
  // NOTE(review): process.argv[2] is assumed present — no usage/arg check.
  const genDocsForDirs = ['core/'].map((v) => path.resolve(process.argv[2], v));
  // Flat list of every file path inside those directories.
  const genDocsForFiles = (
    await Promise.all(
      genDocsForDirs.map(async (dir) =>
        (await fs.readdir(dir)).map((file) => path.join(dir, file)),
      ),
    )
  ).flat();
  // Output root: $TYPE_OUTPUT, else the platform temp dir, else /tmp.
  const output = path.resolve(
    process.env.TYPE_OUTPUT ?? process.env.TMP ?? process.env.TEMP ?? '/tmp',
    'net.taler.docs.ts-extracted',
  );
  const tsDocOutput = path.join(output, 'dts');
  // const zodOutput = path.join(output, "zod"); // TODO: this would be cool to have in future

  // Start from a clean output directory on every run.
  if (fsSync.existsSync(tsDocOutput))
    await fs.rm(tsDocOutput, { recursive: true });
  await fs.mkdir(tsDocOutput, {
    recursive: true,
  });
  // if (fsSync.existsSync(zodOutput)) await fs.rm(zodOutput, { recursive: true });
  // await fs.mkdir(zodOutput, {
  //   recursive: true,
  // });
  // Run the extraction job on every file; *.rst names map to *.ts outputs.
  // Files yielding no defs (result null) are dropped by the filter below.
  const jobResults = (
    await Promise.all(
      genDocsForFiles.map(async (filepath) => ({
        source: filepath,
        output: path.join(
          tsDocOutput,
          path.basename(filepath).replace('.rst', '.ts'),
        ),
        result: runFileJob(await fs.readFile(filepath, 'utf-8'))!,
      })),
    )
  ).filter((v) => v.result !== null && v.result !== undefined);
  // Polyfilling!!!
  // TODO: Extract these to standalone .rst files!
  // Pushed last so it wins in the fileByExport table built below.
  jobResults.push({
    source: '/tmp/net.taler.docs.extracted/_forced_polyfill',
    output: path.join(tsDocOutput, 'post-polyfill.ts'),
    // This polyfill overwrites any object defined elsewhere
    result: runFileJob(`
.. ts:def:: Integer
  // An integer value.
  // @integer
  type Integer = number;
`)!,
  });
  // Unshifted first so any real doc definition of these names wins.
  jobResults.unshift({
    source: '/tmp/net.taler.docs.extracted/_polyfill',
    output: path.join(tsDocOutput, 'polyfill.ts'),
    // This polyfill can be overwritten by the actual docs; its contents will be outputted but ignored by the import resolver if overwritten
    result: runFileJob(`
.. ts:def:: PaytoHash
  // A Binary Object
  type PaytoHash = string;
.. ts:def:: AgeCommitmentHash
  // A Binary Object
  type AgeCommitmentHash = string;
.. ts:def:: TALER_RefreshCommitmentP
  // A Binary Object
  type TALER_RefreshCommitmentP = string;
.. ts:def:: WireTransferIdentifierRawP
  // A Binary Object
  type WireTransferIdentifierRawP = string;
.. ts:def:: Base32
  // Binary data is generally encoded using Crockford’s variant of Base32 (https://www.crockford.com/wrmg/base32.html), except that “U” is not excluded but also decodes to “V” to make OCR easy. We will still simply use the JSON type “base32” and the term “Crockford Base32” in the text to refer to the resulting encoding.
  type Base32 = string;
.. ts:def:: ExtensionManifest
  // Mostly undocumented object; see {@link https://docs.taler.net/design-documents/006-extensions.html#extensionmanifest-object} for what it likely is?
  interface ExtensionManifest {
    // The criticality of the extension MUST be provided.  It has the same
    // semantics as "critical" has for extensions in X.509:
    // - if "true", the client must "understand" the extension before
    //   proceeding,
    // - if "false", clients can safely skip extensions they do not
    //   understand.
    // (see https://datatracker.ietf.org/doc/html/rfc5280#section-4.2)
    critical: boolean;

    // The version information MUST be provided in Taler's protocol version
    // ranges notation, see
    // https://docs.taler.net/core/api-common.html#protocol-version-ranges
    version: LibtoolVersion;

    // Optional configuration object, defined by the feature itself
    config?: object;
  }
.. ts:def:: WadId
  // https://docs.taler.net/core/api-common.html#wadid
  type WadId = string;
.. ts:def:: ContractChoice
  // Untyped in documentation https://docs.taler.net/design-documents/046-mumimo-contracts.html#tsref-type-ContractChoice
  type ContractChoice = any;
`)!,
  });
  // Resolve Inputs
  // Map each exported type name to the generated file that defines it.
  const fileByExport: Record<string, string> = {};
  jobResults.forEach((result) => {
    // these are processed intentionally in-order; the last items in jobResults will take priority over the first; polyfill will always take peak priority
    result.result.defines.forEach(
      (define) => (fileByExport[define] = result.output),
    );
  });
  await Promise.all(
    jobResults.map((result) => {
      // now that the table is populated, let's resolve imports
      const src = result.result.dtsOutput;

      // Type names referenced by this file but defined elsewhere.
      const toBeImported: Array<string> = [];

      // Parse the generated source with the TypeScript compiler API so type
      // references are found syntactically, not by string matching.
      const sourceFile = ts.createSourceFile(
        path.basename(result.output),
        src,
        {
          languageVersion: ts.ScriptTarget.ESNext,
        },
      );

      // Recursively collect every TypeReference identifier not defined in
      // this same file.
      const astWalker = (node: ts.Node) => {
        if (node.kind === ts.SyntaxKind.TypeReference) {
          const typeRefNode = node as ts.TypeReferenceNode;
          const { typeName } = typeRefNode;
          const identifier =
            'escapedText' in typeName
              ? typeName.escapedText
              : typeName.getText();
          if (!result.result.defines.includes(`${identifier}`))
            toBeImported.push(`${identifier}`);
        }
        ts.forEachChild(node, astWalker);
      };
      astWalker(sourceFile);
      // Prepend one import line per unique unresolved name; unknown names
      // become a WARN comment so the output still parses.
      result.result.dtsOutput = `${toBeImported
        .filter((v, i, a) => a.indexOf(v) === i)
        .map((v) => {
          if (fileByExport[v])
            return `import { ${v} } from ${JSON.stringify(
              './' + path.basename(fileByExport[v]),
            )}`;
          else if (['String', 'Boolean'].includes(v))
            console.warn(
              `In file ${
                result.source
              }: Please use ${v.toLocaleLowerCase()} instead of ${v}`,
            );
          console.warn('Could not find reference to', v);
          return '// WARN: UNKNOWN REF: ' + JSON.stringify(v);
        })
        .join('\n')}
${result.result.dtsOutput}`;
    }),
  );
  // Write outputs
  await Promise.all(
    jobResults.map(async ({ output, result }) => {
      await fs.writeFile(output, result.dtsOutput);
    }),
  );
  // Write the index.ts file
  // Invert fileByExport so each file lists the names it won ownership of.
  const exportsByFile: Record<string, string[]> = {};
  for (const [exported, file] of Object.entries(fileByExport)) {
    exportsByFile[file] = exportsByFile[file] ?? [];
    exportsByFile[file].push(exported);
  }
  await fs.writeFile(
    path.join(tsDocOutput, 'main.ts'),
    Object.entries(exportsByFile)
      .map(
        ([file, exports]) =>
          // We could use export * from, but then we'd get class conflicts if 2 separate files declare the same type - including if our polyfill overwrites or gets overwritten
          `export { ${exports.join(', ')} } from ${JSON.stringify(
            './' + path.basename(file), // TODO: use path.relative
          )};`,
      )
      .join(''),
  );

  // TODO: call tsc on all our stuff, ensure it validates
})();