From 89cc5ef8e8182eab6482717383c02b3bdd575161 Mon Sep 17 00:00:00 2001 From: Edward Thomson Date: Mon, 25 Nov 2024 10:12:29 +0000 Subject: [PATCH] Include documentation generator libgit2 has a new documentation generator that generates API schema from our headers, then produces reference documentation that is included into the website directly. --- .github/workflows/documentation.yml | 60 ++ .github/workflows/main.yml | 49 - script/api-docs/README.md | 13 + script/api-docs/api-generator.js | 1543 +++++++++++++++++++++++++++ script/api-docs/docs-generator.js | 1326 +++++++++++++++++++++++ script/api-docs/generate | 105 ++ script/api-docs/package-lock.json | 79 ++ script/api-docs/package.json | 6 + 8 files changed, 3132 insertions(+), 49 deletions(-) create mode 100644 .github/workflows/documentation.yml create mode 100644 script/api-docs/README.md create mode 100755 script/api-docs/api-generator.js create mode 100755 script/api-docs/docs-generator.js create mode 100755 script/api-docs/generate create mode 100644 script/api-docs/package-lock.json create mode 100644 script/api-docs/package.json diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml new file mode 100644 index 000000000..a2e45ca5f --- /dev/null +++ b/.github/workflows/documentation.yml @@ -0,0 +1,60 @@ +# Update the www.libgit2.org reference documentation +name: Generate Documentation + +on: + push: + branches: [ main, maint/* ] + release: + workflow_dispatch: + +permissions: + contents: read + +jobs: + documentation: + name: "Generate documentation" + runs-on: "ubuntu-latest" + steps: + - name: Check out source repository + uses: actions/checkout@v4 + with: + path: source + fetch-depth: 0 + - name: Check out documentation repository + uses: actions/checkout@v4 + with: + repository: libgit2/www.libgit2.org + path: www + fetch-depth: 0 + ssh-key: ${{ secrets.DOCS_PUBLISH_KEY }} + - name: Prepare branches + run: | + for a in main $(git branch -r --list 'origin/maint/*' | sed -e "s/^ origin\///"); do + git branch --track "$a" "origin/$a" + done + working-directory: source + - name: Generate documentation + run: | + npm install + ./generate ../.. ../../../www/docs + working-directory: source/script/api-docs + - name: Examine changes + run: | + if [ -n "$(git diff --name-only)" ]; then + echo "changes=true" >> $GITHUB_OUTPUT + else + echo "changes=false" >> $GITHUB_OUTPUT + fi + id: check + working-directory: www + - name: Publish documentation + run: | + DATE=$(date +"%Y-%m-%d") + + git config user.name 'Documentation Site Generator' + git config user.email 'libgit2@users.noreply.github.com' + git add . + git commit -m"Documentation update ${DATE}" + git push origin main + if: steps.check.outputs.changes == 'true' + working-directory: www diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index d16647299..d18321f5f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -245,52 +245,3 @@ jobs: uses: test-summary/action@v2 with: paths: 'test-results-*/*.xml' - - - # Generate documentation using docurium. We'll upload the documentation - # as a build artifact so that it can be reviewed as part of a pull - # request or in a forked build. For CI builds in the main repository's - # main branch, we'll push the gh-pages branch back up so that it is - # published to our documentation site. 
-  documentation:
-    name: Generate documentation
-    if: success() || failure()
-    runs-on: ubuntu-latest
-    steps:
-    - name: Check out repository
-      uses: actions/checkout@v4
-      with:
-        path: source
-        fetch-depth: 0
-    - name: Set up container
-      uses: ./source/.github/actions/download-or-build-container
-      with:
-        registry: ${{ env.docker-registry }}
-        config-path: ${{ env.docker-config-path }}
-        container: docurium
-        github_token: ${{ secrets.github_token }}
-        dockerfile: ${{ matrix.platform.container.dockerfile }}
-    - name: Generate documentation
-      working-directory: source
-      run: |
-        git config user.name 'Documentation Generation'
-        git config user.email 'libgit2@users.noreply.github.com'
-        git branch gh-pages origin/gh-pages
-        docker login https://${{ env.docker-registry }} -u ${{ github.actor }} -p ${{ github.token }}
-        docker run \
-            --rm \
-            -v "$(pwd):/home/libgit2" \
-            -w /home/libgit2 \
-            ${{ env.docker-registry }}/${{ github.repository }}/docurium:latest \
-            cm doc api.docurium
-        git checkout gh-pages
-        zip --exclude .git/\* --exclude .gitignore --exclude .gitattributes -r api-documentation.zip .
-    - uses: actions/upload-artifact@v4
-      name: Upload artifact
-      with:
-        name: api-documentation
-        path: source/api-documentation.zip
-    - name: Push documentation branch
-      working-directory: source
-      run: git push origin gh-pages
-      if: github.event_name == 'push' && github.repository == 'libgit2/libgit2'
diff --git a/script/api-docs/README.md b/script/api-docs/README.md
new file mode 100644
index 000000000..fb329e2fa
--- /dev/null
+++ b/script/api-docs/README.md
@@ -0,0 +1,13 @@
+# API Documentation Generator
+
+These scripts generate the "raw API" specs and reference documentation
+for [www.libgit2.org](https://libgit2.org/docs/reference).
+
+The "raw API" specs consist of JSON documents, one per
+released version or branch, that describe the APIs. This is
+suitable for creating documentation from, and may be useful for
+language bindings as well.
+
+The reference documentation consists of documentation fragments for
+each API in each version, ready to be included in the libgit2
+documentation website.
diff --git a/script/api-docs/api-generator.js b/script/api-docs/api-generator.js
new file mode 100755
index 000000000..fffeb5e9c
--- /dev/null
+++ b/script/api-docs/api-generator.js
@@ -0,0 +1,1543 @@
+#!/usr/bin/env node
+
+const path = require('node:path');
+const child_process = require('node:child_process');
+const fs = require('node:fs').promises;
+const util = require('node:util');
+const process = require('node:process');
+
+const { program } = require('commander');
+
+const includePath = (p) => `${p}/include`;
+const ancientIncludePath = (p) => `${p}/src/git`;
+const legacyIncludePath = (p) => `${p}/src/git2`;
+const standardIncludePath = (p) => `${includePath(p)}/git2`;
+const systemIncludePath = (p) => `${includePath(p)}/git2/sys`;
+
+const fileIgnoreList = [ 'stdint.h', 'inttypes.h' ];
+const apiIgnoreList = [ 'GIT_BEGIN_DECL', 'GIT_END_DECL', 'GIT_WIN32' ];
+
+// Some older versions of libgit2 need some help with includes
+const defaultIncludes = [
+  'checkout.h', 'common.h', 'diff.h', 'email.h', 'oidarray.h', 'merge.h', 'remote.h', 'types.h'
+];
+
+// We're unable to fully map `types.h` defined types into groups;
+// provide some help.
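+// For example, `git_treebuilder` types are documented under the `tree`
+// group and `git_push` types under the `remote` group.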
+const groupMap = { + 'filemode': 'tree', + 'treebuilder': 'tree', + 'note': 'notes', + 'packbuilder': 'pack', + 'reference': 'refs', + 'push': 'remote' }; + +async function headerPaths(p) { + const possibleIncludePaths = [ + ancientIncludePath(p), + legacyIncludePath(p), + standardIncludePath(p), + systemIncludePath(p) + ]; + + const includePaths = [ ]; + const paths = [ ]; + + for (const possibleIncludePath of possibleIncludePaths) { + try { + await fs.stat(possibleIncludePath); + includePaths.push(possibleIncludePath); + } + catch (e) { + if (e?.code !== 'ENOENT') { + throw e; + } + } + } + + if (!includePaths.length) { + throw new Error(`no include paths for ${p}`); + } + + for (const fullPath of includePaths) { + paths.push(...(await fs.readdir(fullPath)). + filter((filename) => filename.endsWith('.h')). + filter((filename) => !fileIgnoreList.includes(filename)). + map((filename) => `${fullPath}/${filename}`)); + } + + return paths; +} + +function trimPath(basePath, headerPath) { + const possibleIncludePaths = [ + ancientIncludePath(basePath), + legacyIncludePath(basePath), + standardIncludePath(basePath), + systemIncludePath(basePath) + ]; + + for (const possibleIncludePath of possibleIncludePaths) { + if (headerPath.startsWith(possibleIncludePath + '/')) { + return headerPath.substr(possibleIncludePath.length + 1); + } + } + + throw new Error("header path is not beneath include root"); +} + +function parseFileAst(path, ast) { + let currentFile = undefined; + const fileData = [ ]; + + for (const node of ast.inner) { + if (node.loc?.file && currentFile != node.loc.file) { + currentFile = node.loc.file; + } else if (node.loc?.spellingLoc?.file && currentFile != node.loc.spellingLoc.file) { + currentFile = node.loc.spellingLoc.file; + } + + if (currentFile != path) { + continue; + } + + fileData.push(node); + } + + return fileData; +} + +function includeBase(path) { + const segments = path.split('/'); + + while (segments.length > 1) { + if (segments[segments.length - 1] === 'git2' || + segments[segments.length - 1] === 'git') { + segments.pop(); + return segments.join('/'); + } + + segments.pop(); + } + + throw new Error(`could not resolve include base for ${path}`); +} + +function readAst(path, options) { + return new Promise((resolve, reject) => { + let errorMessage = ''; + const chunks = [ ]; + + const processArgs = [ path, '-Xclang', '-ast-dump=json', `-I${includeBase(path)}` ]; + + if (options?.deprecateHard) { + processArgs.push(`-DGIT_DEPRECATE_HARD`); + } + + if (options?.includeFiles) { + for (const file of options.includeFiles) { + processArgs.push(`-include`); + processArgs.push(file) + } + } + + const process = child_process.spawn('clang', processArgs); + + process.stderr.on('data', (message) => { + errorMessage += message; + }); + process.stdout.on('data', (chunk) => { + chunks.push(chunk); + }); + process.on('close', (code) => { + if (code != 0 && options.strict) { + reject(new Error(`clang exit code ${code}: ${errorMessage}`)); + } + else if (code != 0) { + resolve([ ]); + } + else { + const ast = JSON.parse(Buffer.concat(chunks).toString()); + resolve(parseFileAst(path, ast)); + } + }); + process.on('error', function (err) { + reject(err); + }); + }); +} + +async function readFile(path) { + const buf = await fs.readFile(path); + return buf.toString(); +} + +function ensure(message, test) { + if (!test) { + throw new Error(message); + } +} + +function ensureDefined(name, value) { + if (!value) { + throw new Error(`could not find ${name} for declaration`); + } + + return 
value; +} + +function groupifyId(location, id) { + if (!id) { + throw new Error(`could not find id in declaration`); + } + + if (!location || !location.file) { + throw new Error(`unspecified location`); + } + + return `${location.file}-${id}`; +} + +function blockCommentText(block) { + ensure('block does not have a single paragraph element', block.inner.length === 1 && block.inner[0].kind === 'ParagraphComment'); + return commentText(block.inner[0]); +} + +function richBlockCommentText(block) { + ensure('block does not have a single paragraph element', block.inner.length === 1 && block.inner[0].kind === 'ParagraphComment'); + return richCommentText(block.inner[0]); +} + +function paramCommentText(param) { + ensure('param does not have a single paragraph element', param.inner.length === 1 && param.inner[0].kind === 'ParagraphComment'); + return richCommentText(param.inner[0]); +} + +function appendCommentText(chunk) { + return chunk.startsWith(' ') ? "\n" + chunk : chunk; +} + +function commentText(para) { + let text = ''; + + for (const comment of para.inner) { + // docbook allows backslash escaped text, and reports it differently. + // we restore the literal `\`. + if (comment.kind === 'InlineCommandComment') { + text += `\\${comment.name}`; + } + else if (comment.kind === 'TextComment') { + text += text ? "\n" + comment.text : comment.text; + } else { + throw new Error(`unknown paragraph comment element: ${comment.kind}`); + } + } + + return text.trim(); +} + +function nextText(para, idx) { + if (!para.inner[idx + 1] || para.inner[idx + 1].kind !== 'TextComment') { + throw new Error("expected text comment"); + } + + return para.inner[idx + 1].text; +} + +function inlineCommandData(data, command) { + ensure(`${command} information does not follow @${command}`, data?.kind === 'TextComment'); + + const result = data.text.match(/^(?:\[([^\]]+)\])? ((?:[a-zA-Z0-9\_]+)|`[a-zA-Z0-9\_\* ]+`)(.*)/); + ensure(`${command} data does not follow @${command}`, result); + + const [ , attr, spec, remain ] = result; + return [ attr, spec.replace(/^`(.*)`$/, "$1"), remain ] +} + +function richCommentText(para) { + let text = ''; + let extendedType = undefined; + let subkind = undefined; + let versionMacro = undefined; + let initMacro = undefined; + let initFunction = undefined; + let lastComment = undefined; + + for (let i = 0; i < para.inner?.length; i++) { + const comment = para.inner[i]; + + if (comment.kind === 'InlineCommandComment' && + comment.name === 'type') { + const [ attr, data, remain ] = inlineCommandData(para.inner[++i], "type"); + + extendedType = { kind: attr, type: data }; + text += remain; + } + else if (comment.kind === 'InlineCommandComment' && + comment.name === 'flags') { + subkind = 'flags'; + } + else if (comment.kind === 'InlineCommandComment' && + comment.name === 'options') { + const [ attr, data, remain ] = inlineCommandData(para.inner[++i], "options"); + + if (attr === 'version') { + versionMacro = data; + } + else if (attr === 'init_macro') { + initMacro = data; + } + else if (attr === 'init_function') { + initFunction = data; + } + + subkind = 'options'; + text += remain; + } + // docbook allows backslash escaped text, and reports it differently. + // we restore the literal `\`. + else if (comment.kind === 'InlineCommandComment') { + text += `\\${comment.name}`; + } + else if (comment.kind === 'TextComment') { + // clang oddity: it breaks into two + // comment blocks, assuming that the trailing > should be a + // blockquote newline sort of thing. unbreak them. 
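+      // For example, a doc comment containing `a > b` may arrive here as
+      // the text `a ` followed by a separate `> b` text comment.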
+ if (comment.text.startsWith('>') && + lastComment && + lastComment.loc.offset + lastComment.text.length === comment.loc.offset) { + + text += comment.text; + } else { + text += text ? "\n" + comment.text : comment.text; + } + } + else if (comment.kind === 'HTMLStartTagComment' && comment.name === 'p') { + text += "\n"; + } + else { + throw new Error(`unknown paragraph comment element: ${comment.kind}`); + } + + lastComment = comment; + } + + return { + text: text.trim(), + extendedType: extendedType, + subkind: subkind, + versionMacro: versionMacro, + initMacro: initMacro, + initFunction: initFunction + } +} + +function join(arr, elem) { + if (arr) { + return [ ...arr, elem ]; + } + + return [ elem ]; +} + +function joinIfNotEmpty(arr, elem) { + if (!elem || elem === '') { + return arr; + } + + if (arr) { + return [ ...arr, elem ]; + } + + return [ elem ]; +} + +function pushIfNotEmpty(arr, elem) { + if (elem && elem !== '') { + arr.push(elem); + } +} + +function single(arr, fn, message) { + let result = undefined; + + if (!arr) { + return undefined; + } + + for (const match of arr.filter(fn)) { + if (result) { + throw new Error(`multiple matches in array for ${fn}${message ? ' (' + message + ')': ''}`); + } + + result = match; + } + + return result; +} + +function updateLocation(location, decl) { + location.file = trimBase(decl.loc?.spellingLoc?.file || decl.loc?.file) || location.file; + location.line = decl.loc?.spellingLoc?.line || decl.loc?.line || location.line; + location.column = decl.loc?.spellingLoc?.col || decl.loc?.col || location.column; + + return location; +} + +async function readFileLocation(startLocation, endLocation) { + if (startLocation.file != endLocation.file) { + throw new Error("cannot read across files"); + } + + const data = await fs.readFile(startLocation.file, "utf8"); + const lines = data.split(/\r?\n/).slice(startLocation.line - 1, endLocation.line); + + lines[lines.length - 1] = lines[lines.length - 1].slice(0, endLocation.column); + lines[0] = lines[0].slice(startLocation.column - 1); + + return lines +} + +function formatLines(lines) { + let result = ""; + let continuation = false; + + for (const i in lines) { + if (!continuation) { + lines[i] = lines[i].trimStart(); + } + + continuation = lines[i].endsWith("\\"); + + if (continuation) { + lines[i] = lines[i].slice(0, -1); + } else { + lines[i] = lines[i].trimEnd(); + } + + result += lines[i]; + } + + if (continuation) { + throw new Error("unterminated literal continuation"); + } + + return result; +} + +async function parseExternalRange(location, range) { + const startLocation = {...location}; + startLocation.file = trimBase(range.begin.spellingLoc.file || startLocation.file); + startLocation.line = range.begin.spellingLoc.line || startLocation.line; + startLocation.column = range.begin.spellingLoc.col || startLocation.column; + + const endLocation = {...startLocation}; + endLocation.file = trimBase(range.end.spellingLoc.file || endLocation.file); + endLocation.line = range.end.spellingLoc.line || endLocation.line; + endLocation.column = range.end.spellingLoc.col || endLocation.column; + + const lines = await readFileLocation(startLocation, endLocation); + + return formatLines(lines); +} + +async function parseLiteralRange(location, range) { + const startLocation = updateLocation({...location}, { loc: range.begin }); + const endLocation = updateLocation({...location}, { loc: range.end }); + + const lines = await readFileLocation(startLocation, endLocation); + + return formatLines(lines); +} + +async 
function parseRange(location, range) { + return range.begin.spellingLoc ? parseExternalRange(location, range) : parseLiteralRange(location, range); +} + +class ParserError extends Error { + constructor(message, location) { + if (!location) { + super(`${message} at (unknown)`); + } + else { + super(`${message} at ${location.file}:${location.line}`); + } + this.name = 'ParserError'; + } +} + +function validateParsing(test, message, location) { + if (!test) { + throw new ParserError(message, location); + } +} + +function parseComment(spec, location, comment, options) { + let result = { }; + let last = undefined; + + for (const c of comment.inner.filter(c => c.kind === 'ParagraphComment' || c.kind === 'VerbatimLineComment')) { + if (c.kind === 'ParagraphComment') { + const commentData = richCommentText(c); + + result.comment = joinIfNotEmpty(result.comment, commentData.text); + delete commentData.text; + + result = { ...result, ...commentData }; + } + else if (c.kind === 'VerbatimLineComment') { + result.comment = joinIfNotEmpty(result.comment, c.text.trim()); + } + else { + throw new Error(`unknown comment ${c.kind}`); + } + } + + for (const c of comment.inner.filter(c => c.kind !== 'ParagraphComment' && c.kind !== 'VerbatimLineComment')) { + if (c.kind === 'BlockCommandComment' && c.name === 'see') { + result.see = joinIfNotEmpty(result.see, blockCommentText(c)); + } + else if (c.kind === 'BlockCommandComment' && c.name === 'note') { + result.notes = joinIfNotEmpty(result.notes, blockCommentText(c)); + } + else if (c.kind === 'BlockCommandComment' && c.name === 'deprecated') { + result.deprecations = joinIfNotEmpty(result.deprecations, blockCommentText(c)); + } + else if (c.kind === 'BlockCommandComment' && c.name === 'warning') { + result.warnings = joinIfNotEmpty(result.warnings, blockCommentText(c)); + } + else if (c.kind === 'BlockCommandComment' && + (c.name === 'return' || (c.name === 'returns' && !options.strict))) { + const returnData = richBlockCommentText(c); + + result.returns = { + extendedType: returnData.extendedType, + comment: returnData.text + }; + } + else if (c.kind === 'ParamCommandComment') { + ensure('param has a name', c.param); + + const paramDetails = paramCommentText(c); + + result.params = join(result.params, { + name: c.param, + direction: c.direction, + values: paramDetails.type, + extendedType: paramDetails.extendedType, + comment: paramDetails.text + }); + } + else if (options.strict) { + if (c.kind === 'BlockCommandComment') { + throw new ParserError(`unknown block command comment ${c.name}`, location); + } + else if (c.kind === 'VerbatimBlockComment') { + throw new Error(`unknown verbatim command comment ${c.name}`, location); + } + else { + throw new Error(`unknown comment ${c.kind} in ${kind}`); + } + } + } + + return result; +} + +async function parseFunction(location, decl, options) { + let result = { + kind: 'function', + id: groupifyId(location, decl.id), + name: ensureDefined('name', decl.name), + location: {...location} + }; + + // prototype + const [ , returnType, ] = decl.type.qualType.match(/(.*?)(?: )?\((.*)\)$/) || [ ]; + ensureDefined('return type declaration', returnType); + result.returns = { type: returnType }; + + for (const paramDecl of decl.inner.filter(attr => attr.kind === 'ParmVarDecl')) { + updateLocation(location, paramDecl); + + const inner = paramDecl.inner || []; + const innerLocation = {...location}; + let paramAnnotations = undefined; + + for (const annotateDecl of inner.filter(attr => attr.kind === 'AnnotateAttr')) { + 
updateLocation(innerLocation, annotateDecl); + + paramAnnotations = join(paramAnnotations, await parseRange(innerLocation, annotateDecl.range)); + } + + result.params = join(result.params, { + name: paramDecl.name, + type: paramDecl.type.qualType, + annotations: paramAnnotations + }); + } + + // doc comment + const commentText = single(decl.inner, (attr => attr.kind === 'FullComment')); + + if (commentText) { + const commentData = parseComment(`function:${decl.name}`, location, commentText, options); + + if (result.params) { + if (options.strict && (!commentData.params || result.params.length > commentData.params.length)) { + throw new ParserError(`not all params are documented`, location); + } + + if (options.strict && result.params.length < commentData.params.length) { + throw new ParserError(`additional params are documented`, location); + } + } + + if (commentData.params) { + for (const i in result.params) { + let match; + + for (const j in commentData.params) { + if (result.params[i].name === commentData.params[j].name) { + match = j; + break; + } + } + + if (options.strict && (!match || match != i)) { + throw new ParserError( + `param documentation does not match param name '${result.params[i].name}'`, + location); + } + + if (match) { + result.params[i] = { ...result.params[i], ...commentData.params[match] }; + } + } + } else if (options.strict && result.params) { + throw new ParserError(`no params documented for ${decl.name}`, location); + } + + if (options.strict && !commentData.returns && result.returns.type != 'void') { + throw new ParserError(`return information is not documented for ${decl.name}`, location); + } + + result.returns = { ...result.returns, ...commentData.returns }; + + delete commentData.params; + delete commentData.returns; + + result = { ...result, ...commentData }; + } + else if (options.strict) { + throw new ParserError(`no documentation for function ${decl.name}`, location); + } + + return result; +} + +function parseEnum(location, decl, options) { + let result = { + kind: 'enum', + id: groupifyId(location, decl.id), + name: decl.name, + referenceName: decl.name ? `enum ${decl.name}` : undefined, + members: [ ], + comment: undefined, + location: {...location} + }; + + for (const member of decl.inner.filter(attr => attr.kind === 'EnumConstantDecl')) { + ensure('enum constant has a name', member.name); + + const explicitValue = single(member.inner, (attr => attr.kind === 'ConstantExpr')); + const commentText = single(member.inner, (attr => attr.kind === 'FullComment')); + const commentData = commentText ? parseComment(`enum:${decl.name}:member:${member.name}`, location, commentText, options) : undefined; + + result.members.push({ + name: member.name, + value: explicitValue ? 
explicitValue.value : undefined, + ...commentData + }); + } + + const commentText = single(decl.inner, (attr => attr.kind === 'FullComment')); + + if (commentText) { + result = { ...result, ...parseComment(`enum:${decl.name}`, location, commentText, options) }; + } + + return result; +} + +function resolveFunctionPointerTypedef(location, typedef) { + const signature = typedef.type.match(/^((?:const )?[^\s]+(?:\s+\*+)?)\s*\(\*\)\((.*)\)$/); + const [ , returnType, paramData ] = signature; + const params = paramData.split(/,\s+/); + + if (options.strict && (!typedef.params || params.length != typedef.params.length)) { + throw new ParserError(`not all params are documented for function pointer typedef ${typedef.name}`, typedef.location); + } + + if (!typedef.params) { + typedef.params = [ ]; + } + + for (const i in params) { + if (!typedef.params[i]) { + typedef.params[i] = { }; + } + + typedef.params[i].type = params[i]; + } + + if (typedef.returns === undefined && returnType === 'void') { + typedef.returns = { type: 'void' }; + } + else if (typedef.returns !== undefined) { + typedef.returns.type = returnType; + } + else if (options.strict) { + throw new ParserError(`return type is not documented for function pointer typedef ${typedef.name}`, typedef.location); + } +} + +function parseTypedef(location, decl, options) { + updateLocation(location, decl); + + let result = { + kind: 'typedef', + id: groupifyId(location, decl.id), + name: ensureDefined('name', decl.name), + type: ensureDefined('type.qualType', decl.type.qualType), + targetId: undefined, + comment: undefined, + location: {...location} + }; + + const elaborated = single(decl.inner, (attr => attr.kind === 'ElaboratedType')); + if (elaborated !== undefined && elaborated.ownedTagDecl?.id) { + result.targetId = groupifyId(location, elaborated.ownedTagDecl?.id); + } + + const commentText = single(decl.inner, (attr => attr.kind === 'FullComment')); + + if (commentText) { + const commentData = parseComment(`typedef:${decl.name}`, location, commentText, options); + result = { ...result, ...commentData }; + } + + if (isFunctionPointer(result.type)) { + resolveFunctionPointerTypedef(location, result); + } + + return result; +} + +function parseStruct(location, decl, options) { + let result = { + kind: 'struct', + id: groupifyId(location, decl.id), + name: decl.name, + referenceName: decl.name ? 
`struct ${decl.name}` : undefined, + comment: undefined, + members: [ ], + location: {...location} + }; + + for (const member of decl.inner.filter(attr => attr.kind === 'FieldDecl')) { + let memberData = { + 'name': member.name, + 'type': member.type.qualType + }; + + const commentText = single(member.inner, (attr => attr.kind === 'FullComment')); + + if (commentText) { + memberData = {...memberData, ...parseComment(`struct:${decl.name}:member:${member.name}`, location, commentText, options)}; + } + + result.members.push(memberData); + } + + const commentText = single(decl.inner, (attr => attr.kind === 'FullComment')); + + if (commentText) { + const commentData = parseComment(`struct:${decl.name}`, location, commentText, options); + result = { ...result, ...commentData }; + } + + return result; +} + +function newResults() { + return { + all: [ ], + functions: [ ], + enums: [ ], + typedefs: [ ], + structs: [ ], + macros: [ ] + }; +}; + +const returnMap = { }; +const paramMap = { }; + +function simplifyType(givenType) { + let type = givenType; + + if (type.startsWith('const ')) { + type = type.substring(6); + } + + while (type.endsWith('*') && type !== 'void *' && type !== 'char *') { + type = type.substring(0, type.length - 1).trim(); + } + + if (!type.length) { + throw new Error(`invalid type: ${result.returns.extendedType || result.returns.type}`); + } + + return type; +} + +function createAndPush(arr, name, value) { + if (!arr[name]) { + arr[name] = [ ]; + } + + if (arr[name].length && arr[name][arr[name].length - 1] === value) { + return; + } + + arr[name].push(value); +} + +function addReturn(result) { + if (!result.returns) { + return; + } + + let type = simplifyType(result.returns.extendedType?.type || result.returns.type); + + createAndPush(returnMap, type, result.name); +} + +function addParameters(result) { + if (!result.params) { + return; + } + + for (const param of result.params) { + let type = param.extendedType?.type || param.type; + + if (!type && options.strict) { + throw new Error(`parameter ${result.name} erroneously documented when not specified`); + } else if (!type) { + continue; + } + + type = simplifyType(type); + + if (param.direction === 'out') { + createAndPush(returnMap, type, result.name); + } + else { + createAndPush(paramMap, type, result.name); + } + } +} + +function addResult(results, result) { + results[`${result.kind}s`].push(result); + results.all.push(result); + + addReturn(result); + addParameters(result); +} + +function mergeResults(one, two) { + const results = newResults(); + + for (const inst of Object.keys(results)) { + results[inst].push(...one[inst]); + results[inst].push(...two[inst]); + } + + return results; +} + +function getById(results, id) { + ensure("id is set", id !== undefined); + return single(results.all.all, (item => item.id === id), id); +} + +function getByKindAndName(results, kind, name) { + ensure("kind is set", kind !== undefined); + ensure("name is set", name !== undefined); + return single(results.all[`${kind}s`], (item => item.name === name), name); +} + +function getByName(results, name) { + ensure("name is set", name !== undefined); + return single(results.all.all, (item => item.name === name), name); +} + +function isFunctionPointer(type) { + return type.match(/^(?:const )?[A-Za-z0-9_]+\s+\**\(\*/); +} + +function resolveCallbacks(results) { + // expand callback types + for (const fn of results.all.functions) { + for (const param of fn.params || [ ]) { + const typedef = getByName(results, param.type); + + if (typedef === 
undefined) { + continue; + } + + param.referenceType = typedef.type; + } + } + + for (const struct of results.all.structs) { + for (const member of struct.members) { + const typedef = getByKindAndName(results, 'typedef', member.type); + + if (typedef === undefined) { + continue; + } + + member.referenceType = typedef.type; + } + } +} + +function trimBase(path) { + if (!path) { + return path; + } + + for (const segment of [ 'git2', 'git' ]) { + const base = [ includeBase(path), segment ].join('/'); + + if (path.startsWith(base + '/')) { + return path.substr(base.length + 1); + } + } + + throw new Error(`header path ${path} is not beneath standard root`); +} + +function resolveTypedefs(results) { + for (const typedef of results.all.typedefs) { + let target = typedef.targetId ? getById(results, typedef.targetId) : undefined; + + if (target) { + // update the target's preferred name with the short name + target.referenceName = typedef.name; + + if (target.name === undefined) { + target.name = typedef.name; + } + } + else if (typedef.type.startsWith('struct ')) { + const path = typedef.location.file; + + /* + * See if this is actually a typedef to a declared struct, + * then it is not actually opaque. + */ + if (results.all.structs.filter(fn => fn.name === typedef.name).length > 0) { + typedef.opaque = false; + continue; + } + + opaque = { + kind: 'struct', + id: groupifyId(typedef.location, typedef.id), + name: typedef.name, + referenceName: typedef.type, + opaque: true, + comment: typedef.comment, + location: typedef.location, + group: typedef.group + }; + + addResult(results.files[path], opaque); + addResult(results.all, opaque); + } + else if (isFunctionPointer(typedef.type) || + typedef.type === 'int64_t' || + typedef.type === 'uint64_t') { + // standard types + // TODO : make these a list + } + else { + typedef.kind = 'alias'; + typedef.typedef = true; + } + } +} + +function lastCommentIsGroupDelimiter(decls) { + if (decls[decls.length - 1].inner && + decls[decls.length - 1].inner.length > 0) { + return lastCommentIsGroupDelimiter(decls[decls.length - 1].inner); + } + + if (decls.length >= 2 && + decls[decls.length - 1].kind.endsWith('Comment') && + decls[decls.length - 2].kind.endsWith('Comment') && + decls[decls.length - 2].text === '@' && + decls[decls.length - 1].text === '{') { + return true; + } + + return false; +} + +async function parseAst(decls, options) { + const location = { + file: undefined, + line: undefined, + column: undefined + }; + + const results = newResults(); + + /* The first decl might have picked up the javadoc _for the file + * itself_ based on the file's structure. Remove it. 
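+   * (Such a file-level block is recognized by the trailing `@` and `{`
+   * group-delimiter comments that lastCommentIsGroupDelimiter checks for.)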
+ */ + if (decls.length && decls[0].inner && + decls[0].inner.length > 0 && + decls[0].inner[0].kind === 'FullComment' && + lastCommentIsGroupDelimiter(decls[0].inner[0].inner)) { + updateLocation(location, decls[0]); + delete decls[0].inner[0]; + } + + for (const decl of decls) { + updateLocation(location, decl); + + ensureDefined('kind', decl.kind); + + if (decl.kind === 'FunctionDecl') { + addResult(results, await parseFunction({...location}, decl, options)); + } + else if (decl.kind === 'EnumDecl') { + addResult(results, parseEnum({...location}, decl, options)); + } + else if (decl.kind === 'TypedefDecl') { + addResult(results, parseTypedef({...location}, decl, options)); + } + else if (decl.kind === 'RecordDecl' && decl.tagUsed === 'struct') { + if (decl.completeDefinition) { + addResult(results, parseStruct({...location}, decl, options)); + } + } + else if (decl.kind === 'VarDecl') { + if (options.strict) { + throw new Error(`unsupported variable declaration ${decl.kind}`); + } + } + else { + throw new Error(`unknown declaration type ${decl.kind}`); + } + } + + return results; +} + +function parseCommentForMacro(lines, macroIdx, name) { + let startIdx = -1, endIdx = 0; + const commentLines = [ ]; + + while (macroIdx > 0 && + (line = lines[macroIdx - 1].trim()) && + (line.trim() === '' || + line.trim().endsWith('\\') || + line.trim().match(/^#\s*if\s+/) || + line.trim().startsWith('#ifdef ') || + line.trim().startsWith('#ifndef ') || + line.trim().startsWith('#elif ') || + line.trim().startsWith('#else ') || + line.trim().match(/^#\s*define\s+${name}\s+/))) { + macroIdx--; + } + + if (macroIdx > 0 && lines[macroIdx - 1].trim().endsWith('*/')) { + endIdx = macroIdx - 1; + } else { + return ''; + } + + for (let i = endIdx; i >= 0; i--) { + if (lines[i].trim().startsWith('/**')) { + startIdx = i; + break; + } + else if (lines[i].trim().startsWith('/*')) { + break; + } + } + + if (startIdx < 0) { + return ''; + } + + for (let i = startIdx; i <= endIdx; i++) { + let line = lines[i].trim(); + + if (i == startIdx) { + line = line.replace(/^\s*\/\*\*\s*/, ''); + } + + if (i === endIdx) { + line = line.replace(/\s*\*\/\s*$/, ''); + } + + if (i != startIdx) { + line = line.replace(/^\s*\*\s*/, ''); + } + + if (i == startIdx && (line === '@{' || line.startsWith("@{ "))) { + return ''; + } + + if (line === '') { + continue; + } + + commentLines.push(line); + } + + return commentLines.join(' '); +} + +async function parseInfo(data) { + const fileHeader = data.match(/(.*)\n+GIT_BEGIN_DECL.*/s); + const headerLines = fileHeader ? fileHeader[1].split(/\n/) : [ ]; + + let lines = [ ]; + const detailsLines = [ ]; + + let summary = undefined; + let endIdx = headerLines.length - 1; + + for (let i = headerLines.length - 1; i >= 0; i--) { + let line = headerLines[i].trim(); + + if (line.match(/^\s*\*\/\s*$/)) { + endIdx = i; + } + + if (line.match(/^\/\*\*(\s+.*)?$/)) { + lines = headerLines.slice(i + 1, endIdx); + break; + } + else if (line.match(/^\/\*(\s+.*)?$/)) { + break; + } + } + + for (let line of lines) { + line = line.replace(/^\s\*/, ''); + line = line.trim(); + + const comment = line.match(/^\@(\w+|{)\s*(.*)/); + + if (comment) { + if (comment[1] === 'brief') { + summary = comment[2]; + } + } + else if (line != '') { + detailsLines.push(line); + } + } + + const details = detailsLines.length > 0 ? 
detailsLines.join("\n") : undefined; + + return { + 'summary': summary, + 'details': details + }; +} + +async function parseMacros(path, data, options) { + const results = newResults(); + const lines = data.split(/\r?\n/); + + const macros = { }; + + for (let i = 0; i < lines.length; i++) { + const macro = lines[i].match(/^(\s*#\s*define\s+)([^\s\(]+)(\([^\)]+\))?\s*(.*)/); + let more = false; + + if (!macro) { + continue; + } + + let [ , prefix, name, args, value ] = macro; + + if (name.startsWith('INCLUDE_') || name.startsWith('_INCLUDE_')) { + continue; + } + + if (args) { + name = name + args; + } + + if (macros[name]) { + continue; + } + + macros[name] = true; + + value = value.trim(); + + if (value.endsWith('\\')) { + value = value.substring(0, value.length - 1).trim(); + more = true; + } + + while (more) { + more = false; + + let line = lines[++i]; + + if (line.endsWith('\\')) { + line = line.substring(0, line.length - 1); + more = true; + } + + value += ' ' + line.trim(); + } + + const comment = parseCommentForMacro(lines, i, name); + const location = { + file: path, + line: i + 1, + column: prefix.length + 1, + }; + + if (options.strict && !comment) { + throw new ParserError(`no comment for ${name}`, location); + } + + addResult(results, { + kind: 'macro', + name: name, + location: location, + value: value, + comment: comment, + }); + } + + return results; +} + +function resolveUngroupedTypes(results) { + const groups = { }; + + for (const result of results.all.all) { + result.group = result.location.file; + + if (result.group.endsWith('.h')) { + result.group = result.group.substring(0, result.group.length - 2); + groups[result.group] = true; + } + } + + for (const result of results.all.all) { + if (result.location.file === 'types.h' && + result.name.startsWith('git_')) { + let possibleGroup = result.name.substring(4); + + do { + if (groupMap[possibleGroup]) { + result.group = groupMap[possibleGroup]; + break; + } + else if (groups[possibleGroup]) { + result.group = possibleGroup; + break; + } + else if (groups[`sys/${possibleGroup}`]) { + result.group = `sys/${possibleGroup}`; + break; + } + + let match = possibleGroup.match(/^(.*)_[^_]+$/); + + if (!match) { + break; + } + + possibleGroup = match[1]; + } while (true); + } + } +} + +function resolveReturns(results) { + for (const result of results.all.all) { + result.returnedBy = returnMap[result.name]; + } +} + +function resolveParameters(results) { + for (const result of results.all.all) { + result.parameterTo = paramMap[result.name]; + } +} + +async function parseHeaders(sourcePath, options) { + const results = { all: newResults(), files: { } }; + + for (const fullPath of await headerPaths(sourcePath)) { + const path = trimPath(sourcePath, fullPath); + const fileContents = await readFile(fullPath); + + const ast = await parseAst(await readAst(fullPath, options), options); + const macros = await parseMacros(path, fileContents, options); + const info = await parseInfo(fileContents); + + const filedata = mergeResults(ast, macros); + + filedata['info'] = info; + + results.files[path] = filedata; + results.all = mergeResults(results.all, filedata); + } + + resolveCallbacks(results); + resolveTypedefs(results); + + resolveUngroupedTypes(results); + + resolveReturns(results); + resolveParameters(results); + + return results; +} + +function isFunctionPointer(type) { + return type.match(/^(const\s+)?[A-Za-z0-9_]+\s+\*?\(\*/); +} +function isEnum(type) { + return type.match(/^enum\s+/); +} +function isStruct(type) { + return 
type.match(/^struct\s+/); +} + +/* + * We keep the `all` arrays around so that we can lookup; drop them + * for the end result. + */ +function simplify(results) { + const simplified = { + 'info': { }, + 'groups': { } + }; + + results.all.all.sort((a, b) => { + if (!a.group) { + throw new Error(`missing group for api ${a.name}`); + } + + if (!b.group) { + throw new Error(`missing group for api ${b.name}`); + } + + const aSystem = a.group.startsWith('sys/'); + const aName = aSystem ? a.group.substr(4) : a.group; + + const bSystem = b.group.startsWith('sys/'); + const bName = bSystem ? b.group.substr(4) : b.group; + + if (aName !== bName) { + return aName.localeCompare(bName); + } + + if (aSystem !== bSystem) { + return aSystem ? 1 : -1; + } + + if (a.location.file !== b.location.file) { + return a.location.file.localeCompare(b.location.file); + } + + if (a.location.line !== b.location.line) { + return a.location.line - b.location.line; + } + + return a.location.column - b.location.column; + }); + + for (const api of results.all.all) { + delete api.id; + delete api.targetId; + + const type = api.referenceType || api.type; + + if (api.kind === 'typedef' && isFunctionPointer(type)) { + api.kind = 'callback'; + api.typedef = true; + } + else if (api.kind === 'typedef' && (!isEnum(type) && !isStruct(type))) { + api.kind = 'alias'; + api.typedef = true; + } + else if (api.kind === 'typedef') { + continue; + } + + if (apiIgnoreList.includes(api.name)) { + continue; + } + + // TODO: do a warning where there's a redefinition of a symbol + // There are occasions where we redefine a symbol. First, our + // parser is not smart enough to know #ifdef's around #define's. + // But also we declared `git_email_create_from_diff` twice (in + // email.h and sys/email.h) for several releases. + + if (!simplified['groups'][api.group]) { + simplified['groups'][api.group] = { }; + simplified['groups'][api.group].apis = { }; + simplified['groups'][api.group].info = results.files[`${api.group}.h`].info; + } + + simplified['groups'][api.group].apis[api.name] = api; + } + + return simplified; +} + +function joinArguments(next, previous) { + if (previous) { + return [...previous, next]; + } + return [next]; +} + +async function findIncludes() { + const includes = [ ]; + + for (const possible of defaultIncludes) { + const includeFile = `${docsPath}/include/git2/${possible}`; + + try { + await fs.stat(includeFile); + includes.push(`git2/${possible}`); + } + catch (e) { + if (e?.code !== 'ENOENT') { + throw e; + } + } + } + + return includes; +} + +async function execGit(path, command) { + const process = child_process.spawn('git', command, { cwd: path }); + const chunks = [ ]; + + return new Promise((resolve, reject) => { + process.stdout.on('data', (chunk) => { + chunks.push(chunk); + }); + process.on('close', (code) => { + resolve(code == 0 ? 
Buffer.concat(chunks).toString() : undefined); + }); + process.on('error', function (err) { + reject(err); + }); + }); +} + +async function readMetadata(path) { + let commit = await execGit(path, [ 'rev-parse', 'HEAD' ]); + + if (commit) { + commit = commit.trimEnd(); + } + + let version = await execGit(path, [ 'describe', '--tags', '--exact' ]); + + if (!version) { + const ref = await execGit(path, [ 'describe', '--all', '--exact' ]); + + if (ref && ref.startsWith('heads/')) { + version = ref.substr(6); + } + } + + if (version) { + version = version.trimEnd(); + } + + return { + 'version': version, + 'commit': commit + }; +} + +program.option('--output ') + .option('--include ', undefined, joinArguments) + .option('--no-includes') + .option('--deprecate-hard') + .option('--strict'); +program.parse(); + +const options = program.opts(); + +if (program.args.length != 1) { + console.error(`usage: ${path.basename(process.argv[1])} docs`); + process.exit(1); +} + +const docsPath = program.args[0]; + +if (options['include'] && !options['includes']) { + console.error(`usage: cannot combined --include with --no-include`); + process.exit(1); +} + +(async () => { + try { + if (options['include']) { + includes = options['include']; + } + else if (!options['includes']) { + includes = [ ]; + } + else { + includes = await findIncludes(); + } + + const parseOptions = { + deprecateHard: options.deprecateHard || false, + includeFiles: includes, + strict: options.strict || false + }; + + const results = await parseHeaders(docsPath, parseOptions); + const metadata = await readMetadata(docsPath); + + const simplified = simplify(results); + simplified['info'] = metadata; + + console.log(JSON.stringify(simplified, null, 2)); + } catch (e) { + console.error(e); + process.exit(1); + } +})(); diff --git a/script/api-docs/docs-generator.js b/script/api-docs/docs-generator.js new file mode 100755 index 000000000..5be9e1d20 --- /dev/null +++ b/script/api-docs/docs-generator.js @@ -0,0 +1,1326 @@ +#!/usr/bin/env node + +const markdownit = require('markdown-it'); +const { program } = require('commander'); + +const path = require('node:path'); +const fs = require('node:fs/promises'); +const process = require('node:process'); + +const githubPath = 'https://github.com/libgit2/libgit2'; + +const linkPrefix = '/docs/reference'; + +const projectTitle = 'libgit2'; +const includePath = 'include/git2'; + +const fileDenylist = [ 'stdint.h' ]; +const showVersions = true; + +const defaultBranch = 'main'; + +const markdown = markdownit(); +const markdownDefaults = { + code_inline: markdown.renderer.rules.code_inline +}; +markdown.renderer.rules.code_inline = (tokens, idx, options, env, self) => { + const version = env.__version || defaultBranch; + + const code = tokens[idx].content; + const text = `${nowrap(sanitize(tokens[idx].content))}`; + const link = linkForCode(version, code, text); + + return link ? link : text; +}; + +// globals +const apiData = { }; +const versions = [ ]; +const versionDeltas = { }; + +function produceVersionPicker(version, classes, cb) { + let content = ""; + + if (!showVersions) { + return content; + } + + content += `
\n`; + content += ` Version:\n`; + content += ` \n`; + + content += `
\n`; + + return content; +} + +function produceBreadcrumb(version, api, type) { + let content = ""; + let group = api.group; + let sys = false; + + if (group.endsWith('.h')) { + group = group.substr(0, group.length - 2); + } + + let groupTitle = group; + + if (groupTitle.startsWith('sys/')) { + groupTitle = groupTitle.substr(4); + groupTitle += ' (advanced)'; + } + + content += `
\n`; + content += ` \n`; + content += `
\n`; + + return content; +} + +function produceHeader(version, api, type) { + let content = ""; + + content += `
\n`; + content += `

${api.name}

\n`; + + content += produceAttributes(version, api, type); + + content += produceVersionPicker(version, + `apiHeaderVersionSelect ${type}HeaderVersionSelect`, + (v) => { + const versionedApi = selectApi(v, (i => i.name === api.name)); + return versionedApi ? linkFor(v, versionedApi) : undefined; + }); + + content += `
\n`; + content += `\n`; + + return content; +} + +function produceAttributes(version, api, type) { + let content = ""; + + if (api.deprecations) { + content += ` Deprecated\n`; + } + + return content; +} + +function produceDescription(version, desc, type) { + let content = ""; + + if (! desc.comment) { + return content; + } + + content += `\n`; + content += `
\n`; + + for (const para of Array.isArray(desc.comment) ? desc.comment : [ desc.comment ]) { + content += ` ${markdown.render(para, { __version: version })}\n`; + } + + content += `
\n`; + + return content; +} + +function produceList(version, api, type, listType) { + let content = ""; + + if (!api[listType]) { + return content; + } + + const listTypeUpper = listType.charAt(0).toUpperCase() + listType.slice(1); + const listTypeTitle = listTypeUpper.replaceAll(/(.)([A-Z])/g, (match, one, two) => { return one + ' ' + two; }); + + content += `\n`; + content += `

${listTypeTitle}

\n`; + + content += `
\n`; + content += `
    \n`; + + for (const item of api[listType]) { + content += `
  • \n`; + content += ` ${linkText(version, item)}\n`; + content += `
  • \n`; + } + + content += `
\n`; + content += `
 \n`;
+
+  return content;
+}
+
+function produceNotes(version, api, type) {
+  return produceList(version, api, type, 'notes');
+}
+
+function produceSeeAlso(version, api, type) {
+  return produceList(version, api, type, 'see');
+}
+
+function produceWarnings(version, api, type) {
+  return produceList(version, api, type, 'warnings');
+}
+
+function produceDeprecations(version, api, type) {
+  return produceList(version, api, type, 'deprecations');
+}
+
+function produceGitHubLink(version, api, type) {
+  if (!api || !api.location || !api.location.file) {
+    return undefined;
+  }
+
+  let file = api.location.file;
+
+  let link = githubPath + '/blob/' + version + '/' + includePath + '/' + file;
+
+  if (api.location.line) {
+    link += '#L' + api.location.line;
+  }
+
+  return link;
+}
+
+function produceSignatureForFunction(version, api, type) {
+  let content = "";
+  let paramCount = 0;
+
+  let prefix = type === 'callback' ? 'typedef' : '';
+  const returnType = api.returns?.type || 'int';
+
+  const githubLink = produceGitHubLink(version, api, type);
+
+  content += `\n`;
+
+  content += `

Signature

\n`; + + if (githubLink) { + content += ` \n`; + } + + content += `
\n`; + + content += ` ${prefix ? prefix + ' ' : ''}${returnType}`; + content += returnType.endsWith('*') ? '' : ' '; + content += `${api.name}(`; + + for (const param of api.params || [ ]) { + content += (paramCount++ > 0) ? ', ' : ''; + + if (!param.type && options.strict) { + throw new Error(`param ${param.name} has no type for function ${api.name}`); + } + else if (!param.type) { + continue; + } + + content += ``; + content += `${param.type}`; + content += param.type.endsWith('*') ? '' : ' '; + + if (param.name) { + content += `${param.name}`; + } + + content += ``; + } + + content += `);\n`; + content += `
\n`; + + return content; +} + +function produceFunctionParameters(version, api, type) { + let content = ""; + + if (!api.params || api.params.length == 0) { + return content; + } + + content += `\n`; + + content += `

Parameters

\n`; + content += `
\n`; + + for (const param of api.params) { + let direction = param.direction || 'in'; + direction = direction.charAt(0).toUpperCase() + direction.slice(1); + + if (!param.type && options.strict) { + throw new Error(`param ${param.name} has no type for function ${api.name}`); + } + else if (!param.type) { + continue; + } + + content += `
\n`; + content += `
\n`; + content += ` ${linkType(version, param.type)}\n`; + content += `
\n`; + + if (param.extendedType) { + content += `
\n`; + content += ` ${linkType(version, param.extendedType.type)}\n`; + content += `
\n`; + } + + content += `
\n`; + + content += ` ${direction}\n`; + content += `
\n`; + + if (param.name) { + content += `
\n`; + content += ` ${param.name}\n`; + content += `
\n`; + } + + content += `
\n`; + content += ` ${render(version, param.comment)}\n`; + content += `
\n`; + content += `
\n`; + } + + content += `
\n`; + + return content; +} + +function produceFunctionReturn(version, api, type) { + let content = ""; + + if (api.returns && api.returns.type && api.returns.type !== 'void') { + content += `\n`; + content += `

Returns

\n`; + content += `
\n`; + content += `
\n`; + content += ` ${linkType(version, api.returns.type)}\n`; + content += `
\n`; + content += `
\n`; + content += ` ${render(version, api.returns.comment)}\n`; + content += `
\n`; + content += `
\n`; + } + + return content; +} + +function produceSignatureForObject(version, api, type) { + let content = ""; + + const githubLink = produceGitHubLink(version, api, type); + + content += `\n`; + + content += `

Signature

\n`; + + if (githubLink) { + content += ` \n`; + } + + content += `
\n`; + content += ` typedef ${api.referenceName} ${api.name}\n`; + content += `
\n`; + + return content; +} + +function produceSignatureForStruct(version, api, type) { + let content = ""; + + const githubLink = produceGitHubLink(version, api, type); + + content += `\n`; + + content += `

Signature

\n`; + + if (githubLink) { + content += ` \n`; + } + + const typedef = api.name.startsWith('struct') ? '' : 'typedef '; + + content += `
\n`; + content += ` ${typedef}struct ${api.name} {\n`; + + for (const member of api.members || [ ]) { + content += ``; + content += `${member.type}`; + content += member.type.endsWith('*') ? '' : ' '; + + if (member.name) { + content += `${member.name}`; + } + + content += `\n`; + } + + content += ` };\n`; + content += `
\n`; + + return content; +} + +function isOctalEnum(version, api, type) { + return api.name === 'git_filemode_t'; +} + +function isFlagsEnum(version, api, type) { + // TODO: also handle the flags metadata instead of always just guessing + if (type !== 'enum') { + return false; + } + + let largest = 0; + + for (const member of api.members) { + if (member.value === undefined) { + return false; + } + + if (member.value && (member.value & (member.value - 1))) { + return false; + } + + largest = member.value; + } + + return (largest > 1); +} + +function flagsOctal(v) { + const n = parseInt(v); + return n ? `0${n.toString(8)}` : 0; +} + +function flagsValue(v) { + if (v === '0') { + return '0'; + } + + return `(1 << ${Math.log2(v)})`; +} + +function produceMembers(version, api, type) { + let content = ""; + let value = 0; + + if (!api.members || api.members.length == 0) { + return ""; + } + + let title = type === 'enum' ? 'Values' : 'Members'; + const isOctal = isOctalEnum(version, api, type); + const isFlags = isFlagsEnum(version, api, type); + + content += `\n`; + + content += `

${title}

\n`; + + const githubLink = api.kind === 'struct' ? undefined : produceGitHubLink(version, api, type); + + if (githubLink) { + content += ` \n`; + } + + content += `
\n`; + + for (const member of api.members) { + value = member.value ? member.value : value; + + content += `
\n`; + + if (type === 'struct') { + content += `
\n`; + content += ` ${linkType(version, member.type)}\n`; + content += `
\n`; + } + + content += `
\n`; + content += ` ${member.name}\n`; + content += `
\n`; + + if (type === 'enum') { + const enumValue = isOctal ? flagsOctal(value) : (isFlags ? flagsValue(value) : value); + + content += `
\n`; + content += ` ${enumValue}\n`; + content += `
\n`; + } + + content += `
\n`; + content += ` ${render(version, member.comment)}\n`; + content += `
\n`; + content += `
\n`; + + value++; + } + + content += `
\n`; + + return content; +} + +function produceReturnedBy(version, api, type) { + return produceList(version, api, type, 'returnedBy'); +} + +function produceParameterTo(version, api, type) { + return produceList(version, api, type, 'parameterTo'); +} + +function produceVersionDeltas(version, api, type) { + let content = ''; + + if (!showVersions) { + return content; + } + + const deltas = versionDeltas[api.name]; + if (!deltas) { + throw new Error(`no version information for ${api.kind} ${api.name}`); + } + + content += `

Versions

\n`; + content += `
\n`; + content += `
    \n`; + + for (const idx in deltas) { + const item = deltas[idx]; + + if (idx == deltas.length - 1) { + content += `
  • \n`; + } else if (item.changed) { + content += `
  • \n`; + } else { + content += `
  • \n`; + } + + content += ` ${item.version}\n`; + content += `
  • \n`; + } + + content += `
\n`; + content += `
\n`; + + return content; +} + +async function layout(data) { + let layout; + + if (options.layout) { + layout = await fs.readFile(options.layout); + } + else if (options.jekyllLayout) { + layout = `---\ntitle: {{title}}\nlayout: ${options.jekyllLayout}\n---\n\n{{content}}`; + } + else { + return data.content; + } + + return layout.toString().replaceAll(/{{([a-z]+)}}/g, (match, p1) => data[p1] || ""); +} + +async function produceDocumentationForApi(version, api, type) { + let content = ""; + + content += `
\n`; + + content += produceBreadcrumb(version, api, type); + content += produceHeader(version, api, type); + content += produceDescription(version, api, type); + content += produceNotes(version, api, type); + content += produceDeprecations(version, api, type); + content += produceSeeAlso(version, api, type); + content += produceWarnings(version, api, type); + content += produceSignature(version, api, type); + content += produceMembers(version, api, type); + content += produceFunctionParameters(version, api, type); + content += produceFunctionReturn(version, api, type); + content += produceReturnedBy(version, api, type); + content += produceParameterTo(version, api, type); + content += produceVersionDeltas(version, api, type); + + content += `
\n`; + + + const name = (type === 'macro' && api.name.includes('(')) ? + api.name.replace(/\(.*/, '') : api.name; + + const groupDir = `${outputPath}/${version}/${api.group}`; + const filename = `${groupDir}/${name}.html`; + + await fs.mkdir(groupDir, { recursive: true }); + await fs.writeFile(filename, await layout({ + title: `${api.name} (${projectTitle} ${version})`, + content: content + })); +} + +function selectApi(version, cb) { + const allApis = allApisForVersion(version, apiData[version]['groups']); + + for (const name in allApis) { + const api = allApis[name]; + + if (cb(api)) { + return api; + } + } + + return undefined; +} + +function apiFor(version, type) { + return selectApi(version, ((api) => api.name === type)); +} + +function linkFor(version, api) { + const name = (api.kind === 'macro' && api.name.includes('(')) ? + api.name.replace(/\(.*/, '') : api.name; + + return `${linkPrefix}/${version}/${api.group}/${name}.html`; +} + +function linkForCode(version, code, text) { + let api = selectApi(version, ((api) => api.name === code)); + let valueDecl = undefined; + + const apisForVersion = allApisForVersion(version, apiData[version]['groups']); + + if (!api) { + for (const enumDecl of Object.values(apisForVersion).filter(api => api.kind === 'enum')) { + const member = enumDecl.members.filter((m) => m.name === code); + + if (member && member[0]) { + api = enumDecl; + valueDecl = member[0]; + break; + } + } + } + + if (!api) { + return undefined; + } + + const kind = internalKind(version, api); + let link = linkFor(version, api); + + if (valueDecl) { + link += `#${valueDecl.name}`; + } + + if (!text) { + text = `${sanitize(code)}`; + } + + return `${text}`; +} + +function linkType(version, given) { + let type = given; + + if ((content = given.match(/^(?:const\s+)?([A-Za-z0-9_]+)(?:\s+\*+)?/))) { + type = content[1]; + } + + const api = apiFor(version, type); + + if (api) { + return `${given}`; + } + + return given; +} + +function linkText(version, str) { + const api = apiFor(version, str); + + if (api) { + return `${str}`; + } + + return sanitize(str); +} + +function render(version, str) { + let content = [ ]; + + if (!str) { + return ''; + } + + for (const s of Array.isArray(str) ? str : [ str ] ) { + content.push(markdown.render(s, { __version: version }).replaceAll(/\s+/g, ' ')); + } + + return content.join(' '); +} + +function nowrap(text) { + text = text.replaceAll(' ', ' '); + text = `${text}`; + return text; +} + +function sanitize(str) { + let content = [ ]; + + if (!str) { + return ''; + } + + for (const s of Array.isArray(str) ? str : [ str ] ) { + content.push(s.replaceAll('&', '&') + .replaceAll('<', '<') + .replaceAll('>', '>') + .replaceAll('{', '{') + .replaceAll('}', '}')); + } + + return content.join(' '); +} + +function produceSignatureForAlias(version, api, type) { + let content = ""; + + const githubLink = produceGitHubLink(version, api, type); + + content += `

Signature

\n`; + + if (githubLink) { + content += ` \n`; + } + + content += `
\n`; + content += ` typedef ${api.name} ${api.type};`; + content += `
\n`; + + return content; +} + +function produceSignatureForMacro(version, api, type) { + let content = ""; + + const githubLink = produceGitHubLink(version, api, type); + + content += `

Signature

\n`; + + if (githubLink) { + content += ` \n`; + } + + content += `
\n`; + content += ` #define ${api.name} ${sanitize(api.value)}`; + content += `
\n`; + + return content; +} + +function produceSignature(version, api, type) { + if (type === 'macro') { + return produceSignatureForMacro(version, api, type); + } + else if (type === 'alias') { + return produceSignatureForAlias(version, api, type); + } + else if (type === 'function' || type === 'callback') { + return produceSignatureForFunction(version, api, type); + } + else if (type === 'object') { + return produceSignatureForObject(version, api, type); + } + else if (type === 'struct') { + return produceSignatureForStruct(version, api, type); + } + else if (type === 'struct' || type === 'enum') { + return ""; + } + else { + throw new Error(`unknown type: ${api.kind}`); + } +} + +function isFunctionPointer(type) { + return type.match(/^(const\s+)?[A-Za-z0-9_]+\s+\*?\(\*/); +} + +function isEnum(type) { + return type.match(/^enum\s+/); +} + +function isStruct(type) { + return type.match(/^struct\s+/); +} + +function internalKind(version, api) { + if (api.kind === 'struct' && api.opaque) { + return 'object'; + } + + return api.kind; +} + +function externalKind(kind) { + if (kind === 'object') { + return 'struct'; + } + + return kind; +} + +async function produceIndexForGroup(version, group, versionApis) { + let content = ""; + + if (versionApis['groups'][group].apis.length == 0) { + return; + } + + const apis = Object.values(versionApis['groups'][group].apis); + + let fileName = group; + if (fileName.endsWith('.h')) { + fileName = fileName.substr(0, fileName.length - 2); + } + + const system = fileName.startsWith('sys/'); + let groupName = system ? fileName.substr(4) : fileName; + + content += `
\n`; + + content += `
\n`; + content += ` \n`; + content += `
\n`; + + content += `
\n`; + content += `

${groupName}

\n`; + + content += produceVersionPicker(version, "groupHeaderVersionSelect", (v) => { + if (apiData[v]['groups'][group]) { + return `${linkPrefix}/${v}/${groupName}/index.html`; + } + return undefined; + }); + + content += `
\n`; + + let details = undefined; + + if (versionApis['groups'][group].info?.details) { + details = markdown.render(versionApis['groups'][group].info.details, { __version: version }); + } else if (versionApis['groups'][group].info?.summary) { + details = versionApis['groups'][group].info.summary; + } + + if (details) { + content += `
\n`; + content += ` ${details}\n`; + content += `
\n`; + } + + for (const kind of [ 'object', 'struct', 'macro', 'enum', 'callback', 'alias', 'function' ]) { + content += produceIndexForApiKind(version, apis.filter(api => { + if (kind === 'object') { + return api.kind === 'struct' && api.opaque; + } + else if (kind === 'struct') { + return api.kind === 'struct' && !api.opaque; + } + else { + return api.kind === kind; + } + }), kind); + } + + content += `
\n`; + + const groupsDir = `${outputPath}/${version}/${fileName}`; + const filename = `${groupsDir}/index.html`; + + await fs.mkdir(groupsDir, { recursive: true }); + await fs.writeFile(filename, await layout({ + title: `${groupName} APIs (${projectTitle} ${version})`, + content: content + })); +} + +async function produceDocumentationForApis(version, apiData) { + const apis = allApisForVersion(version, apiData['groups']); + + for (const func of Object.values(apis).filter(api => api.kind === 'function')) { + await produceDocumentationForApi(version, func, 'function'); + } + + for (const struct of Object.values(apis).filter(api => api.kind === 'struct')) { + await produceDocumentationForApi(version, struct, internalKind(version, struct)); + } + + for (const e of Object.values(apis).filter(api => api.kind === 'enum')) { + await produceDocumentationForApi(version, e, 'enum'); + } + + for (const callback of Object.values(apis).filter(api => api.kind === 'callback')) { + await produceDocumentationForApi(version, callback, 'callback'); + } + + for (const alias of Object.values(apis).filter(api => api.kind === 'alias')) { + await produceDocumentationForApi(version, alias, 'alias'); + } + + for (const macro of Object.values(apis).filter(api => api.kind === 'macro')) { + await produceDocumentationForApi(version, macro, 'macro'); + } +} + +function produceIndexForApiKind(version, apis, kind) { + let content = ""; + + if (!apis || !apis.length) { + return content; + } + + let kindUpper = kind.charAt(0).toUpperCase() + kind.slice(1); + kindUpper += (kind === 'alias') ? 'es' : 's'; + + content += `\n`; + content += `

${kindUpper}

\n`; + + content += `
\n`; + + for (const item of apis) { + if (item.changed) { + content += `
\n`; + } else { + content += `
\n`; + } + + content += `
\n`; + content += ` \n`; + content += ` ${item.name}\n`; + content += ` \n`; + content += `
\n`; + + let shortComment = Array.isArray(item.comment) ? item.comment[0] : item.comment; + shortComment = shortComment || ''; + + shortComment = shortComment.replace(/\..*/, ''); + + content += `
\n`; + content += ` ${render(version, shortComment)}\n`; + content += `
\n`; + content += `
\n`; + } + + content += `
\n`; + + return content; +} + +function versionIndexContent(version, apiData) { + let content = ""; + let hasSystem = false; + + content += `
\n`; + content += `
\n`; + content += `

${projectTitle} ${version}

\n`; + + content += produceVersionPicker(version, "versionHeaderVersionSelect", + (v) => `${linkPrefix}/${v}/index.html`); + + content += `
\n`; + + content += `\n`; + content += `

Groups

\n`; + content += `
    \n`; + + for (const group of Object.keys(apiData['groups']).sort((a, b) => { + if (a.startsWith('sys/')) { return 1; } + if (b.startsWith('sys/')) { return -1; } + return a.localeCompare(b); + }).map(fn => { + let n = fn; + let sys = false; + + if (n.endsWith('.h')) { + n = n.substr(0, n.length - 2); + } + + if (n.startsWith('sys/')) { + n = n.substr(4); + sys = true; + } + + return { + name: n, filename: fn, system: sys, info: apiData['groups'][fn].info, apis: apiData['groups'][fn] + }; + }).filter(filedata => { + return Object.keys(filedata.apis).length > 0 && !fileDenylist.includes(filedata.filename); + })) { + if (group.system && !hasSystem) { + hasSystem = true; + + content += `
\n`; + content += `\n`; + content += `

System Groups (Advanced)

\n`; + content += `
    \n`; + } + + let link = `${linkPrefix}/${version}/`; + link += group.system ? `sys/` : ''; + link += group.name; + link += `/index.html`; + + content += `
  • \n`; + content += `
    \n`; + content += ` \n`; + content += ` ${group.name}\n`; + content += ` \n`; + content += `
    \n`; + + if (group.info?.summary) { + content += `
    \n`; + content += ` ${group.info.summary}`; + content += `
    \n`; + } + + content += `
  • \n`; + } + + content += `
\n`; + + content += `
\n`; + + return content; +} + +async function produceDocumentationIndex(version, apiData) { + const content = versionIndexContent(version, apiData); + + const versionDir = `${outputPath}/${version}`; + const filename = `${versionDir}/index.html`; + + await fs.mkdir(versionDir, { recursive: true }); + await fs.writeFile(filename, await layout({ + title: `APIs (${projectTitle} ${version})`, + content: content + })); +} + +async function documentationIsUpToDateForVersion(version, apiData) { + try { + const existingMetadata = JSON.parse(await fs.readFile(`${outputPath}/${version}/.metadata`)); + return existingMetadata?.commit === apiData.info.commit; + } + catch (e) { + } + + return false; +} + +async function produceDocumentationMetadata(version, apiData) { + const versionDir = `${outputPath}/${version}`; + const filename = `${versionDir}/.metadata`; + + await fs.mkdir(versionDir, { recursive: true }); + await fs.writeFile(filename, JSON.stringify(apiData.info, null, 2) + "\n"); +} + +async function produceDocumentationForVersion(version, apiData) { + if (!options.force && await documentationIsUpToDateForVersion(version, apiData)) { + if (options.verbose) { + console.log(`Documentation exists for ${version} at version ${apiData.info.commit.substr(0, 7)}; skipping...`); + } + + return; + } + + if (options.verbose) { + console.log(`Producing documentation for ${version}...`); + } + + await produceDocumentationForApis(version, apiData); + + for (const group in apiData['groups']) { + await produceIndexForGroup(version, group, apiData); + } + + await produceDocumentationIndex(version, apiData); + + await produceDocumentationMetadata(version, apiData); +} + +function versionDeltaData(version, api) { + const base = { version: version, api: api }; + + if (api.kind === 'function') { + return { + ...base, + returns: api.returns?.type || 'int', + params: api.params?.map((p) => p.type) || [ 'void' ] + }; + } + else if (api.kind === 'enum') { + return { + ...base, + members: api.members?.map((m) => { return { 'name': m.name, 'value': m.value } }) + }; + } + else if (api.kind === 'callback') { + return { ...base, }; + } + else if (api.kind === 'alias') { + return { ...base, }; + } + else if (api.kind === 'struct') { + return { + ...base, + members: api.members?.map((m) => { return { 'name': m.name, 'type': m.type } }) + }; + } + else if (api.kind === 'macro') { + return { + ...base, + name: api.name, + value: api.value + }; + } + else { + throw new Error(`unknown api kind: '${api.kind}'`); + } +} + +function deltasEqual(a, b) { + const unversionedA = { ...a }; + const unversionedB = { ...b }; + + delete unversionedA.version; + delete unversionedA.api; + delete unversionedA.changed; + delete unversionedB.version; + delete unversionedB.api; + delete unversionedB.changed; + + return JSON.stringify(unversionedA) === JSON.stringify(unversionedB); +} + +const apiForVersionCache = { }; +function allApisForVersion(version, apiData) { + if (apiForVersionCache[version]) { + return apiForVersionCache[version]; + } + + let result = { }; + for (const file in apiData['groups']) { + result = { ...result, ...apiData['groups'][file].apis }; + } + + apiForVersionCache[version] = result; + return result; +} + +function seedVersionApis(apiData) { + for (const version in apiData) { + allApisForVersion(version, apiData[version]); + } +} + +function calculateVersionDeltas(apiData) { + for (const version in apiData) { + const apisForVersion = allApisForVersion(version, apiData[version]); + + for (const api in apisForVersion) { + 
if (!versionDeltas[api]) { + versionDeltas[api] = [ ]; + } + + versionDeltas[api].push(versionDeltaData(version, apisForVersion[api])); + } + } + + for (const api in versionDeltas) { + const count = versionDeltas[api].length; + + versionDeltas[api][count - 1].changed = true; + + for (let i = count - 2; i >= 0; i--) { + versionDeltas[api][i].changed = !deltasEqual(versionDeltas[api][i], versionDeltas[api][i + 1]); + } + } +} + +async function produceMainIndex(versions) { + const versionList = versions.sort(versionSort); + const versionDefault = versionList[versionList.length - 1]; + + if (options.verbose) { + console.log(`Producing documentation index...`); + } + + let content = ""; + + content += `\n`; + content += `\n`; + + content += versionIndexContent(versionDefault, apiData[versionDefault]); + + const filename = `${outputPath}/index.html`; + + await fs.mkdir(outputPath, { recursive: true }); + await fs.writeFile(filename, await layout({ + title: `APIs (${projectTitle} ${versionDefault})`, + content: content + })); +} + +function versionSort(a, b) { + if (a === b) { + return 0; + } + + const aVersion = a.match(/^v(\d+)(?:\.(\d+)(?:\.(\d+)(?:\.(\d+))?)?)?(?:-(.*))?$/); + const bVersion = b.match(/^v(\d+)(?:\.(\d+)(?:\.(\d+)(?:\.(\d+))?)?)?(?:-(.*))?$/); + + if (!aVersion && !bVersion) { + return a.localeCompare(b); + } + else if (aVersion && !bVersion) { + return -1; + } + else if (!aVersion && bVersion) { + return 1; + } + + for (let i = 1; i < 5; i++) { + if (!aVersion[i] && !bVersion[i]) { + break; + } + else if (aVersion[i] && !bVersion[i]) { + return 1; + } + else if (!aVersion[i] && bVersion[i]) { + return -1; + } + else if (aVersion[i] !== bVersion[i]) { + return aVersion[i] - bVersion[i]; + } + } + + if (aVersion[5] && !bVersion[5]) { + return -1; + } + else if (!aVersion[5] && bVersion[5]) { + return 1; + } + else if (aVersion[5] && bVersion[5]) { + return aVersion[5].localeCompare(bVersion[5]); + } + + return 0; +} + +program.option('--output ') + .option('--layout ') + .option('--jekyll-layout ') + .option('--verbose') + .option('--force') + .option('--strict'); +program.parse(); + +const options = program.opts(); + +if (program.args.length != 2) { + console.error(`usage: ${path.basename(process.argv[1])} raw_api_dir output_dir`); + process.exit(1); +} + +const docsPath = program.args[0]; +const outputPath = program.args[1]; + +(async () => { + try { + for (const version of (await fs.readdir(docsPath)) + .filter(a => a.endsWith('.json')) + .map(a => a.replace(/\.json$/, '')) + .sort(versionSort) + .reverse()) { + versions.push(version); + } + + for (const version of versions) { + if (options.verbose) { + console.log(`Reading documentation data for ${version}...`); + } + + apiData[version] = JSON.parse(await fs.readFile(`${docsPath}/${version}.json`)); + } + + if (showVersions) { + if (options.verbose) { + console.log(`Calculating version deltas...`); + } + + calculateVersionDeltas(apiData); + } + + for (const version of versions) { + await produceDocumentationForVersion(version, apiData[version]); + } + + await produceMainIndex(versions); + } catch (e) { + console.error(e); + process.exit(1); + } +})(); diff --git a/script/api-docs/generate b/script/api-docs/generate new file mode 100755 index 000000000..c103cc198 --- /dev/null +++ b/script/api-docs/generate @@ -0,0 +1,105 @@ +#!/usr/bin/env bash +# +# Usage: generate repo_path output_path +# +# Example: generate https://github.com/libgit2/libgit2 path_to_output +# to clone the repository from GitHub and produce 
documentation; +# the repo_path can also be a local path + +set -eo pipefail + +source_path=$(mktemp -d) +verbose=true +force= + +if [ "$1" = "" ]; then + echo "usage: $0 repo_path output_path" 1>&2 + exit 1 +fi + +repo_path=$1 +output_path=$2 + +function do_checkout { + if [ "$1" = "" ]; then + echo "usage: $0 source_path" 1>&2 + exit 1 + fi + + if [ "${verbose}" ]; then + echo ":: Checking out source trees..." + echo "" + fi + + source_path=$1 + + mkdir -p "${source_path}" + git clone "${repo_path}" "${source_path}/main" --no-checkout + ( cd "${source_path}/main" && git sparse-checkout set --no-cone 'include/*' ) + ( cd "${source_path}/main" && git read-tree origin/main ) + ( cd "${source_path}/main" && git checkout -- include ) + + for tag in $(git --git-dir="${source_path}/main/.git" tag -l); do + git --git-dir="${source_path}/main/.git" worktree add -f "${source_path}/${tag}" "${tag}" --no-checkout + ( cd "${source_path}/${tag}" && git sparse-checkout set --no-cone 'include/*' ) + ( cd "${source_path}/${tag}" && git read-tree HEAD ) + + if [ "${tag}" == "v0.1.0" ]; then + ( cd "${source_path}/${tag}" && git checkout -- src/git ) + elif [ "${tag}" == "v0.2.0" -o "${tag}" == "v0.3.0" ]; then + ( cd "${source_path}/${tag}" && git checkout -- src/git2 ) + else + ( cd "${source_path}/${tag}" && git checkout -- include ) + fi + done +} + +do_checkout ${source_path} + +if [ "${verbose}" ]; then + echo "" + echo ":: Generating raw API documentation..." + echo "" +fi + +for version in ${source_path}/*; do + version=$(echo "${version}" | sed -e "s/.*\///") + commit=$( cd "${source_path}/${version}" && git rev-parse HEAD ) + + if [ -f "${output_path}/api/${version}.json" ]; then + existing_commit=$(jq -r .info.commit < "${output_path}/api/${version}.json") + + if [ "${existing_commit}" == "${commit}" -a ! "${force}" ]; then + if [ "${verbose}" ]; then + echo "Raw API documentation for ${version} exists; skipping..." + fi + + continue + fi + fi + + options="" + if [ "${force}" ]; then + options="${options} --force" + fi + + echo "Generating raw API documentation for ${version}..." + mkdir -p "${output_path}/api" + node ./api-generator.js $options "${source_path}/${version}" > "${output_path}/api/${version}.json" +done + +if [ "${verbose}" ]; then + echo "" + echo ":: Generating HTML documentation..." 
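  # One JSON spec per version now exists under "${output_path}/api"; the
  # docs-generator.js invocation below renders those specs into the HTML
  # reference pages under "${output_path}/reference".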
+ echo "" +fi + +options="" +if [ "${verbose}" ]; then + options="${options} --verbose" +fi +if [ "${force}" ]; then + options="${options} --force" +fi + +node ./docs-generator.js --verbose --jekyll-layout default "${output_path}/api" "${output_path}/reference" diff --git a/script/api-docs/package-lock.json b/script/api-docs/package-lock.json new file mode 100644 index 000000000..32c4e3b6f --- /dev/null +++ b/script/api-docs/package-lock.json @@ -0,0 +1,79 @@ +{ + "name": "_generator", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "dependencies": { + "commander": "^12.1.0", + "markdown-it": "^14.1.0" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" + }, + "node_modules/commander": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", + "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==", + "engines": { + "node": ">=18" + } + }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", + "dependencies": { + "uc.micro": "^2.0.0" + } + }, + "node_modules/markdown-it": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", + "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", + "dependencies": { + "argparse": "^2.0.1", + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + "uc.micro": "^2.1.0" + }, + "bin": { + "markdown-it": "bin/markdown-it.mjs" + } + }, + "node_modules/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==" + }, + "node_modules/punycode.js": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", + "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/uc.micro": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==" + } + } +} diff --git a/script/api-docs/package.json b/script/api-docs/package.json new file mode 100644 index 000000000..53ae07054 --- /dev/null +++ b/script/api-docs/package.json @@ -0,0 +1,6 @@ +{ + "dependencies": { + "commander": "^12.1.0", + "markdown-it": "^14.1.0" + } +}
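The generate script above can also be run by hand for local testing. A minimal
sketch, assuming a local libgit2 clone at ~/src/libgit2 and an illustrative
output directory of /tmp/libgit2-docs (both paths are examples, not part of
this patch):

    cd script/api-docs
    npm install                                  # installs commander and markdown-it from package.json
    ./generate ~/src/libgit2 /tmp/libgit2-docs
    # raw API specs:        /tmp/libgit2-docs/api/<version>.json
    # rendered HTML pages:  /tmp/libgit2-docs/reference/

The script checks out the main branch plus one worktree per tag into a
temporary directory, skips any version whose recorded commit in the existing
JSON has not changed (unless forced), and then runs docs-generator.js over the
whole api directory to produce the reference output.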