diff --git a/scripts/convert-adoc.js b/scripts/convert-adoc.js
index e4ac4e7a..93eebe5c 100755
--- a/scripts/convert-adoc.js
+++ b/scripts/convert-adoc.js
@@ -1,6 +1,7 @@
 #!/usr/bin/env node
 
 const fs = require("fs").promises;
+const fsSync = require("fs");
 const path = require("path");
 const { execSync } = require("child_process");
 const { glob } = require("glob");
@@ -61,7 +62,7 @@ async function convertAdocFiles(directory) {
       await fs.writeFile(tempFile, content, "utf8");
 
       // Run downdoc
-      execSync(`bunx downdoc "${tempFile}"`, { stdio: "pipe" });
+      execSync(`pnpm dlx downdoc "${tempFile}"`, { stdio: "pipe" });
 
       // Find the generated .md file
       const tempMdFile = path.join(dir, `temp_${filename}.md`);
@@ -91,70 +92,67 @@ async function convertAdocFiles(directory) {
       // Fix xref: links - remove xref: and convert .adoc to .mdx
       mdContent = mdContent.replace(
         /xref:\[([^\]]+)\]\(([^)]+)\)/g,
-        "[$1]($2)"
+        "[$1]($2)",
       );
 
       // Fix .adoc internal links to .mdx
       mdContent = mdContent.replace(
         /\]\(([^)]+)\.adoc([^)]*)\)/g,
-        "]($1.mdx$2)"
+        "]($1.mdx$2)",
       );
 
       // Fix curly bracket file references {filename} -> filename
-      mdContent = mdContent.replace(
-        /\{([^}]+)\}/g,
-        "$1"
-      );
+      mdContent = mdContent.replace(/\{([^}]+)\}/g, "$1");
 
       // Fix HTML-style callouts <dl><dt><strong>📌 NOTE</strong></dt><dd>...</dd></dl>
       // Handle multi-line callouts by using a more permissive pattern
       mdContent = mdContent.replace(
-        /<dl><dt><strong>[📌🔔ℹ️]\s*(NOTE|TIP|INFO)<\/strong><\/dt><dd>([\s\S]*?)<\/dd><\/dl>/g,
-        "\n$2\n"
+        /<dl><dt><strong>[📌🔔ℹ️]\s*(NOTE|TIP|INFO)<\/strong><\/dt><dd>([\s\S]*?)<\/dd><\/dl>/gu,
+        "\n$2\n",
       );
 
       mdContent = mdContent.replace(
         /<dl><dt><strong>[⚠️🚨❗]\s*(WARNING|IMPORTANT|CAUTION|DANGER)<\/strong><\/dt><dd>([\s\S]*?)<\/dd><\/dl>/g,
-        "\n$2\n"
+        "\n$2\n",
       );
 
       // Handle cases where </dd></dl> might be missing or malformed
       mdContent = mdContent.replace(
-        /<dl><dt><strong>[📌🔔ℹ️]\s*(NOTE|TIP|INFO)<\/strong><\/dt><dd>([\s\S]*?)(?=\n\n|<dl>|$)/g,
-        "\n$2\n"
+        /<dl><dt><strong>[📌🔔ℹ️]\s*(NOTE|TIP|INFO)<\/strong><\/dt><dd>([\s\S]*?)(?=\n\n|<dl>|$)/gu,
+        "\n$2\n",
       );
 
       mdContent = mdContent.replace(
         /<dl><dt><strong>[⚠️🚨❗]\s*(WARNING|IMPORTANT|CAUTION|DANGER)<\/strong><\/dt><dd>([\s\S]*?)(?=\n\n|<dl>|$)/g,
-        "\n$2\n"
+        "\n$2\n",
       );
 
       // Fix xref patterns with complex anchors like xref:#ISRC6-\\__execute__[...]
       mdContent = mdContent.replace(
         /xref:#([^[\]]+)\[([^\]]+)\]/g,
-        "[$2](#$1)"
+        "[$2](#$1)",
       );
 
       // Fix simple xref patterns
-      mdContent = mdContent.replace(
-        /xref:([^[\s]+)\[([^\]]+)\]/g,
-        "[$2]($1)"
-      );
+      mdContent = mdContent.replace(/xref:([^[\s]+)\[([^\]]+)\]/g, "[$2]($1)");
 
       // Clean up orphaned HTML tags from malformed callouts
       // Handle orphaned <dl><dt><strong>EMOJI TYPE</strong></dt><dd> without closing tags
       mdContent = mdContent.replace(
-        /<dl><dt><strong>[📌🔔ℹ️]\s*(NOTE|TIP|INFO)<\/strong><\/dt><dd>\s*\n([\s\S]*?)(?=\n\n|<dl>|$)/g,
-        "\n$2\n"
+        /<dl><dt><strong>[📌🔔ℹ️]\s*(NOTE|TIP|INFO)<\/strong><\/dt><dd>\s*\n([\s\S]*?)(?=\n\n|<dl>|$)/gu,
+        "\n$2\n",
      );
 
       mdContent = mdContent.replace(
         /<dl><dt><strong>[⚠️🚨❗]\s*(WARNING|IMPORTANT|CAUTION|DANGER)<\/strong><\/dt><dd>\s*\n([\s\S]*?)(?=\n\n|<dl>|$)/g,
-        "\n$2\n"
+        "\n$2\n",
       );
 
       // Clean up any remaining orphaned HTML tags
-      mdContent = mdContent.replace(/<dl><dt><strong>.*?<\/strong><\/dt><dd>/g, "");
+      mdContent = mdContent.replace(
+        /<dl><dt><strong>.*?<\/strong><\/dt><dd>/g,
+        "",
+      );
       mdContent = mdContent.replace(/<\/dd><\/dl>/g, "");
       mdContent = mdContent.replace(/<dd>/g, "");
       mdContent = mdContent.replace(/<\/dd>/g, "");
@@ -172,12 +170,13 @@ async function convertAdocFiles(directory) {
       const title = headerMatch ? headerMatch[1].trim() : filename;
 
       // Remove the first H1 from content
-      const contentWithoutFirstH1 = mdContent.replace(/^#+\s+.+$/m, '').replace(/^\n+/, '');
+      const contentWithoutFirstH1 = mdContent
+        .replace(/^#+\s+.+$/m, "")
+        .replace(/^\n+/, "");
 
       // Create MDX with frontmatter
       const mdxContent = `---
 title: ${title}
-description: ${title}
 ---
 
 ${contentWithoutFirstH1}`;
@@ -196,5 +195,43 @@ ${contentWithoutFirstH1}`;
   }
 }
 
+// Process files to remove curly brackets after conversion
+function processFile(filePath) {
+  try {
+    const content = fsSync.readFileSync(filePath, "utf8");
+    // Preserve brackets inside code fences (```...```)
+    const modifiedContent = content.replace(/```[\s\S]*?```|[{}]/g, (match) => {
+      // If match contains newlines or starts with ```, it's a code block - preserve it
+      return match.includes("\n") || match.startsWith("```") ? match : "";
+    });
+    fsSync.writeFileSync(filePath, modifiedContent, "utf8");
+    console.log(`Processed: ${filePath}`);
+  } catch (error) {
+    console.error(`Error processing ${filePath}: ${error.message}`);
+  }
+}
+
+function crawlDirectory(dirPath) {
+  try {
+    const items = fsSync.readdirSync(dirPath);
+
+    for (const item of items) {
+      const itemPath = path.join(dirPath, item);
+      const stats = fsSync.statSync(itemPath);
+
+      if (stats.isDirectory()) {
+        crawlDirectory(itemPath);
+      } else if (stats.isFile()) {
+        processFile(itemPath);
+      }
+    }
+  } catch (error) {
+    console.error(`Error crawling directory ${dirPath}: ${error.message}`);
+  }
+}
+
 const directory = process.argv[2];
 convertAdocFiles(directory).catch(console.error);
+
+// Run bracket processing after conversion
+crawlDirectory(directory);
diff --git a/scripts/generate-api-docs.js b/scripts/generate-api-docs.js
new file mode 100755
index 00000000..a38db146
--- /dev/null
+++ b/scripts/generate-api-docs.js
@@ -0,0 +1,222 @@
+#!/usr/bin/env node
+
+const fs = require("node:fs").promises;
+const path = require("node:path");
+const { execSync } = require("node:child_process");
+
+// Parse command line arguments
+function parseArgs() {
+  const args = process.argv.slice(2);
+  const options = {
+    contractsRepo:
+      "https://github.com/stevedylandev/openzeppelin-contracts.git",
+    contractsBranch: "master",
+    tempDir: "temp-contracts",
+    apiOutputDir: "content/contracts/v5.x/api",
+    examplesOutputDir: "examples",
+  };
+
+  for (let i = 0; i < args.length; i++) {
+    const arg = args[i];
+    switch (arg) {
+      case "--help":
+      case "-h":
+        showHelp();
+        process.exit(0);
+        break;
+      case "--repo":
+      case "-r":
+        options.contractsRepo = args[++i];
+        break;
+      case "--branch":
+      case "-b":
+        options.contractsBranch = args[++i];
+        break;
+      case "--temp-dir":
+      case "-t":
+        options.tempDir = args[++i];
+        break;
+      case "--api-output":
+      case "-a":
+        options.apiOutputDir = args[++i];
+        break;
+      case "--examples-output":
+      case "-e":
+        options.examplesOutputDir = args[++i];
+        break;
+      default:
+        console.error(`Unknown option: ${arg}`);
+        showHelp();
+        process.exit(1);
+    }
+  }
+
+  return options;
+}
+
+function showHelp() {
+  console.log(`
+Generate OpenZeppelin Contracts API documentation
+
+Usage: node generate-api-docs.js [options]
+
+Options:
+  -r, --repo              Contracts repository URL (default: https://github.com/OpenZeppelin/openzeppelin-contracts.git)
+  -b, --branch            Contracts repository branch (default: master)
+  -t, --temp-dir          Temporary directory for cloning (default: temp-contracts)
+  -a, --api-output        API documentation output directory (default: content/contracts/v5.x/api)
+  -e, --examples-output   Examples output directory (default: examples)
+  -h, --help              Show this help message
+
+Examples:
+  node generate-api-docs.js
+  node generate-api-docs.js --repo https://github.com/myorg/contracts.git --branch v4.0
+  node generate-api-docs.js --api-output content/contracts/v4.x/api --examples-output examples-v4
+`);
+}
+
+async function generateApiDocs(options) {
+  const {
+    contractsRepo,
+    contractsBranch,
+    tempDir,
+    apiOutputDir,
+    examplesOutputDir,
+  } = options;
+
+  console.log("🔄 Generating OpenZeppelin Contracts API documentation...");
+  console.log(`📦 Repository: ${contractsRepo}`);
+  console.log(`🌿 Branch: ${contractsBranch}`);
+  console.log(`📂 API Output: ${apiOutputDir}`);
+  console.log(`📂 Examples Output: ${examplesOutputDir}`);
+
+  try {
+    // Clean up previous runs
+    console.log("🧹 Cleaning up previous runs...");
+    await fs.rm(tempDir, { recursive: true, force: true });
+    await fs.rm(apiOutputDir, { recursive: true, force: true });
+
+    // Create output directory
+    await fs.mkdir(apiOutputDir, { recursive: true });
+
+    // Clone the contracts repository
+    console.log("📦 Cloning contracts repository...");
+    execSync(
+      `git clone --depth 1 --branch "${contractsBranch}" --recurse-submodules "${contractsRepo}" "${tempDir}"`,
+      {
+        stdio: "inherit",
+      },
+    );
+
+    // Navigate to contracts directory and install dependencies
+    console.log("📚 Installing dependencies...");
+    const originalDir = process.cwd();
+    process.chdir(tempDir);
+
+    try {
+      execSync("npm install --silent", { stdio: "inherit" });
+
+      // Generate markdown documentation
+      console.log("🏗️ Generating clean markdown documentation...");
+      execSync("npm run prepare-docs", { stdio: "inherit" });
+
+      // Copy generated markdown files
+      console.log("📋 Copying generated documentation...");
+      const docsPath = path.join("docs", "modules", "api", "pages");
+
+      try {
+        await fs.access(docsPath);
+        // Copy API docs
+        const apiSource = path.join(process.cwd(), docsPath);
+        const apiDest = path.join(originalDir, apiOutputDir);
+        await copyDirRecursive(apiSource, apiDest);
+        console.log(`✅ API documentation copied to ${apiOutputDir}`);
+      } catch (error) {
+        console.log(
+          "❌ Error: Markdown documentation not found at expected location",
+        );
+        process.exit(1);
+      }
+
+      // Copy examples if they exist
+      const examplesPath = path.join("docs", "modules", "api", "examples");
+      if (
+        await fs
+          .access(examplesPath)
+          .then(() => true)
+          .catch(() => false)
+      ) {
+        const examplesDest = path.join(originalDir, examplesOutputDir);
+        await fs.mkdir(examplesDest, { recursive: true });
+        await copyDirRecursive(
+          path.join(process.cwd(), examplesPath),
+          examplesDest,
+        );
+        console.log(`✅ Examples copied to ${examplesOutputDir}`);
+      }
+
+      // Get version for index file
+      let version = "latest";
+      try {
+        const packageJson = JSON.parse(
+          await fs.readFile("package.json", "utf8"),
+        );
+        version = packageJson.version || version;
+      } catch (error) {
+        console.log("⚠️ Could not read package.json for version info");
+      }
+
+      // Generate index file
+      console.log("📝 Generating API index...");
+      const indexContent = `---
+title: API Reference
+---
+
+# API Reference
+`;
+
+      await fs.writeFile(
+        path.join(originalDir, apiOutputDir, "index.mdx"),
+        indexContent,
+        "utf8",
+      );
+    } finally {
+      // Go back to original directory
+      process.chdir(originalDir);
+    }
+
+    // Clean up temporary directory
+    console.log("🧹 Cleaning up...");
+    await fs.rm(tempDir, { recursive: true, force: true });
+
+    console.log("🎉 API documentation generation complete!");
+    console.log(`📂 Documentation available in: ${apiOutputDir}`);
+    console.log("");
+    console.log("Next steps:");
+    console.log(`  - Review generated markdown files in ${apiOutputDir}`);
+    console.log("  - Update the api/index.mdx file with your TOC");
+  } catch (error) {
+    console.error("❌ Error generating API documentation:", error.message);
+    process.exit(1);
+  }
+}
+
+async function copyDirRecursive(src, dest) {
+  const entries = await fs.readdir(src, { withFileTypes: true });
+
+  for (const entry of entries) {
+    const srcPath = path.join(src, entry.name);
+    const destPath = path.join(dest, entry.name);
+
+    if (entry.isDirectory()) {
+      await fs.mkdir(destPath, { recursive: true });
+      await copyDirRecursive(srcPath, destPath);
+    } else {
+      await fs.copyFile(srcPath, destPath);
+    }
+  }
+}
+
+// Main execution
+const options = parseArgs();
+generateApiDocs(options);
diff --git a/scripts/generate-api-docs.sh b/scripts/generate-api-docs.sh
deleted file mode 100644
index ced7b4a9..00000000
--- a/scripts/generate-api-docs.sh
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env bash
-
-set -euo pipefail
-
-# Configuration
-CONTRACTS_REPO="${CONTRACTS_REPO:-https://github.com/stevedylandev/openzeppelin-contracts.git}"
-CONTRACTS_BRANCH="${CONTRACTS_BRANCH:-release-v3.4}"
-TEMP_DIR="temp-contracts"
-API_OUTPUT_DIR="content/contracts/v3.x/api"
-EXAMPLES_OUTPUT_DIR="examples"
-
-echo "🔄 Generating OpenZeppelin Contracts API documentation..."
-
-# Clean up previous runs
-rm -rf "$TEMP_DIR" "$API_OUTPUT_DIR"
-mkdir -p "$API_OUTPUT_DIR"
-
-# Clone the contracts repository
-echo "📦 Cloning contracts repository..."
-git clone --depth 1 --branch "$CONTRACTS_BRANCH" --recurse-submodules "$CONTRACTS_REPO" "$TEMP_DIR"
-
-# Navigate to contracts directory and install dependencies
-echo "📚 Installing dependencies..."
-cd "$TEMP_DIR"
-npm i --silent
-
-# Generate markdown documentation using default templates
-echo "🏗️ Generating clean markdown documentation..."
-npm run prepare-docs
-
-# Copy generated markdown files to our docs directory
-echo "📋 Copying generated documentation..."
-if [ -d "docs/modules/api/pages" ]; then
-  cp -r docs/modules/api/pages/* "../$API_OUTPUT_DIR/"
-  echo "✅ API documentation copied to $API_OUTPUT_DIR"
-else
-  echo "❌ Error: Markdown documentation not found at expected location"
-  exit 1
-fi
-
-# Copy examples if they exist
-if [ -d "docs/modules/api/examples" ]; then
-  mkdir -p "../$EXAMPLES_OUTPUT_DIR"
-  cp -r docs/modules/api/examples/* "../$EXAMPLES_OUTPUT_DIR"
-  echo "✅ Examples copied to $EXAMPLES_OUTPUT_DIR"
-fi
-
-# Raw markdown files are ready to use as-is
-echo "📄 Using raw markdown files from solidity-docgen"
-
-# Go back to the docs repo directory
-cd ..
-
-# Clean up temporary directory
-echo "🧹 Cleaning up..."
-rm -rf "$TEMP_DIR"
-
-# Generate index file
-echo "📝 Generating API index..."
-cat > "$API_OUTPUT_DIR/index.mdx" << 'EOF'
----
-title: API Reference
----
-
-# OpenZeppelin Contracts API Reference
-
-This API reference is automatically generated from the OpenZeppelin Contracts repository.
-
-## Contract Categories
-
-### Access Control
-- [Access Control](access.md) - Role-based access control mechanisms
-- [Ownable](access.md#ownable) - Simple ownership access control
-
-### Tokens
-- [ERC20](token/ERC20.md) - Fungible token standard implementation
-- [ERC721](token/ERC721.md) - Non-fungible token standard implementation
-- [ERC1155](token/ERC1155.md) - Multi-token standard implementation
-
-### Utilities
-- [Utils](utils.md) - General utility functions and contracts
-- [Cryptography](utils/cryptography.md) - Cryptographic utilities
-
-### Governance
-- [Governance](governance.md) - On-chain governance systems
-
-### Proxy Patterns
-- [Proxy](proxy.md) - Upgradeable proxy patterns
-
-### Interfaces
-- [Interfaces](interfaces.md) - Standard interfaces
-
----
-
-*Generated from OpenZeppelin Contracts v$(cat ../temp-contracts/package.json | grep '"version"' | cut -d '"' -f 4)*
-EOF
-
-echo "🎉 API documentation generation complete!"
-echo "📂 Documentation available in: $API_OUTPUT_DIR"
-echo ""
-echo "Next steps:"
-echo "  - Review generated markdown files in $API_OUTPUT_DIR"
-echo "  - Run your documentation site build process"
-echo "  - Consider setting up automated regeneration on contract updates"
diff --git a/scripts/generate-changelog.js b/scripts/generate-changelog.js
new file mode 100755
index 00000000..8f232575
--- /dev/null
+++ b/scripts/generate-changelog.js
@@ -0,0 +1,71 @@
+#!/usr/bin/env node
+
+const fs = require("node:fs").promises;
+const path = require("node:path");
+const { execSync } = require("node:child_process");
+
+// Check if two arguments are provided
+if (process.argv.length !== 4) {
+  console.error("Usage: node generate-changelog.js <url> <file_path>");
+  process.exit(1);
+}
+
+// Assign arguments to variables
+const url = process.argv[2];
+const filePath = process.argv[3];
+
+// Check if changelog-from-release is installed
+function checkChangelogFromRelease() {
+  try {
+    // Try to run the command to see if it exists
+    execSync("which changelog-from-release", { stdio: "pipe" });
+  } catch (error) {
+    console.error("Error: changelog-from-release is not installed.");
+    console.error(
+      "Please install it from: https://github.com/rhysd/changelog-from-release",
+    );
+    process.exit(1);
+  }
+}
+
+async function generateChangelog() {
+  try {
+    // Create the changelog.mdx file with the specified content
+    const frontmatter = `---
+title: Changelog
+---
+
+`;
+
+    // Run changelog-from-release and get output
+    const changelogOutput = execSync(`changelog-from-release -r "${url}"`, {
+      encoding: "utf8",
+      stdio: "pipe",
+    });
+
+    // Remove the generated tag at the end
+    const cleanOutput = changelogOutput
+      .replace(/\s*$/, "")
+      .trim();
+
+    // Write the complete content
+    const fullContent = frontmatter + cleanOutput + "\n";
+    await fs.writeFile(
+      path.join(filePath, "changelog.mdx"),
+      fullContent,
+      "utf8",
+    );
+
+    console.log(
+      `Changelog generated successfully at ${path.join(filePath, "changelog.mdx")}`,
+    );
+  } catch (error) {
+    console.error("Error generating changelog:", error.message);
+    process.exit(1);
+  }
+}
+
+// Check if required tool is installed
+checkChangelogFromRelease();
+
+generateChangelog();
diff --git a/scripts/generate-changelog.sh b/scripts/generate-changelog.sh
deleted file mode 100644
index d3a1d3bc..00000000
--- a/scripts/generate-changelog.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env bash
-
-# Check if two arguments are provided
-if [ $# -ne 2 ]; then
-  echo "Usage: $0 <url> <file_path>"
-  exit 1
-fi
-
-# Assign arguments to variables
-url="$1"
-file_path="$2"
-
-# Create the changelog.mdx file with the specified content
-cat > "$file_path/changelog.mdx" << EOF
----
-title: Changelog
----
-
-EOF
-
-# Run changelog-from-release and append to the file
-changelog-from-release -r "$url" >> "$file_path/changelog.mdx"
diff --git a/scripts/replace-brackets.js b/scripts/replace-brackets.js
deleted file mode 100644
index 920b666e..00000000
--- a/scripts/replace-brackets.js
+++ /dev/null
@@ -1,40 +0,0 @@
-const fs = require("fs");
-const path = require("path");
-
-function processFile(filePath) {
-  try {
-    const content = fs.readFileSync(filePath, "utf8");
-    // Preserve brackets inside code fences (```...```)
-    const modifiedContent = content.replace(/```[\s\S]*?```|[{}]/g, (match) => {
-      // If match contains newlines or starts with ```, it's a code block - preserve it
-      return match.includes('\n') || match.startsWith('```') ? match : '';
-    });
-    fs.writeFileSync(filePath, modifiedContent, "utf8");
-    console.log(`Processed: ${filePath}`);
-  } catch (error) {
-    console.error(`Error processing ${filePath}: ${error.message}`);
-  }
-}
-
-function crawlDirectory(dirPath) {
-  try {
-    const items = fs.readdirSync(dirPath);
-
-    for (const item of items) {
-      const itemPath = path.join(dirPath, item);
-      const stats = fs.statSync(itemPath);
-
-      if (stats.isDirectory()) {
-        crawlDirectory(itemPath);
-      } else if (stats.isFile()) {
-        processFile(itemPath);
-      }
-    }
-  } catch (error) {
-    console.error(`Error crawling directory ${dirPath}: ${error.message}`);
-  }
-}
-
-// Start crawling from current directory or specify a path
-const targetPath = process.argv[2] || ".";
-crawlDirectory(targetPath);