mirror of
https://github.com/zadam/trilium.git
synced 2025-11-01 20:19:05 +01:00
feat(docs/share): integrate in the CI
This commit is contained in:
parent
4c3fcdba4a
commit
bde03e8378
58
.github/workflows/deploy-docs.yml
vendored
58
.github/workflows/deploy-docs.yml
vendored
@ -11,11 +11,8 @@ on:
|
||||
# Only run when docs files change
|
||||
paths:
|
||||
- 'docs/**'
|
||||
- 'README.md' # README is synced to docs/index.md
|
||||
- 'mkdocs.yml'
|
||||
- 'requirements-docs.txt'
|
||||
- '.github/workflows/deploy-docs.yml'
|
||||
- 'scripts/fix-mkdocs-structure.ts'
|
||||
- 'apps/edit-docs/**'
|
||||
- 'packages/share-theme/**'
|
||||
|
||||
# Allow manual triggering from Actions tab
|
||||
workflow_dispatch:
|
||||
@ -52,69 +49,24 @@ jobs:
|
||||
with:
|
||||
fetch-depth: 0 # Fetch all history for git info and mkdocs-git-revision-date plugin
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: '3.14'
|
||||
cache: 'pip'
|
||||
cache-dependency-path: 'requirements-docs.txt'
|
||||
|
||||
- name: Install MkDocs and Dependencies
|
||||
run: |
|
||||
pip install --upgrade pip
|
||||
pip install -r requirements-docs.txt
|
||||
env:
|
||||
PIP_DISABLE_PIP_VERSION_CHECK: 1
|
||||
|
||||
# Setup pnpm before fixing docs structure
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
|
||||
# Setup Node.js with pnpm
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: '24'
|
||||
cache: 'pnpm'
|
||||
|
||||
# Install Node.js dependencies for the TypeScript script
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
pnpm install --frozen-lockfile
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Fix Documentation Structure
|
||||
run: |
|
||||
# Fix duplicate navigation entries by moving overview pages to index.md
|
||||
pnpm run chore:fix-mkdocs-structure
|
||||
|
||||
- name: Build MkDocs Site
|
||||
run: |
|
||||
# Build with strict mode but allow expected warnings
|
||||
mkdocs build --verbose || {
|
||||
EXIT_CODE=$?
|
||||
# Check if the only issue is expected warnings
|
||||
if mkdocs build 2>&1 | grep -E "WARNING.*(README|not found)" && \
|
||||
[ $(mkdocs build 2>&1 | grep -c "ERROR") -eq 0 ]; then
|
||||
echo "✅ Build succeeded with expected warnings"
|
||||
mkdocs build --verbose
|
||||
else
|
||||
echo "❌ Build failed with unexpected errors"
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
}
|
||||
|
||||
- name: Fix HTML Links
|
||||
run: |
|
||||
# Remove .md extensions from links in generated HTML
|
||||
pnpm tsx ./scripts/fix-html-links.ts site
|
||||
- name: Trigger build of documentation
|
||||
run: pnpm docs:build
|
||||
|
||||
- name: Validate Built Site
|
||||
run: |
|
||||
# Basic validation that important files exist
|
||||
test -f site/index.html || (echo "ERROR: site/index.html not found" && exit 1)
|
||||
test -f site/sitemap.xml || (echo "ERROR: site/sitemap.xml not found" && exit 1)
|
||||
test -d site/assets || (echo "ERROR: site/assets directory not found" && exit 1)
|
||||
echo "✅ Site validation passed"
|
||||
|
||||
- name: Deploy
|
||||
uses: ./.github/actions/deploy-to-cloudflare-pages
|
||||
|
||||
@ -3,10 +3,10 @@ process.env.TRILIUM_RESOURCE_DIR = "../server/src";
|
||||
process.env.NODE_ENV = "development";
|
||||
|
||||
import cls from "@triliumnext/server/src/services/cls.js";
|
||||
import { join, resolve } from "path";
|
||||
import { dirname, join, resolve } from "path";
|
||||
import fs from "fs/promises";
|
||||
import fsExtra, { type WriteStream } from "fs-extra";
|
||||
import archiver, { type Archiver } from "archiver";
|
||||
import type { ExportFormat } from "@triliumnext/server/src/services/export/zip/abstract_provider.js";
|
||||
|
||||
const DOCS_ROOT = "../../../docs";
|
||||
|
||||
@ -39,8 +39,15 @@ export async function importData(path: string) {
|
||||
|
||||
// Export
|
||||
const zipFilePath = "output.zip";
|
||||
try {
|
||||
const { exportToZipFile } = (await import("@triliumnext/server/src/services/export/zip.js")).default;
|
||||
await exportToZipFile(note.noteId, "share", zipFilePath);
|
||||
await extractZip(zipFilePath, "../../site");
|
||||
} finally {
|
||||
if (await fsExtra.exists(zipFilePath)) {
|
||||
await fsExtra.rm(zipFilePath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function createImportZip(path: string) {
|
||||
@ -70,4 +77,28 @@ function waitForEnd(archive: Archiver, stream: WriteStream) {
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Extracts a ZIP archive produced by the share-export into a directory tree.
 *
 * @param zipFilePath path of the .zip file to read.
 * @param outputPath  directory the entries are written into (subdirectories created as needed).
 * @param ignoredFiles optional set of entry names (as stored in the zip) to skip.
 */
export async function extractZip(zipFilePath: string, outputPath: string, ignoredFiles?: Set<string>) {
    const deferred = (await import("@triliumnext/server/src/services/utils.js")).deferred;

    // Deferred resolved once every entry has been written out.
    const promise = deferred<void>()
    // NOTE(review): the fixed 1s delay presumably lets pending export work settle
    // before extraction begins — confirm whether it is actually required.
    setTimeout(async () => {
        // Then extract the zip.
        const { readZipFile, readContent } = (await import("@triliumnext/server/src/services/import/zip.js"));
        await readZipFile(await fs.readFile(zipFilePath), async (zip, entry) => {
            // We ignore directories since they can appear out of order anyway.
            if (!entry.fileName.endsWith("/") && !ignoredFiles?.has(entry.fileName)) {
                const destPath = join(outputPath, entry.fileName);
                const fileContent = await readContent(zip, entry);

                // Ensure the parent directory exists before writing the file.
                await fsExtra.mkdirs(dirname(destPath));
                await fs.writeFile(destPath, fileContent);
            }

            // Advance the zip reader to the next entry.
            zip.readEntry();
        });
        // NOTE(review): an exception thrown above would leave this deferred
        // unresolved and the caller hanging — consider a try/catch that rejects.
        promise.resolve();
    }, 1000);
    await promise;
}
|
||||
|
||||
cls.init(main);
|
||||
|
||||
@ -26,7 +26,7 @@
|
||||
"chore:generate-openapi": "tsx ./scripts/generate-openapi.ts",
|
||||
"chore:update-build-info": "tsx ./scripts/update-build-info.ts",
|
||||
"chore:update-version": "tsx ./scripts/update-version.ts",
|
||||
"chore:fix-mkdocs-structure": "tsx ./scripts/fix-mkdocs-structure.ts",
|
||||
"docs:build": "cd ./apps/edit-docs && tsx ./src/build-docs.ts",
|
||||
"edit-docs:edit-docs": "pnpm run --filter edit-docs edit-docs",
|
||||
"edit-docs:edit-demo": "pnpm run --filter edit-docs edit-demo",
|
||||
"test:all": "pnpm test:parallel && pnpm test:sequential",
|
||||
|
||||
@ -1,79 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Post-process HTML files generated by MkDocs to remove .md extensions from links
|
||||
*/
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
/**
|
||||
* Process HTML content to remove .md extensions from links
|
||||
*/
|
||||
function fixHtmlLinks(content: string): string {
|
||||
// Replace .md extensions in href attributes
|
||||
// Handle both quoted and unquoted href attributes
|
||||
|
||||
// First, handle quoted hrefs: href="...something.md" or href="...something.md#anchor"
|
||||
content = content.replace(/href="([^"]*?)\.md(#[^"]*)?"/g, 'href="$1$2"');
|
||||
|
||||
// Then, handle unquoted hrefs: href=...something.md or href=...something.md#anchor
|
||||
// This matches href= followed by a non-whitespace URL ending in .md
|
||||
content = content.replace(/href=([^\s>]*?)\.md(#[^\s>]*)?(?=[\s>])/g, 'href=$1$2');
|
||||
|
||||
return content;
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively process all HTML files in a directory
|
||||
*/
|
||||
function processDirectory(dir: string): number {
|
||||
let filesProcessed = 0;
|
||||
|
||||
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(dir, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
// Recursively process subdirectories
|
||||
filesProcessed += processDirectory(fullPath);
|
||||
} else if (entry.isFile() && entry.name.endsWith('.html')) {
|
||||
// Process HTML files
|
||||
const content = fs.readFileSync(fullPath, 'utf-8');
|
||||
const fixedContent = fixHtmlLinks(content);
|
||||
|
||||
if (content !== fixedContent) {
|
||||
fs.writeFileSync(fullPath, fixedContent, 'utf-8');
|
||||
console.log(`Fixed: ${path.relative(process.cwd(), fullPath)}`);
|
||||
filesProcessed++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return filesProcessed;
|
||||
}
|
||||
|
||||
function main(): number {
|
||||
const args = process.argv.slice(2);
|
||||
const siteDir = args[0] || 'site';
|
||||
|
||||
const fullPath = path.resolve(siteDir);
|
||||
|
||||
if (!fs.existsSync(fullPath)) {
|
||||
console.error(`Error: Directory '${fullPath}' does not exist`);
|
||||
return 1;
|
||||
}
|
||||
|
||||
console.log(`Processing HTML files in: ${fullPath}`);
|
||||
console.log('-'.repeat(50));
|
||||
|
||||
const filesProcessed = processDirectory(fullPath);
|
||||
|
||||
console.log('-'.repeat(50));
|
||||
console.log(`Processed ${filesProcessed} HTML files`);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Script entry point: run main() and exit with its status code.
process.exit(main());
|
||||
@ -1,342 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Fix MkDocs structure by:
|
||||
* 1. Syncing README.md to docs/index.md with necessary path adjustments
|
||||
* 2. Moving overview pages to index.md inside their directories to prevent duplicate navigation entries
|
||||
*/
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
interface FixResult {
|
||||
message: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find markdown files that have a corresponding directory with the same name,
|
||||
* and move them to index.md inside that directory.
|
||||
*/
|
||||
function fixDuplicateEntries(docsDir: string): FixResult[] {
|
||||
const fixesMade: FixResult[] = [];
|
||||
|
||||
function walkDir(dir: string): void {
|
||||
let files: string[];
|
||||
try {
|
||||
files = fs.readdirSync(dir);
|
||||
} catch (err) {
|
||||
console.warn(`Warning: Unable to read directory ${dir}: ${err.message}`);
|
||||
return;
|
||||
}
|
||||
|
||||
for (const file of files) {
|
||||
const filePath = path.join(dir, file);
|
||||
let stat: fs.Stats;
|
||||
|
||||
try {
|
||||
stat = fs.statSync(filePath);
|
||||
} catch (err) {
|
||||
// File might have been moved already, skip it
|
||||
continue;
|
||||
}
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
walkDir(filePath);
|
||||
} else if (file.endsWith('.md')) {
|
||||
const basename = file.slice(0, -3); // Remove .md extension
|
||||
const dirPath = path.join(dir, basename);
|
||||
|
||||
// Check if there's a directory with the same name
|
||||
if (fs.existsSync(dirPath) && fs.statSync(dirPath).isDirectory()) {
|
||||
const indexPath = path.join(dirPath, 'index.md');
|
||||
|
||||
// Check if index.md already exists in that directory
|
||||
if (!fs.existsSync(indexPath)) {
|
||||
// Move the file to index.md in the directory
|
||||
fs.renameSync(filePath, indexPath);
|
||||
fixesMade.push({
|
||||
message: `Moved ${path.relative(docsDir, filePath)} -> ${path.relative(docsDir, indexPath)}`
|
||||
});
|
||||
|
||||
// Move associated images with pattern basename_*
|
||||
try {
|
||||
const dirFiles = fs.readdirSync(dir);
|
||||
for (const imgFile of dirFiles) {
|
||||
if (imgFile.startsWith(`${basename}_`)) {
|
||||
const imgSrc = path.join(dir, imgFile);
|
||||
try {
|
||||
if (!fs.statSync(imgSrc).isDirectory()) {
|
||||
const imgDest = path.join(dirPath, imgFile);
|
||||
fs.renameSync(imgSrc, imgDest);
|
||||
fixesMade.push({
|
||||
message: `Moved ${path.relative(docsDir, imgSrc)} -> ${path.relative(docsDir, imgDest)}`
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
// File might have been moved already, skip it
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// Directory might not exist anymore, skip it
|
||||
}
|
||||
|
||||
// Move exact match images
|
||||
const imgExtensions = ['.png', '.jpg', '.jpeg', '.gif', '.svg'];
|
||||
for (const ext of imgExtensions) {
|
||||
const imgFile = path.join(dir, `${basename}${ext}`);
|
||||
if (fs.existsSync(imgFile)) {
|
||||
const imgDest = path.join(dirPath, `${basename}${ext}`);
|
||||
fs.renameSync(imgFile, imgDest);
|
||||
fixesMade.push({
|
||||
message: `Moved ${path.relative(docsDir, imgFile)} -> ${path.relative(docsDir, imgDest)}`
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
walkDir(docsDir);
|
||||
return fixesMade;
|
||||
}
|
||||
|
||||
/**
 * Update markdown links across the docs tree so that references to pages
 * which fixDuplicateEntries moved into `<name>/index.md` now point at the
 * directory (`<name>/`) instead of the vanished `<name>.md` file.
 *
 * @param docsDir root of the docs tree; walked recursively.
 * @returns one entry per markdown file whose content was rewritten.
 */
function updateReferences(docsDir: string): FixResult[] {
    const updatesMade: FixResult[] = [];

    // Rewrites one `[text](link)` occurrence; returns the original `match`
    // untouched when no rewrite applies.
    function fixLink(match: string, text: string, link: string, currentDir: string, isIndex: boolean): string {
        // Skip external links
        if (link.startsWith('http')) {
            return match;
        }

        // Decode URL-encoded paths for processing
        let decodedLink: string;
        try {
            decodedLink = decodeURIComponent(link);
        } catch (err) {
            // If decoding fails, use the original link
            decodedLink = link;
        }

        // Special case: if we're in index.md and the link starts with the parent directory name
        // This happens when a file was converted to index.md and had links to siblings
        if (isIndex && decodedLink.includes('/')) {
            const pathParts = decodedLink.split('/');
            const parentDirName = path.basename(currentDir);

            // Check if first part matches the parent directory name
            if (pathParts[0] === parentDirName) {
                // This is a self-referential path, strip the first part
                const fixedLink = pathParts.slice(1).join('/');
                // Re-encode spaces for URL compatibility before recursing
                const fixedLinkEncoded = fixedLink.replace(/ /g, '%20');
                // Recursively process the fixed link
                return fixLink(`[${text}](${fixedLinkEncoded})`, text, fixedLinkEncoded, currentDir, isIndex);
            }
        }

        // For any .md link, check if there's a directory with index.md
        // that should be used instead
        if (!decodedLink.startsWith('/')) {
            // Resolve relative to current directory
            const resolvedPath = path.resolve(currentDir, decodedLink);

            // Check if this points to a file that should be a directory
            // Remove .md extension to get the potential directory name
            if (resolvedPath.endsWith('.md')) {
                const potentialDir = resolvedPath.slice(0, -3);
                const potentialIndex = path.join(potentialDir, 'index.md');

                // If a directory with index.md exists, update the link
                if (fs.existsSync(potentialIndex)) {
                    // If we're in an index.md file and linking to a file that's now
                    // in a sibling directory, adjust the path
                    if (isIndex) {
                        // Check if they share the same parent directory
                        if (path.dirname(potentialDir) === path.dirname(currentDir)) {
                            // It's a sibling - just use directory name
                            const dirName = path.basename(potentialDir).replace(/ /g, '%20');
                            return `[${text}](${dirName}/)`;
                        }
                    }

                    // Calculate relative path from current file to the directory
                    const newPath = path.relative(currentDir, potentialDir).replace(/\\/g, '/').replace(/ /g, '%20');
                    return `[${text}](${newPath}/)`;
                }
            }
        }

        // Also handle local references (same directory)
        // NOTE(review): this branch assumes the link ends in ".md" (guaranteed
        // by the regex in walkDir) when slicing off the extension.
        if (!decodedLink.includes('/')) {
            const basename = decodedLink.slice(0, -3); // Remove .md extension
            const possibleDir = path.join(currentDir, basename);

            if (fs.existsSync(possibleDir) && fs.statSync(possibleDir).isDirectory()) {
                // Re-encode spaces for URL compatibility
                const encodedBasename = basename.replace(/ /g, '%20');
                return `[${text}](${encodedBasename}/)`;
            }
        }

        return match;
    }

    // Recursively visit every .md file under dir and rewrite its links.
    function walkDir(dir: string): void {
        let files: string[];
        try {
            files = fs.readdirSync(dir);
        } catch (err) {
            // NOTE(review): err.message assumes an Error instance — unsafe for
            // non-Error throws and under useUnknownInCatchVariables.
            console.warn(`Warning: Unable to read directory ${dir}: ${err.message}`);
            return;
        }

        for (const file of files) {
            const filePath = path.join(dir, file);
            let stat: fs.Stats;

            try {
                stat = fs.statSync(filePath);
            } catch (err) {
                // File might have been moved already, skip it
                continue;
            }

            if (stat.isDirectory()) {
                walkDir(filePath);
            } else if (file.endsWith('.md')) {
                let content = fs.readFileSync(filePath, 'utf-8');
                const originalContent = content;

                const isIndex = file === 'index.md';
                const currentDir = path.dirname(filePath);

                // Update markdown links: [text](path.md)
                const pattern = /\[([^\]]*)\]\(([^)]+\.md)\)/g;
                content = content.replace(pattern, (match, text, link) => {
                    return fixLink(match, text, link, currentDir, isIndex);
                });

                // Only rewrite (and report) files that actually changed.
                if (content !== originalContent) {
                    fs.writeFileSync(filePath, content, 'utf-8');
                    updatesMade.push({
                        message: `Updated references in ${path.relative(docsDir, filePath)}`
                    });
                }
            }
        }
    }

    walkDir(docsDir);
    return updatesMade;
}
|
||||
|
||||
/**
|
||||
* Sync README.md to docs/index.md with necessary path adjustments
|
||||
*/
|
||||
function syncReadmeToIndex(projectRoot: string, docsDir: string): FixResult[] {
|
||||
const results: FixResult[] = [];
|
||||
const readmePath = path.join(projectRoot, 'README.md');
|
||||
const indexPath = path.join(docsDir, 'index.md');
|
||||
|
||||
if (!fs.existsSync(readmePath)) {
|
||||
console.warn('README.md not found in project root');
|
||||
return results;
|
||||
}
|
||||
|
||||
// Read README content
|
||||
let content = fs.readFileSync(readmePath, 'utf-8');
|
||||
|
||||
// Fix image path (./docs/app.png -> app.png)
|
||||
content = content.replace(/src="\.\/docs\/app\.png"/g, 'src="app.png"');
|
||||
|
||||
// Fix language links in header
|
||||
content = content.replace(/\[English\]\(\.\/README\.md\)/g, '[English](./index.md)');
|
||||
content = content.replace(/\.\/docs\/README-ZH_CN\.md/g, './README-ZH_CN.md');
|
||||
content = content.replace(/\.\/docs\/README-ZH_TW\.md/g, './README-ZH_TW.md');
|
||||
content = content.replace(/\.\/docs\/README\.ru\.md/g, './README.ru.md');
|
||||
content = content.replace(/\.\/docs\/README\.ja\.md/g, './README.ja.md');
|
||||
content = content.replace(/\.\/docs\/README\.it\.md/g, './README.it.md');
|
||||
content = content.replace(/\.\/docs\/README\.es\.md/g, './README.es.md');
|
||||
|
||||
// Fix internal documentation links (./docs/User%20Guide -> ./User%20Guide)
|
||||
content = content.replace(/\.\/docs\/User%20Guide/g, './User%20Guide');
|
||||
|
||||
// Write the adjusted content to docs/index.md
|
||||
fs.writeFileSync(indexPath, content, 'utf-8');
|
||||
results.push({
|
||||
message: `Synced README.md to docs/index.md with path adjustments`
|
||||
});
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
function main(): number {
|
||||
// Get the docs directory
|
||||
const scriptDir = path.dirname(new URL(import.meta.url).pathname);
|
||||
const projectRoot = path.dirname(scriptDir);
|
||||
const docsDir = path.join(projectRoot, 'docs');
|
||||
|
||||
// Handle Windows paths (remove leading slash if on Windows)
|
||||
const normalizedProjectRoot = process.platform === 'win32' && projectRoot.startsWith('/')
|
||||
? projectRoot.substring(1)
|
||||
: projectRoot;
|
||||
const normalizedDocsDir = process.platform === 'win32' && docsDir.startsWith('/')
|
||||
? docsDir.substring(1)
|
||||
: docsDir;
|
||||
|
||||
if (!fs.existsSync(normalizedDocsDir)) {
|
||||
console.error(`Error: docs directory not found at ${normalizedDocsDir}`);
|
||||
return 1;
|
||||
}
|
||||
|
||||
console.log(`Fixing MkDocs structure in ${normalizedDocsDir}`);
|
||||
console.log('-'.repeat(50));
|
||||
|
||||
// Sync README.md to docs/index.md
|
||||
const syncResults = syncReadmeToIndex(normalizedProjectRoot, normalizedDocsDir);
|
||||
if (syncResults.length > 0) {
|
||||
console.log('README sync:');
|
||||
for (const result of syncResults) {
|
||||
console.log(` - ${result.message}`);
|
||||
}
|
||||
console.log();
|
||||
}
|
||||
|
||||
// Fix duplicate entries
|
||||
const fixes = fixDuplicateEntries(normalizedDocsDir);
|
||||
if (fixes.length > 0) {
|
||||
console.log('Files reorganized:');
|
||||
for (const fix of fixes) {
|
||||
console.log(` - ${fix.message}`);
|
||||
}
|
||||
} else {
|
||||
console.log('No duplicate entries found that need fixing');
|
||||
}
|
||||
|
||||
console.log();
|
||||
|
||||
// Update references
|
||||
const updates = updateReferences(normalizedDocsDir);
|
||||
if (updates.length > 0) {
|
||||
console.log('References updated:');
|
||||
for (const update of updates) {
|
||||
console.log(` - ${update.message}`);
|
||||
}
|
||||
} else {
|
||||
console.log('No references needed updating');
|
||||
}
|
||||
|
||||
console.log('-'.repeat(50));
|
||||
console.log(`Structure fix complete: ${syncResults.length} README syncs, ${fixes.length} files moved, ${updates.length} files updated`);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Script entry point: run main() and exit with its status code.
process.exit(main());
|
||||
Loading…
x
Reference in New Issue
Block a user