diff --git a/.github/workflows/create-release-branch.yml b/.github/workflows/create-release-branch.yml index 3950d5a..a490691 100644 --- a/.github/workflows/create-release-branch.yml +++ b/.github/workflows/create-release-branch.yml @@ -227,14 +227,14 @@ jobs: output-schema-file: .github/codex/schemas/release-output.json codex-args: "--full-auto" - - name: Apply version bumps and changelogs from Codex + - name: Apply version bumps and changelogs via Nx Release id: versions if: steps.affected.outputs.projects != '' shell: bash run: | set -euo pipefail - echo "Applying version bumps and changelogs from Codex analysis..." + echo "Applying version bumps via Nx Release..." if [ ! -f "${{ env.CODEX_OUTPUT }}" ]; then echo "::error::Codex output file not found" @@ -244,185 +244,19 @@ jobs: echo "Codex output:" cat "${{ env.CODEX_OUTPUT }}" - # Parse Codex JSON output, update package.json files, and update changelogs - RESULT=$(node << 'NODEJS_SCRIPT' - const fs = require('fs'); - const output = JSON.parse(fs.readFileSync('${{ env.CODEX_OUTPUT }}', 'utf8')); - const today = new Date().toISOString().split('T')[0]; - - let maxVersion = '0.0.0'; - const bumpedProjects = []; - const internalVersions = {}; - - // Helper to increment patch version - function bumpPatch(version) { - const parts = version.split('.'); - if (parts.length < 3 || isNaN(parseInt(parts[2], 10))) { - throw new Error('Invalid semantic version: ' + version); - } - parts[2] = String(parseInt(parts[2], 10) + 1); - return parts.join('.'); - } - - // Helper to check if version is exact (no range specifier) - function isExactVersion(version) { - return /^\d+\.\d+\.\d+$/.test(version); - } - - // Helper to generate changelog entry - function generateChangelogEntry(version, changelog) { - const categories = [ - { key: 'added', title: 'Added' }, - { key: 'changed', title: 'Changed' }, - { key: 'deprecated', title: 'Deprecated' }, - { key: 'removed', title: 'Removed' }, - { key: 'fixed', title: 'Fixed' }, - { key: 
'security', title: 'Security' } - ]; - - let entry = '## [' + version + '] - ' + today + '\n'; - let hasContent = false; - - for (const cat of categories) { - const items = changelog[cat.key] || []; - if (items.length > 0) { - entry += '\n### ' + cat.title + '\n\n'; - for (const item of items) { - entry += '- ' + item + '\n'; - } - hasContent = true; - } - } - - return hasContent ? entry : null; - } - - // Update per-library changelogs - for (const proj of output.projects) { - if (proj.bump !== 'none') { - // Update package.json - const pkgPath = 'libs/' + proj.name + '/package.json'; - const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8')); - pkg.version = proj.newVersion; - fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2) + '\n'); - console.error('Updated ' + proj.name + ' to ' + proj.newVersion + ' (' + proj.bump + '): ' + proj.reason); - bumpedProjects.push(proj.name); - internalVersions[proj.name] = proj.newVersion; - - // Track max version - if (proj.newVersion.localeCompare(maxVersion, undefined, { numeric: true, sensitivity: 'base' }) > 0) { - maxVersion = proj.newVersion; - } - - // Update per-lib changelog - const changelogPath = 'libs/' + proj.name + '/CHANGELOG.md'; - if (fs.existsSync(changelogPath) && proj.changelog) { - const entry = generateChangelogEntry(proj.newVersion, proj.changelog); - if (entry) { - let content = fs.readFileSync(changelogPath, 'utf8'); - // Insert after ## [Unreleased] section - const unreleasedMatch = content.match(/## \[Unreleased\]\n*/); - if (unreleasedMatch) { - const insertPos = unreleasedMatch.index + unreleasedMatch[0].length; - content = content.slice(0, insertPos) + '\n' + entry + '\n' + content.slice(insertPos); - } else { - // No Unreleased section, insert after header - const headerEnd = content.indexOf('\n\n') + 2; - content = content.slice(0, headerEnd) + '## [Unreleased]\n\n' + entry + '\n' + content.slice(headerEnd); - } - fs.writeFileSync(changelogPath, content); - console.error('Updated changelog: ' 
+ changelogPath); - } - } - } - } - - // Second pass: Sync internal dependencies across ALL publishable packages - // If a package depends on a bumped package, update the dep version and patch-bump the dependent - const { execSync } = require('child_process'); - let allLibs = []; - try { - const allPublishable = execSync('npx nx show projects -p tag:scope:publishable --type lib --json', { encoding: 'utf8' }); - allLibs = JSON.parse(allPublishable); - } catch (e) { - console.error('Warning: Could not get publishable libs for dependency sync'); - } - - for (const libName of allLibs) { - const pkgPath = 'libs/' + libName + '/package.json'; - if (!fs.existsSync(pkgPath)) continue; - - const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8')); - let modified = false; - let needsBump = false; - - // Check if any internal dependency was bumped - // Only update exact-pinned versions (internal deps), skip range specifiers (external deps) - if (pkg.dependencies) { - for (const [dep, version] of Object.entries(pkg.dependencies)) { - if (internalVersions[dep] && isExactVersion(version) && version !== internalVersions[dep]) { - pkg.dependencies[dep] = internalVersions[dep]; - modified = true; - needsBump = true; - console.error('Updated ' + libName + ' dep ' + dep + ' to ' + internalVersions[dep]); - } - } - } - - // If this package wasn't already bumped but has updated deps, patch bump it - if (needsBump && !internalVersions[libName]) { - const newVersion = bumpPatch(pkg.version); - pkg.version = newVersion; - internalVersions[libName] = newVersion; - bumpedProjects.push(libName); - console.error('Patch bumped ' + libName + ' to ' + newVersion + ' (dependency update)'); - - // Update max version - if (newVersion.localeCompare(maxVersion, undefined, { numeric: true, sensitivity: 'base' }) > 0) { - maxVersion = newVersion; - } - } - - if (modified) { - fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2) + '\n'); - } - } - - // Update global changelog - if 
(output.globalChangelog && bumpedProjects.length > 0) { - const globalPath = 'CHANGELOG.md'; - if (fs.existsSync(globalPath)) { - let content = fs.readFileSync(globalPath, 'utf8'); - - // Build global entry - let globalEntry = '## [' + maxVersion + '] - ' + today + '\n\n'; - globalEntry += output.globalChangelog.summary + '\n\n'; - globalEntry += '### Updated Libraries\n\n'; - for (const p of output.globalChangelog.projects) { - globalEntry += '- **' + p.name + '** v' + p.version + ' - ' + p.summary + '\n'; - } - - // Insert after ## [Unreleased] section - const unreleasedMatch = content.match(/## \[Unreleased\]\n*/); - if (unreleasedMatch) { - const insertPos = unreleasedMatch.index + unreleasedMatch[0].length; - content = content.slice(0, insertPos) + '\n' + globalEntry + '\n' + content.slice(insertPos); - } - fs.writeFileSync(globalPath, content); - console.error('Updated global changelog'); - } - } - - // Log docs update summary - if (output.docs && output.docs.updated) { - console.error('Documentation updated: ' + output.docs.summary); - console.error('Files modified: ' + output.docs.files.join(', ')); - } - - // Output for GitHub Actions - console.log(JSON.stringify({ maxVersion, bumpedProjects: bumpedProjects.join(',') })); - NODEJS_SCRIPT - ) + # Run Nx Release script to bump versions and update changelogs + node scripts/nx-release.mjs + + # Extract max version and bumped projects from Codex output + RESULT=$(node -e " + const fs = require('fs'); + const output = JSON.parse(fs.readFileSync('${{ env.CODEX_OUTPUT }}', 'utf8')); + const bumped = output.projects.filter(p => p.bump !== 'none'); + const maxVersion = bumped.map(p => p.newVersion) + .sort((a,b) => b.localeCompare(a, undefined, {numeric: true}))[0] || '0.0.0'; + const bumpedProjects = bumped.map(p => p.name).join(','); + console.log(JSON.stringify({ maxVersion, bumpedProjects })); + ") MAX_VERSION=$(echo "$RESULT" | jq -r '.maxVersion') BUMPED=$(echo "$RESULT" | jq -r '.bumpedProjects') @@ -432,6 
+266,8 @@ jobs: echo "Max version: $MAX_VERSION" echo "Bumped projects: $BUMPED" + env: + CODEX_OUTPUT: ${{ env.CODEX_OUTPUT }} - name: Log release analysis result if: steps.affected.outputs.projects != '' diff --git a/.github/workflows/publish-on-next-close.yml b/.github/workflows/publish-on-next-close.yml index e5de6a1..59c46ae 100644 --- a/.github/workflows/publish-on-next-close.yml +++ b/.github/workflows/publish-on-next-close.yml @@ -128,25 +128,6 @@ jobs: if: steps.to_publish.outputs.projects == '' run: echo "Nothing to publish." - - name: Determine release version - id: version - shell: bash - env: - PR_HEAD_REF: ${{ github.event.pull_request.head.ref }} - run: | - set -euo pipefail - - HEADREF="$PR_HEAD_REF" - - if [[ "$HEADREF" =~ ^next/([0-9]+\.[0-9]+\.[0-9]+)$ ]]; then - VERSION="${BASH_REMATCH[1]}" - else - VERSION=$(node -e "try{console.log(require('./package.json').version||'0.0.0')}catch{console.log('0.0.0')}") - fi - - echo "version=$VERSION" >> "$GITHUB_OUTPUT" - echo "Release version: $VERSION" - - name: Determine release SHA id: release_sha shell: bash @@ -169,106 +150,95 @@ jobs: git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" - - name: Create git tags + - name: Create git tags (per-project) + if: steps.to_publish.outputs.projects != '' shell: bash run: | set -euo pipefail - VERSION="${{ steps.version.outputs.version }}" TARGET_SHA="${{ steps.release_sha.outputs.sha }}" PROJECTS="${{ steps.to_publish.outputs.projects }}" git fetch --tags - # Create global version tag - GLOBAL_TAG="v$VERSION" - if git rev-parse "$GLOBAL_TAG" >/dev/null 2>&1; then - echo "Tag $GLOBAL_TAG already exists." 
- else - echo "Creating global tag $GLOBAL_TAG at $TARGET_SHA" - git tag -a "$GLOBAL_TAG" "$TARGET_SHA" -m "Release $GLOBAL_TAG" - git push origin "$GLOBAL_TAG" - fi - - # Create per-project tags - if [ -n "$PROJECTS" ]; then - IFS=',' read -ra LIBS <<< "$PROJECTS" - for lib in "${LIBS[@]}"; do - # Read project version from package.json - LIB_VERSION=$(node -e "const pkg = require('./libs/$lib/package.json'); console.log(pkg.version || '$VERSION');") - - PROJECT_TAG="${lib}@${LIB_VERSION}" - - if git rev-parse "$PROJECT_TAG" >/dev/null 2>&1; then - echo "Tag $PROJECT_TAG already exists." - else - echo "Creating project tag $PROJECT_TAG at $TARGET_SHA" - git tag -a "$PROJECT_TAG" "$TARGET_SHA" -m "Release $PROJECT_TAG" - git push origin "$PROJECT_TAG" - fi - done - fi + # Create per-project tags (independent versioning) + IFS=',' read -ra LIBS <<< "$PROJECTS" + for lib in "${LIBS[@]}"; do + # Read project version from package.json + LIB_VERSION=$(node -e "const pkg = require('./libs/$lib/package.json'); console.log(pkg.version);") + + PROJECT_TAG="${lib}@${LIB_VERSION}" + + if git rev-parse "$PROJECT_TAG" >/dev/null 2>&1; then + echo "Tag $PROJECT_TAG already exists." + else + echo "Creating project tag $PROJECT_TAG at $TARGET_SHA" + git tag -a "$PROJECT_TAG" "$TARGET_SHA" -m "Release $PROJECT_TAG" + git push origin "$PROJECT_TAG" + fi + done - name: Build packages if: steps.to_publish.outputs.projects != '' run: yarn nx run-many --targets=build --projects="${{ steps.to_publish.outputs.projects }}" --parallel - - name: Publish to npm + - name: Publish to npm via Nx Release if: steps.to_publish.outputs.projects != '' shell: bash run: | set -euo pipefail - echo "Publishing selected projects via npm trusted publishing..." + echo "Publishing selected projects via Nx Release..." 
+ npx nx release publish --projects="${{ steps.to_publish.outputs.projects }}" - IFS=',' read -ra PROJECTS <<< "${{ steps.to_publish.outputs.projects }}" - for project in "${PROJECTS[@]}"; do - echo "Publishing $project..." - yarn nx run "$project:publish" - done - - - name: Generate release body - id: release_body + - name: Create GitHub Releases (per-project) + if: steps.to_publish.outputs.projects != '' shell: bash + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | set -euo pipefail PROJECTS="${{ steps.to_publish.outputs.projects }}" - VERSION="${{ steps.version.outputs.version }}" - - if [ -n "$PROJECTS" ]; then - # Build release body with published packages info - BODY="## Published Packages\n\n" - - IFS=',' read -ra LIBS <<< "$PROJECTS" - for lib in "${LIBS[@]}"; do - if [ -f "libs/$lib/package.json" ]; then - LIB_VERSION=$(node -e "console.log(require('./libs/$lib/package.json').version)") - BODY+="- **${lib}** v${LIB_VERSION} - [npm](https://www.npmjs.com/package/${lib}/v/${LIB_VERSION})\n" - fi - done - - BODY+="\n---\n\n" - else - BODY="*No packages published in this release.*\n\n---\n\n" - fi - - # Save body to file for multiline support - echo -e "$BODY" > /tmp/release_body.md - - name: Create GitHub Release - uses: softprops/action-gh-release@v2 - with: - tag_name: v${{ steps.version.outputs.version }} - name: v${{ steps.version.outputs.version }} - body_path: /tmp/release_body.md - generate_release_notes: true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + IFS=',' read -ra LIBS <<< "$PROJECTS" + for lib in "${LIBS[@]}"; do + if [ -f "libs/$lib/package.json" ]; then + LIB_VERSION=$(node -e "console.log(require('./libs/$lib/package.json').version)") + TAG_NAME="${lib}@${LIB_VERSION}" + + echo "Creating GitHub Release for $TAG_NAME..." 
+ + # Generate release body using heredoc + BODY=$(cat <> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY echo "**Published projects:** ${{ steps.to_publish.outputs.projects }}" >> $GITHUB_STEP_SUMMARY - echo "**Release tag:** v${{ steps.version.outputs.version }}" >> $GITHUB_STEP_SUMMARY echo "**Release commit:** ${{ steps.release_sha.outputs.sha }}" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "### Release Tags" >> $GITHUB_STEP_SUMMARY + IFS=',' read -ra LIBS <<< "${{ steps.to_publish.outputs.projects }}" + for lib in "${LIBS[@]}"; do + if [ -f "libs/$lib/package.json" ]; then + LIB_VERSION=$(node -e "console.log(require('./libs/$lib/package.json').version)") + echo "- ${lib}@${LIB_VERSION}" >> $GITHUB_STEP_SUMMARY + fi + done diff --git a/libs/ast-guard/project.json b/libs/ast-guard/project.json index 6b093a4..1f01a18 100644 --- a/libs/ast-guard/project.json +++ b/libs/ast-guard/project.json @@ -22,11 +22,11 @@ "command": "node scripts/strip-dist-from-pkg.js libs/ast-guard/dist/package.json" } }, - "publish": { - "executor": "nx:run-commands", + "nx-release-publish": { + "executor": "@nx/js:release-publish", "dependsOn": ["build"], "options": { - "command": "npm publish libs/ast-guard/dist --access public --registry=https://registry.npmjs.org/" + "packageRoot": "libs/ast-guard/dist" } } } diff --git a/libs/enclave-vm/project.json b/libs/enclave-vm/project.json index 428bfbf..a345a99 100644 --- a/libs/enclave-vm/project.json +++ b/libs/enclave-vm/project.json @@ -23,11 +23,11 @@ "command": "node scripts/strip-dist-from-pkg.js libs/enclave-vm/dist/package.json" } }, - "publish": { - "executor": "nx:run-commands", + "nx-release-publish": { + "executor": "@nx/js:release-publish", "dependsOn": ["build"], "options": { - "command": "npm publish libs/enclave-vm/dist --access public --registry=https://registry.npmjs.org/" + "packageRoot": "libs/enclave-vm/dist" } } } diff --git a/libs/vectoriadb/project.json b/libs/vectoriadb/project.json index 
0646598..3b1c37f 100644 --- a/libs/vectoriadb/project.json +++ b/libs/vectoriadb/project.json @@ -22,11 +22,11 @@ "command": "node scripts/strip-dist-from-pkg.js libs/vectoriadb/dist/package.json" } }, - "publish": { - "executor": "nx:run-commands", + "nx-release-publish": { + "executor": "@nx/js:release-publish", "dependsOn": ["build"], "options": { - "command": "npm publish libs/vectoriadb/dist --access public --registry=https://registry.npmjs.org/" + "packageRoot": "libs/vectoriadb/dist" } } } diff --git a/nx.json b/nx.json index c5e2db8..a4f8e3c 100644 --- a/nx.json +++ b/nx.json @@ -82,8 +82,20 @@ }, "release": { "projects": ["libs/*"], + "projectsRelationship": "independent", + "releaseTagPattern": "{projectName}@{version}", "version": { - "preVersionCommand": "npx nx run-many -t build" + "conventionalCommits": false, + "preVersionCommand": "npx nx run-many -t build", + "git": { + "commit": true, + "tag": true, + "commitMessage": "chore(release): bump versions\n\n{projectName}@{version}" + } + }, + "changelog": { + "projectChangelogs": false, + "workspaceChangelog": false } } } diff --git a/scripts/analyze-version-bump.mjs b/scripts/analyze-version-bump.mjs deleted file mode 100644 index 5158a8d..0000000 --- a/scripts/analyze-version-bump.mjs +++ /dev/null @@ -1,339 +0,0 @@ -#!/usr/bin/env node -import { execSync } from "node:child_process"; -import fs from "node:fs/promises"; -import path from "node:path"; - -/** - * Analyze git changes to determine semantic version bump type - * - * Usage: node scripts/analyze-version-bump.mjs [base-ref] - * Example: node scripts/analyze-version-bump.mjs ast-guard v1.0.0 - * - * If base-ref is not provided, uses the latest tag or first commit. 
- * - * Returns: major | minor | patch - */ - -const [, , projectName, baseRefArg] = process.argv; - -if (!projectName) { - console.error("Usage: node scripts/analyze-version-bump.mjs [base-ref]"); - console.error("Example: node scripts/analyze-version-bump.mjs ast-guard v1.0.0"); - process.exit(1); -} - -/** - * Get the base reference for comparison - * @returns {string} Git reference (tag or commit SHA) - */ -function getBaseRef() { - if (baseRefArg) { - return baseRefArg; - } - - // Try to find the latest tag for this project - try { - const projectTag = execSync(`git tag --list "${projectName}@*" --sort=-version:refname`, { - encoding: "utf8", - }).trim(); - - if (projectTag) { - const firstTag = projectTag.split("\n")[0]; - console.error(`Using project tag: ${firstTag}`); - return firstTag; - } - } catch { - // No project-specific tags found - } - - // Try global version tags - try { - const globalTag = execSync('git tag --list "v*" --sort=-version:refname', { - encoding: "utf8", - }).trim(); - - if (globalTag) { - const firstTag = globalTag.split("\n")[0]; - console.error(`Using global tag: ${firstTag}`); - return firstTag; - } - } catch { - // No global tags found - } - - // Fall back to first commit (initial release) - const firstCommit = execSync("git rev-list --max-parents=0 HEAD", { - encoding: "utf8", - }).trim(); - - console.error(`No tags found, using first commit: ${firstCommit.slice(0, 8)}`); - return firstCommit; -} - -/** - * Get changed files for a project since base ref - * @param {string} projectPath - Path to the project - * @param {string} baseRef - Git reference to compare against - * @returns {Object} Object with arrays of changed files by type - */ -function getChangedFiles(projectPath, baseRef) { - const changes = { - deleted: [], - added: [], - modified: [], - renamed: [], - }; - - try { - const diff = execSync(`git diff --name-status ${baseRef}..HEAD -- ${projectPath}`, { - encoding: "utf8", - }).trim(); - - if (!diff) { - return 
changes; - } - - for (const line of diff.split("\n")) { - if (!line.trim()) continue; - - const [status, ...fileParts] = line.split("\t"); - const file = fileParts.join("\t"); // Handle files with tabs in names - - // Handle rename status (R100, R095, etc.) - if (status.startsWith("R")) { - changes.renamed.push(file); - } else if (status === "D") { - changes.deleted.push(file); - } else if (status === "A") { - changes.added.push(file); - } else if (status === "M") { - changes.modified.push(file); - } - } - } catch (error) { - console.error(`Error getting diff: ${error.message}`); - } - - return changes; -} - -/** - * Check if a file is a source file (not test, doc, or config) - * @param {string} file - File path - * @returns {boolean} - */ -function isSourceFile(file) { - // Source files are in src/ but not in __tests__ - if (!file.includes("/src/")) return false; - if (file.includes("/__tests__/")) return false; - if (file.includes(".spec.")) return false; - if (file.includes(".test.")) return false; - return true; -} - -/** - * Check if a file is an index/export file - * @param {string} file - File path - * @returns {boolean} - */ -function isExportFile(file) { - const basename = path.basename(file); - return basename === "index.ts" || basename === "index.js"; -} - -/** - * Analyze changes and determine version bump type - * @param {Object} changes - Changed files by type - * @returns {string} major | minor | patch - */ -function analyzeChanges(changes) { - const allChanges = [ - ...changes.deleted, - ...changes.added, - ...changes.modified, - ...changes.renamed, - ]; - - if (allChanges.length === 0) { - console.error("No changes detected"); - return "patch"; - } - - console.error(`Analyzing ${allChanges.length} changed files...`); - - // Check for deleted source files (breaking change) - const deletedSourceFiles = changes.deleted.filter(isSourceFile); - if (deletedSourceFiles.length > 0) { - console.error(`Found ${deletedSourceFiles.length} deleted source files 
- MAJOR`); - console.error(` ${deletedSourceFiles.slice(0, 3).join("\n ")}`); - return "major"; - } - - // Check for renamed source files (potentially breaking) - const renamedSourceFiles = changes.renamed.filter((f) => { - const parts = f.split("\t"); - return parts.some(isSourceFile); - }); - if (renamedSourceFiles.length > 0) { - console.error(`Found ${renamedSourceFiles.length} renamed source files - MAJOR`); - return "major"; - } - - // Check for modified index/export files (could be breaking or new features) - const modifiedExportFiles = changes.modified.filter(isExportFile); - if (modifiedExportFiles.length > 0) { - // Modified exports could be breaking - treat as minor to be safe - // A proper implementation would parse the exports and compare - console.error(`Found ${modifiedExportFiles.length} modified export files - MINOR`); - return "minor"; - } - - // Check for new source files (new feature) - const newSourceFiles = changes.added.filter(isSourceFile); - if (newSourceFiles.length > 0) { - console.error(`Found ${newSourceFiles.length} new source files - MINOR`); - console.error(` ${newSourceFiles.slice(0, 3).join("\n ")}`); - return "minor"; - } - - // Check for package.json changes (dependency updates) - const packageJsonChanged = allChanges.some((f) => f.endsWith("package.json")); - if (packageJsonChanged) { - console.error("package.json changed - MINOR"); - return "minor"; - } - - // Check for any source file modifications - const modifiedSourceFiles = changes.modified.filter(isSourceFile); - if (modifiedSourceFiles.length > 0) { - console.error(`Found ${modifiedSourceFiles.length} modified source files - PATCH`); - return "patch"; - } - - // Only non-source changes (docs, tests, configs) - console.error("Only non-source changes detected - PATCH"); - return "patch"; -} - -/** - * Set version for a project (used for first release) - * @param {string} projectName - Name of the project - * @param {string} version - Version to set - */ -async function 
setVersion(projectName, version) { - const packagePath = path.join(process.cwd(), "libs", projectName, "package.json"); - - try { - const content = await fs.readFile(packagePath, "utf8"); - const pkg = JSON.parse(content); - const oldVersion = pkg.version; - pkg.version = version; - await fs.writeFile(packagePath, JSON.stringify(pkg, null, 2) + "\n", "utf8"); - console.error(`Set ${projectName} version: ${oldVersion} → ${version}`); - } catch (error) { - console.error(`Error setting version: ${error.message}`); - process.exit(1); - } -} - -/** - * Bump version for a project - * @param {string} projectName - Name of the project - * @param {string} bumpType - major | minor | patch - */ -async function bumpVersion(projectName, bumpType) { - const packagePath = path.join(process.cwd(), "libs", projectName, "package.json"); - - try { - const content = await fs.readFile(packagePath, "utf8"); - const pkg = JSON.parse(content); - const oldVersion = pkg.version || "0.0.0"; - const [major, minor, patch] = oldVersion.split(".").map(Number); - - let newVersion; - switch (bumpType) { - case "major": - newVersion = `${major + 1}.0.0`; - break; - case "minor": - newVersion = `${major}.${minor + 1}.0`; - break; - case "patch": - default: - newVersion = `${major}.${minor}.${patch + 1}`; - break; - } - - pkg.version = newVersion; - await fs.writeFile(packagePath, JSON.stringify(pkg, null, 2) + "\n", "utf8"); - console.error(`Bumped ${projectName}: ${oldVersion} → ${newVersion} (${bumpType})`); - - // Output new version for CI - console.log(newVersion); - } catch (error) { - console.error(`Error bumping version: ${error.message}`); - process.exit(1); - } -} - -/** - * Check if this is the first release (no prior tags) - * @returns {boolean} - */ -function isFirstRelease() { - try { - // Check for any project-specific or global tags - const projectTags = execSync(`git tag --list "${projectName}@*"`, { encoding: "utf8" }).trim(); - const globalTags = execSync('git tag --list "v*"', { 
encoding: "utf8" }).trim(); - - return !projectTags && !globalTags; - } catch { - return true; - } -} - -async function main() { - const projectPath = `libs/${projectName}`; - - // Verify project exists - try { - await fs.access(path.join(process.cwd(), projectPath)); - } catch { - console.error(`Project not found: ${projectPath}`); - process.exit(1); - } - - // Check for first release - if (isFirstRelease()) { - console.error("First release detected - setting version to 1.0.0"); - await setVersion(projectName, "1.0.0"); - console.log("1.0.0"); - return; - } - - const baseRef = getBaseRef(); - console.error(`Base ref: ${baseRef}`); - - const changes = getChangedFiles(projectPath, baseRef); - - const totalChanges = - changes.deleted.length + changes.added.length + changes.modified.length + changes.renamed.length; - - if (totalChanges === 0) { - console.error("No changes detected for this project"); - // Still output current version - const packagePath = path.join(process.cwd(), projectPath, "package.json"); - const content = await fs.readFile(packagePath, "utf8"); - const pkg = JSON.parse(content); - console.log(pkg.version || "0.0.0"); - return; - } - - const bumpType = analyzeChanges(changes); - await bumpVersion(projectName, bumpType); -} - -main().catch((error) => { - console.error("Fatal error:", error); - process.exit(1); -}); diff --git a/scripts/bump-synchronized-versions.mjs b/scripts/bump-synchronized-versions.mjs deleted file mode 100755 index 3d17e84..0000000 --- a/scripts/bump-synchronized-versions.mjs +++ /dev/null @@ -1,146 +0,0 @@ -#!/usr/bin/env node -import fs from "node:fs/promises"; -import path from "node:path"; -import { execSync } from "node:child_process"; - -/** - * Bump version for all synchronized libraries - * Usage: node scripts/bump-synchronized-versions.mjs - * Example: node scripts/bump-synchronized-versions.mjs 0.4.0 - */ - -const [, , newVersion] = process.argv; - -if (!newVersion) { - console.error("Usage: node 
scripts/bump-synchronized-versions.mjs "); - console.error("Example: node scripts/bump-synchronized-versions.mjs 0.4.0"); - process.exit(1); -} - -// Validate version format -if (!/^\d+\.\d+\.\d+$/.test(newVersion)) { - console.error(`Invalid version format: ${newVersion}. Must be semver (e.g., 0.4.0)`); - process.exit(1); -} - -async function getSynchronizedLibs() { - try { - const output = execSync( - 'npx nx show projects -p tag:versioning:synchronized --type lib --json', - { encoding: 'utf8' } - ); - return JSON.parse(output); - } catch (error) { - console.error("Error fetching synchronized libraries:", error.message); - process.exit(1); - } -} - -async function updateLibVersion(libName, newVersion) { - const libPath = path.join(process.cwd(), "libs", libName, "package.json"); - - try { - const content = await fs.readFile(libPath, "utf8"); - const pkg = JSON.parse(content); - - const oldVersion = pkg.version; - pkg.version = newVersion; - - await fs.writeFile(libPath, JSON.stringify(pkg, null, 2) + "\n", "utf8"); - - console.log(`✓ Updated ${libName} from ${oldVersion} to ${newVersion}`); - return { libName, oldVersion, newVersion }; - } catch (error) { - console.error(`✗ Error updating ${libName}:`, error.message); - return null; - } -} - -async function updateRootVersion(newVersion) { - const rootPath = path.join(process.cwd(), "package.json"); - - try { - const content = await fs.readFile(rootPath, "utf8"); - const pkg = JSON.parse(content); - - const oldVersion = pkg.version; - pkg.version = newVersion; - - await fs.writeFile(rootPath, JSON.stringify(pkg, null, 2) + "\n", "utf8"); - - console.log(`✓ Updated root package.json from ${oldVersion} to ${newVersion}`); - } catch (error) { - console.error(`✗ Error updating root package.json:`, error.message); - } -} - -async function updateDependencies(libs, newVersion) { - console.log("\nUpdating internal dependencies..."); - - for (const libName of libs) { - const libPath = path.join(process.cwd(), "libs", 
libName, "package.json"); - - try { - const content = await fs.readFile(libPath, "utf8"); - const pkg = JSON.parse(content); - - let updated = false; - - // Update dependencies - for (const depType of ["dependencies", "devDependencies", "peerDependencies"]) { - if (pkg[depType]) { - for (const dep of libs) { - const depPkgPath = path.join(process.cwd(), "libs", dep, "package.json"); - const depPkg = JSON.parse(await fs.readFile(depPkgPath, "utf8")); - const depName = depPkg.name; - - if (pkg[depType][depName]) { - pkg[depType][depName] = newVersion; - updated = true; - } - } - } - } - - if (updated) { - await fs.writeFile(libPath, JSON.stringify(pkg, null, 2) + "\n", "utf8"); - console.log(`✓ Updated dependencies in ${libName}`); - } - } catch (error) { - console.error(`✗ Error updating dependencies in ${libName}:`, error.message); - } - } -} - -async function main() { - console.log(`Bumping all synchronized libraries to version ${newVersion}...\n`); - - const libs = await getSynchronizedLibs(); - - if (!libs || libs.length === 0) { - console.log("No synchronized libraries found."); - return; - } - - console.log(`Found ${libs.length} synchronized libraries: ${libs.join(", ")}\n`); - - // Update all library versions - const results = []; - for (const lib of libs) { - const result = await updateLibVersion(lib, newVersion); - if (result) results.push(result); - } - - // Update root package.json - await updateRootVersion(newVersion); - - // Update internal dependencies - await updateDependencies(libs, newVersion); - - console.log(`\n✅ Successfully bumped ${results.length} synchronized libraries to ${newVersion}`); -} - -main().catch((error) => { - console.error("Fatal error:", error); - process.exit(1); -}); diff --git a/scripts/bump-version.mjs b/scripts/bump-version.mjs deleted file mode 100755 index dee28c6..0000000 --- a/scripts/bump-version.mjs +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env node -import fs from "node:fs/promises"; -import path from "node:path"; - 
-/** - * Bump version script for libraries - * Usage: node scripts/bump-version.mjs - * Example: node scripts/bump-version.mjs json-schema-to-zod-v3 patch - */ - -const [, , libName, bumpType] = process.argv; - -if (!libName || !bumpType) { - console.error("Usage: node scripts/bump-version.mjs "); - console.error("Bump types: major, minor, patch"); - process.exit(1); -} - -if (!["major", "minor", "patch"].includes(bumpType)) { - console.error(`Invalid bump type: ${bumpType}. Must be: major, minor, or patch`); - process.exit(1); -} - -const libPath = path.join(process.cwd(), "libs", libName, "package.json"); - -try { - // Read current package.json - const content = await fs.readFile(libPath, "utf8"); - const pkg = JSON.parse(content); - - const oldVersion = pkg.version; - if (!oldVersion) { - console.error(`No version found in ${libPath}`); - process.exit(1); - } - - // Parse version - const [major, minor, patch] = oldVersion.split(".").map(Number); - - if (isNaN(major) || isNaN(minor) || isNaN(patch)) { - console.error(`Invalid semver format in ${libPath}: ${oldVersion}`); - process.exit(1); - } - - // Bump version - let newVersion; - switch (bumpType) { - case "major": - newVersion = `${major + 1}.0.0`; - break; - case "minor": - newVersion = `${major}.${minor + 1}.0`; - break; - case "patch": - newVersion = `${major}.${minor}.${patch + 1}`; - break; - } - - pkg.version = newVersion; - - // Write updated package.json - await fs.writeFile(libPath, JSON.stringify(pkg, null, 2) + "\n", "utf8"); - - console.log(`✓ Bumped ${libName} from ${oldVersion} to ${newVersion}`); - - // Output the new version for use in workflows - if (process.env.GITHUB_OUTPUT) { - await fs.appendFile(process.env.GITHUB_OUTPUT, `new_version=${newVersion}\n`); - await fs.appendFile(process.env.GITHUB_OUTPUT, `old_version=${oldVersion}\n`); - } - -} catch (error) { - console.error(`Error bumping version for ${libName}:`, error.message); - process.exit(1); -} diff --git a/scripts/nx-release.mjs 
/**
 * Nx Release script that integrates with Codex AI output.
 *
 * Reads version decisions from a Codex-generated JSON file and uses the
 * Nx Release programmatic API to bump versions and sync internal
 * dependencies. Changelog entries come from the Codex output — Nx's own
 * changelog generation is not used.
 *
 * Usage:
 *   CODEX_OUTPUT=.codex-release/release-output.json node scripts/nx-release.mjs
 *
 * Options:
 *   DRY_RUN=true - Preview changes without applying them
 */

import { releaseVersion } from 'nx/release';
import fs from 'fs';
import path from 'path';

/** Keep-a-Changelog category order and display titles. */
const CHANGELOG_CATEGORIES = [
  { key: 'added', title: 'Added' },
  { key: 'changed', title: 'Changed' },
  { key: 'deprecated', title: 'Deprecated' },
  { key: 'removed', title: 'Removed' },
  { key: 'fixed', title: 'Fixed' },
  { key: 'security', title: 'Security' },
];

/**
 * Insert `entry` directly below the "## [Unreleased]" heading.
 *
 * @param {string} content - Full changelog file contents.
 * @param {string} entry - Pre-formatted changelog entry to insert.
 * @returns {string|null} Updated contents, or null when the file has no
 *   "## [Unreleased]" section.
 */
function insertAfterUnreleased(content, entry) {
  const headingIdx = content.indexOf('## [Unreleased]');
  if (headingIdx === -1) return null;
  const newlineIdx = content.indexOf('\n', headingIdx);
  // If the heading is the last line and the file has no trailing newline,
  // indexOf returns -1; append at the end instead of inserting at offset 0
  // (the original `indexOf(...) + 1` prepended the entry before the header
  // in that case).
  const insertAt = newlineIdx === -1 ? content.length : newlineIdx + 1;
  return content.slice(0, insertAt) + '\n' + entry + '\n' + content.slice(insertAt);
}

/**
 * Build a Keep-a-Changelog style entry for one release.
 *
 * @param {string} version - The new semantic version.
 * @param {object} changelog - Codex changelog object keyed by category
 *   (added/changed/deprecated/removed/fixed/security), each an array of strings.
 * @param {string} date - ISO date (YYYY-MM-DD, UTC).
 * @returns {string|null} Formatted entry, or null when every category is empty.
 */
function formatChangelogEntry(version, changelog, date) {
  let entry = `## [${version}] - ${date}\n`;
  let hasContent = false;

  for (const { key, title } of CHANGELOG_CATEGORIES) {
    const items = changelog[key] ?? [];
    if (items.length === 0) continue;
    entry += `\n### ${title}\n\n`;
    for (const item of items) {
      entry += `- ${item}\n`;
    }
    hasContent = true;
  }

  return hasContent ? entry : null;
}

/**
 * Write the Codex-provided changelog entry for one project's CHANGELOG.md.
 * No-op on dry runs or when the project has no changelog data.
 *
 * @param {object} project - Codex project record (name/newVersion/changelog).
 * @param {string} today - ISO date (YYYY-MM-DD, UTC).
 * @param {boolean} dryRun - When true, skip all writes.
 */
function applyProjectChangelog(project, today, dryRun) {
  if (!project.changelog || dryRun) return;

  const changelogPath = path.join('libs', project.name, 'CHANGELOG.md');
  if (!fs.existsSync(changelogPath)) {
    // Previously a silent skip; surface it so missing files are noticed in CI.
    console.log(`⚠ No CHANGELOG.md for ${project.name}, skipping changelog update`);
    return;
  }

  const entry = formatChangelogEntry(project.newVersion, project.changelog, today);
  if (!entry) return;

  const content = fs.readFileSync(changelogPath, 'utf8');
  const updated = insertAfterUnreleased(content, entry);
  if (updated === null) {
    console.log(`⚠ Skipped changelog update for ${project.name}: missing "## [Unreleased]" section`);
    return;
  }
  fs.writeFileSync(changelogPath, updated);
  console.log(`✓ Updated changelog: ${changelogPath}`);
}

/**
 * Prepend a roll-up entry to the repo-root CHANGELOG.md.
 *
 * The entry is stamped with the highest version released in this run and
 * lists every updated library with its Codex summary.
 *
 * @param {object} globalChangelog - Codex `globalChangelog` object
 *   ({ summary, projects: [{ name, version, summary }] }).
 * @param {Record<string, string>} versionResults - project name -> new version.
 * @param {string} date - ISO date (YYYY-MM-DD, UTC).
 */
function updateGlobalChangelog(globalChangelog, versionResults, date) {
  const globalPath = 'CHANGELOG.md';
  if (!fs.existsSync(globalPath)) {
    console.log('Global CHANGELOG.md not found, skipping');
    return;
  }

  const versions = Object.values(versionResults);
  if (versions.length === 0) return;

  // Highest semver wins; numeric collation makes "1.10.0" sort above "1.9.0".
  // Copy before sorting so the original array is not mutated.
  const maxVersion = [...versions].sort((a, b) =>
    b.localeCompare(a, undefined, { numeric: true, sensitivity: 'base' }),
  )[0];

  let globalEntry = `## [${maxVersion}] - ${date}\n\n`;
  globalEntry += globalChangelog.summary + '\n\n';
  globalEntry += '### Updated Libraries\n\n';
  for (const p of globalChangelog.projects ?? []) {
    globalEntry += `- **${p.name}** v${p.version} - ${p.summary}\n`;
  }

  const content = fs.readFileSync(globalPath, 'utf8');
  const updated = insertAfterUnreleased(content, globalEntry);
  if (updated === null) {
    // Previously this failed silently; make the skip visible in CI logs.
    console.log('⚠ Skipped global changelog update: missing "## [Unreleased]" section');
    return;
  }
  fs.writeFileSync(globalPath, updated);
  console.log('✓ Updated global changelog');
}

/**
 * Entry point: validate the Codex output file, version each project that
 * needs a bump via Nx Release, apply Codex changelogs, and emit a
 * machine-readable result block for the calling workflow.
 */
async function main() {
  const codexOutputPath = process.env.CODEX_OUTPUT;
  const dryRun = process.env.DRY_RUN === 'true';

  if (!codexOutputPath) {
    console.error('Error: CODEX_OUTPUT environment variable not set');
    process.exit(1);
  }

  if (!fs.existsSync(codexOutputPath)) {
    console.error(`Error: Codex output file not found: ${codexOutputPath}`);
    process.exit(1);
  }

  console.log(`Reading Codex output from: ${codexOutputPath}`);
  console.log(`Dry run: ${dryRun}`);

  let codexOutput;
  try {
    codexOutput = JSON.parse(fs.readFileSync(codexOutputPath, 'utf8'));
  } catch (err) {
    console.error(`Error: Failed to parse Codex output file: ${err.message}`);
    process.exit(1);
  }

  if (!codexOutput.projects || !Array.isArray(codexOutput.projects)) {
    console.error('Error: Codex output missing or invalid "projects" array');
    process.exit(1);
  }

  const versionResults = {};
  // UTC calendar date; acceptable precision for changelog stamping.
  const today = new Date().toISOString().split('T')[0];

  const projectsToBump = codexOutput.projects.filter((p) => p.bump !== 'none');

  if (projectsToBump.length === 0) {
    console.log('No projects need version bumps');
    process.exit(0);
  }

  console.log(`\nProjects to bump: ${projectsToBump.map((p) => p.name).join(', ')}`);

  for (const project of projectsToBump) {
    console.log(`\n${'='.repeat(60)}`);
    console.log(`Versioning ${project.name} to ${project.newVersion} (${project.bump})`);
    console.log(`Reason: ${project.reason || 'N/A'}`);
    console.log('='.repeat(60));

    try {
      // Nx bumps package.json and rewrites internal dependency ranges.
      // Git operations stay off — the calling workflow commits and tags.
      await releaseVersion({
        specifier: project.newVersion,
        projects: [project.name],
        dryRun,
        verbose: true,
        gitCommit: false,
        gitTag: false,
      });

      versionResults[project.name] = project.newVersion;
      console.log(`✓ Version updated for ${project.name}`);

      // Apply the Codex-generated changelog (Nx does not handle this here).
      applyProjectChangelog(project, today, dryRun);
    } catch (error) {
      console.error(`✗ Failed to version ${project.name}:`, error.message);
      process.exit(1);
    }
  }

  if (codexOutput.globalChangelog && !dryRun) {
    updateGlobalChangelog(codexOutput.globalChangelog, versionResults, today);
  }

  console.log('\n' + '='.repeat(60));
  console.log('Version results:');
  for (const [name, version] of Object.entries(versionResults)) {
    console.log(`  ${name}: ${version}`);
  }
  console.log('='.repeat(60));

  // Machine-readable tail for CI; the marker line is parsed downstream —
  // do not change it.
  console.log('\n__VERSION_RESULTS_JSON__');
  console.log(JSON.stringify({ versionResults }));
}

main().catch((err) => {
  console.error('Fatal error:', err);
  process.exit(1);
});