diff --git a/docs/src/content/docs/configuration.md b/docs/src/content/docs/configuration.md index 4bf15b47..9f5736b1 100644 --- a/docs/src/content/docs/configuration.md +++ b/docs/src/content/docs/configuration.md @@ -485,6 +485,39 @@ Require a minimum Craft version: minVersion: '0.5.0' ``` +### Smart Defaults + +Setting `minVersion` to `2.21.0` or higher enables smart defaults that simplify configuration: + +```yaml +minVersion: '2.21.0' +``` + +| Feature | Default with `minVersion >= 2.21.0` | Default without | +| ------------------- | ----------------------------------- | --------------- | +| `changelog.policy` | `auto` | `none` | +| `versioning.policy` | `auto` (with `>= 2.14.0`) | `manual` | + +This means a minimal configuration like this: + +```yaml +minVersion: '2.21.0' +targets: + - name: npm + - name: github +``` + +Will automatically: + +- Generate changelogs from conventional commits +- Determine version bumps from commit analysis +- Create `CHANGELOG.md` if it doesn't exist + +:::tip[Recommended for New Projects] +Use `minVersion: '2.21.0'` for new projects to take advantage of smart defaults. +Run `craft init` to automatically generate this configuration. +::: + ## Required Files Ensure specific artifacts exist before publishing: diff --git a/docs/src/content/docs/getting-started.md b/docs/src/content/docs/getting-started.md index a5d5ed74..a8c04620 100644 --- a/docs/src/content/docs/getting-started.md +++ b/docs/src/content/docs/getting-started.md @@ -21,6 +21,47 @@ pnpm add -g @sentry/craft npm install -g @sentry/craft ``` +## Quick Start with `craft init` + +The fastest way to get started is using `craft init`, which auto-detects your project type and generates configuration: + +```shell +cd your-project +craft init +``` + +This will: + +1. Detect your project type (npm, PyPI, Cargo, etc.) +2. Generate a `.craft.yml` configuration file +3. 
Create GitHub Actions workflows for release automation + +Example output: + +``` +[info] Detecting project type... +[info] ✓ Found GitHub repository: your-org/your-repo +[info] ✓ Detected 2 target(s): + - npm + - github +[info] ✓ Detected Node.js project (pnpm) + +Proposed .craft.yml: +──────────────────────────────────────── +minVersion: "2.21.0" +targets: + - name: npm + - name: github +──────────────────────────────────────── +? Create .craft.yml? (Y/n) +``` + +After initialization, validate your configuration: + +```shell +craft validate +``` + ## Usage ```shell @@ -28,9 +69,11 @@ $ craft -h craft Commands: - craft prepare NEW-VERSION 🚢 Prepare a new release branch + craft init Initialize Craft configuration for a new project + craft prepare [NEW-VERSION] 🚢 Prepare a new release branch [aliases: p, prerelease, prepublish, prepare, release] craft publish NEW-VERSION 🛫 Publish artifacts [aliases: pp, publish] + craft validate Validate Craft configuration and workflows craft targets List defined targets as JSON array craft config Print the parsed, processed, and validated Craft config for the current project in pretty-JSON. @@ -48,17 +91,74 @@ Options: ## Workflow +### `craft init`: Initialize a New Project + +Auto-detect your project type and generate configuration: + +```shell +craft init + +Initialize Craft configuration for a new project + +Options: + --skip-workflows Skip generating GitHub Actions workflow files + --force Overwrite existing files + -h, --help Show help +``` + +The `init` command detects: + +- **Package managers**: npm, pnpm, yarn, pip, cargo, etc. +- **Project files**: package.json, pyproject.toml, Cargo.toml, Dockerfile, etc. 
+- **GitHub info**: owner and repo from git remote + +Generated files: + +- `.craft.yml` - Main configuration +- `.github/workflows/release.yml` - Release preparation workflow +- `.github/workflows/changelog-preview.yml` - PR changelog preview + +:::note +Publishing is typically handled via a separate repository that stores secrets securely. See [Publishing Configuration](/configuration#publishing) for details. +::: + +### `craft validate`: Validate Configuration + +Check your configuration for errors and best practices: + +```shell +craft validate + +Options: + --skip-workflows Skip validating GitHub Actions workflow files + -h, --help Show help +``` + +Validates: + +- YAML syntax and schema +- Target names exist +- No duplicate target IDs +- Regex patterns are valid +- Workflow files use recommended patterns + ### `craft prepare`: Preparing a New Release This command creates a new release branch, checks the changelog entries, runs a version-bumping script, and pushes this branch to GitHub. CI triggered by pushing this branch will build release artifacts and upload them to your artifact provider. **Version Specification** -The `NEW-VERSION` argument can be specified in three ways (or omitted to use `auto`): +The `NEW-VERSION` argument can be specified in several ways (or omitted to use `auto`): + +1. **Omitted**: Uses `auto` by default (or `versioning.policy` from `.craft.yml` if configured) +2. **Explicit version** (e.g., `1.2.3`): Release with the specified version +3. **Bump type** (`major`, `minor`, or `patch`): Automatically increment the latest tag +4. **Auto** (`auto`): Analyze commits since the last tag and determine bump type from conventional commit patterns +5. **CalVer** (`calver`): Use calendar-based versioning -1. **Explicit version** (e.g., `1.2.3`): Release with the specified version -2. **Bump type** (`major`, `minor`, or `patch`): Automatically increment the latest tag -3. 
**Auto** (`auto` or omit the argument): Analyze commits since the last tag and determine bump type from conventional commit patterns +**First Release** + +When no git tags exist (first release), Craft defaults to a `minor` bump from `0.0.0` (resulting in `0.1.0`) when using auto-versioning. This ensures a sensible starting point for new projects. ```shell craft prepare [NEW-VERSION] @@ -67,9 +167,10 @@ craft prepare [NEW-VERSION] Positionals: NEW-VERSION The new version to release. Can be: a semver string (e.g., - "1.2.3"), a bump type ("major", "minor", or "patch"), or "auto" - to determine automatically from conventional commits. When - omitted, defaults to "auto". [string] + "1.2.3"), a bump type ("major", "minor", or "patch"), "auto" + to determine automatically from conventional commits, or "calver" + for calendar versioning. When omitted, defaults to "auto". + [string] Options: --no-input Suppresses all user prompts [default: false] @@ -80,6 +181,8 @@ Options: --no-changelog Do not check for changelog entries [boolean] [default: false] --publish Run "publish" right after "release"[boolean] [default: false] --remote The git remote to use when pushing [string] [default: "origin"] + --config-from Load .craft.yml from specified remote branch + --calver-offset Days to go back for CalVer date calculation -v, --version Show version number [boolean] -h, --help Show help [boolean] ``` @@ -179,6 +282,7 @@ preview|pre|rc|dev|alpha|beta|unstable|a|b ``` Examples: + - `1.0.0-preview` - `1.0.0-alpha.0` - `1.0.0-beta.1` @@ -227,10 +331,12 @@ diff --git a/CHANGELOG.md b/CHANGELOG.md ``` **What's blocked:** + - Git push (nothing leaves your machine) - GitHub API mutations (no releases, uploads, or changes) **What's allowed:** + - All local operations (in a temporary worktree) - Reading from GitHub API (requires `GITHUB_TOKEN`) @@ -264,6 +370,20 @@ export NUGET_API_TOKEN=abcdefgh ## Integrating Your Project +### Quick Setup (Recommended) + +Use `craft init` to automatically 
generate configuration: + +```shell +cd your-project +craft init +craft validate +``` + +Then set up required secrets in your GitHub repository and run your first release. + +### Manual Setup + 1. **Set up a workflow** that builds assets and runs tests. Allow building release branches: ```yaml @@ -295,3 +415,34 @@ export NUGET_API_TOKEN=abcdefgh 5. **Configure environment variables** for your targets. 6. **Run** `craft prepare --publish`! + +## First Release + +For new projects with no existing releases, Craft provides a streamlined experience: + +1. **Initialize**: Run `craft init` to generate configuration +2. **Validate**: Run `craft validate` to check your setup +3. **Release**: Run `craft prepare` (version defaults to `0.1.0`) + +Example first release workflow: + +```shell +# Initialize (one-time setup) +craft init +craft validate + +# Set up secrets in GitHub (GH_RELEASE_PAT, NPM_TOKEN, etc.) + +# Create your first release +craft prepare # Defaults to 0.1.0 for first release +# Or explicitly: craft prepare 0.1.0 + +# After CI completes, publish +craft publish 0.1.0 +``` + +With smart defaults enabled (`minVersion: "2.21.0"`), Craft will: + +- Auto-detect version bumps from commits +- Automatically generate changelogs +- Create `CHANGELOG.md` if it doesn't exist diff --git a/docs/src/content/docs/index.mdx b/docs/src/content/docs/index.mdx index 8052b47c..24553382 100644 --- a/docs/src/content/docs/index.mdx +++ b/docs/src/content/docs/index.mdx @@ -21,28 +21,36 @@ import { Card, CardGrid } from '@astrojs/starlight/components'; ## Features - - Prepare and publish releases with a single command. Craft handles version bumping, changelog management, and artifact publishing. + + Run `craft init` to auto-detect your project and generate configuration. + Works with npm, PyPI, Cargo, Docker, and more. - Automatically determine version bumps from conventional commits. Just run `craft prepare auto` and let Craft figure out the rest. 
+ Automatically determine version bumps from conventional commits. Just run + `craft prepare` and let Craft figure out the rest. - Publish to GitHub, NPM, PyPI, Docker, NuGet, Crates.io, and many more registries from a single configuration. + Publish to GitHub, NPM, PyPI, Docker, NuGet, Crates.io, and many more + registries from a single configuration. - Automatic changelog generation using conventional commits or manual changelog policies. + Automatic changelog generation using conventional commits or manual + changelog policies. - Works seamlessly with GitHub Actions and other CI systems. Fetch artifacts and publish them to your targets. + Works seamlessly with GitHub Actions and other CI systems. Fetch artifacts + and publish them to your targets. ## Quick Example ```bash +# Initialize a new project (one-time setup) +craft init + # Auto-determine version from conventional commits -craft prepare auto +craft prepare # Or specify a bump type craft prepare minor diff --git a/src/__tests__/prepare-dry-run.e2e.test.ts b/src/__tests__/prepare-dry-run.e2e.test.ts index f2b36596..0b28636a 100644 --- a/src/__tests__/prepare-dry-run.e2e.test.ts +++ b/src/__tests__/prepare-dry-run.e2e.test.ts @@ -518,4 +518,156 @@ targets: [] expect(combinedOutput).toContain('Creating changelog file'); expect(combinedOutput).toContain('Releasing version 1.1.0'); }, 60000); + + test('first release with no tags defaults to version 0.1.0', async () => { + tempDir = await mkdtemp(join(tmpdir(), 'craft-e2e-')); + // eslint-disable-next-line no-restricted-syntax -- Test setup needs direct git access + const git = simpleGit(tempDir); + + // Initialize git repo + await git.init(); + await git.addConfig('user.email', 'test@example.com'); + await git.addConfig('user.name', 'Test User'); + + // Create .craft.yml with auto versioning - NO TAGS + const craftConfig = ` +minVersion: "2.14.0" +github: + owner: test-owner + repo: test-repo +versioning: + policy: auto +changelog: + policy: none 
+preReleaseCommand: "" +targets: [] +`; + await writeFile(join(tempDir, '.craft.yml'), craftConfig); + + // Create package.json + const packageJson = { name: 'test-package', version: '0.0.0' }; + await writeFile( + join(tempDir, 'package.json'), + JSON.stringify(packageJson, null, 2), + ); + + // Initial commit - NO TAG + await git.add('.'); + await git.commit('Initial commit'); + + // Add a feature commit + await writeFile(join(tempDir, 'feature.ts'), 'export const foo = 1;'); + await git.add('.'); + await git.commit('feat: Add foo feature'); + + // Create remote + const remoteDir = await mkdtemp(join(tmpdir(), 'craft-e2e-remote-')); + // eslint-disable-next-line no-restricted-syntax -- Test setup needs direct git access + const remoteGit = simpleGit(remoteDir); + await remoteGit.init(true); + await git.addRemote('origin', remoteDir); + const status = await git.status(); + await git.push('origin', status.current!, ['--set-upstream']); + + // Run prepare without version argument - should default to 0.1.0 for first release + const { stdout, stderr } = await execFileAsync( + CLI_BIN, + ['prepare', '--dry-run', '--no-input'], + { + cwd: tempDir, + env: { + ...process.env, + NODE_ENV: 'test', + GITHUB_TOKEN: 'test-token', + }, + }, + ); + + const combinedOutput = stdout + stderr; + + // Should detect first release and default to 0.1.0 (minor bump from 0.0.0) + expect(combinedOutput).toContain('No previous releases found'); + expect(combinedOutput).toContain('first release'); + expect(combinedOutput).toContain( + 'default bump type for first release: minor', + ); + expect(combinedOutput).toContain('Releasing version 0.1.0'); + expect(combinedOutput).toContain('release/0.1.0'); + }, 60000); + + test('first release with auto changelog creates CHANGELOG.md', async () => { + tempDir = await mkdtemp(join(tmpdir(), 'craft-e2e-')); + // eslint-disable-next-line no-restricted-syntax -- Test setup needs direct git access + const git = simpleGit(tempDir); + + // Initialize git 
repo + await git.init(); + await git.addConfig('user.email', 'test@example.com'); + await git.addConfig('user.name', 'Test User'); + + // Create .craft.yml with auto versioning AND auto changelog - NO TAGS, NO CHANGELOG + const craftConfig = ` +minVersion: "2.14.0" +github: + owner: test-owner + repo: test-repo +versioning: + policy: auto +changelog: + policy: auto +preReleaseCommand: "" +targets: [] +`; + await writeFile(join(tempDir, '.craft.yml'), craftConfig); + + // Create package.json + const packageJson = { name: 'test-package', version: '0.0.0' }; + await writeFile( + join(tempDir, 'package.json'), + JSON.stringify(packageJson, null, 2), + ); + + // Initial commit - NO TAG, NO CHANGELOG + await git.add('.'); + await git.commit('Initial commit'); + + // Add a feature commit + await writeFile(join(tempDir, 'feature.ts'), 'export const foo = 1;'); + await git.add('.'); + await git.commit('feat: Add foo feature'); + + // Create remote + const remoteDir = await mkdtemp(join(tmpdir(), 'craft-e2e-remote-')); + // eslint-disable-next-line no-restricted-syntax -- Test setup needs direct git access + const remoteGit = simpleGit(remoteDir); + await remoteGit.init(true); + await git.addRemote('origin', remoteDir); + const status = await git.status(); + await git.push('origin', status.current!, ['--set-upstream']); + + // Verify no CHANGELOG.md exists + expect(existsSync(join(tempDir, 'CHANGELOG.md'))).toBe(false); + + // Run prepare - should default to 0.1.0 and create CHANGELOG.md + const { stdout, stderr } = await execFileAsync( + CLI_BIN, + ['prepare', '--dry-run', '--no-input'], + { + cwd: tempDir, + env: { + ...process.env, + NODE_ENV: 'test', + GITHUB_TOKEN: 'test-token', + }, + }, + ); + + const combinedOutput = stdout + stderr; + + // Should detect first release + expect(combinedOutput).toContain('No previous releases found'); + expect(combinedOutput).toContain('Releasing version 0.1.0'); + // Should create CHANGELOG.md + 
expect(combinedOutput).toContain('Creating changelog file'); + }, 60000); }); diff --git a/src/commands/__tests__/validate.test.ts b/src/commands/__tests__/validate.test.ts new file mode 100644 index 00000000..0bb857c4 --- /dev/null +++ b/src/commands/__tests__/validate.test.ts @@ -0,0 +1,389 @@ +import { vi, describe, test, expect, beforeEach, afterEach } from 'vitest'; +import { mkdtempSync, writeFileSync, mkdirSync, rmSync } from 'fs'; +import { join } from 'path'; +import { tmpdir } from 'os'; + +import { handler } from '../validate'; + +// Mock config module to control config file location +vi.mock('../../config', async importOriginal => { + const original = await importOriginal(); + return { + ...original, + findConfigFile: vi.fn(), + getConfigFileDir: vi.fn(), + }; +}); + +// Mock logger to suppress output during tests +vi.mock('../../logger', () => ({ + logger: { + info: vi.fn(), + error: vi.fn(), + warn: vi.fn(), + debug: vi.fn(), + }, +})); + +describe('validate command', () => { + let tmpDir: string; + let configModule: typeof import('../../config'); + + beforeEach(async () => { + tmpDir = mkdtempSync(join(tmpdir(), 'craft-validate-test-')); + configModule = await import('../../config'); + process.exitCode = undefined; + }); + + afterEach(() => { + rmSync(tmpDir, { recursive: true, force: true }); + vi.clearAllMocks(); + process.exitCode = undefined; + }); + + test('reports error when no config file found', async () => { + vi.mocked(configModule.findConfigFile).mockReturnValue(undefined); + + await handler({}); + + expect(process.exitCode).toBe(1); + }); + + test('validates a minimal valid config', async () => { + const configPath = join(tmpDir, '.craft.yml'); + writeFileSync( + configPath, + ` +github: + owner: getsentry + repo: craft +minVersion: "2.21.0" +targets: + - name: github +`, + ); + + vi.mocked(configModule.findConfigFile).mockReturnValue(configPath); + vi.mocked(configModule.getConfigFileDir).mockReturnValue(tmpDir); + + // Create minimal 
workflow directory + const workflowsDir = join(tmpDir, '.github', 'workflows'); + mkdirSync(workflowsDir, { recursive: true }); + writeFileSync( + join(workflowsDir, 'release.yml'), + ` +name: Release +on: + workflow_dispatch: +jobs: + release: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: getsentry/craft@v2 +`, + ); + + await handler({}); + + expect(process.exitCode).toBeUndefined(); + }); + + test('reports error for invalid YAML', async () => { + const configPath = join(tmpDir, '.craft.yml'); + writeFileSync(configPath, 'invalid: yaml: content: ['); + + vi.mocked(configModule.findConfigFile).mockReturnValue(configPath); + vi.mocked(configModule.getConfigFileDir).mockReturnValue(tmpDir); + + await handler({ 'skip-workflows': true }); + + expect(process.exitCode).toBe(1); + }); + + test('reports error for unknown target', async () => { + const configPath = join(tmpDir, '.craft.yml'); + writeFileSync( + configPath, + ` +github: + owner: getsentry + repo: craft +targets: + - name: unknown-target +`, + ); + + vi.mocked(configModule.findConfigFile).mockReturnValue(configPath); + vi.mocked(configModule.getConfigFileDir).mockReturnValue(tmpDir); + + await handler({ 'skip-workflows': true }); + + expect(process.exitCode).toBe(1); + }); + + test('reports error for duplicate target IDs', async () => { + const configPath = join(tmpDir, '.craft.yml'); + writeFileSync( + configPath, + ` +github: + owner: getsentry + repo: craft +targets: + - name: npm + - name: npm +`, + ); + + vi.mocked(configModule.findConfigFile).mockReturnValue(configPath); + vi.mocked(configModule.getConfigFileDir).mockReturnValue(tmpDir); + + await handler({ 'skip-workflows': true }); + + expect(process.exitCode).toBe(1); + }); + + test('allows duplicate target names with different IDs', async () => { + const configPath = join(tmpDir, '.craft.yml'); + writeFileSync( + configPath, + ` +github: + owner: getsentry + repo: craft +minVersion: "2.21.0" 
+targets: + - name: npm + id: npm-main + - name: npm + id: npm-secondary +`, + ); + + vi.mocked(configModule.findConfigFile).mockReturnValue(configPath); + vi.mocked(configModule.getConfigFileDir).mockReturnValue(tmpDir); + + await handler({ 'skip-workflows': true }); + + // Should not have errors (only warnings about missing workflows) + expect(process.exitCode).toBeUndefined(); + }); + + test('reports error for invalid regex pattern', async () => { + const configPath = join(tmpDir, '.craft.yml'); + writeFileSync( + configPath, + ` +github: + owner: getsentry + repo: craft +targets: + - name: github + includeNames: "[invalid-regex" +`, + ); + + vi.mocked(configModule.findConfigFile).mockReturnValue(configPath); + vi.mocked(configModule.getConfigFileDir).mockReturnValue(tmpDir); + + await handler({ 'skip-workflows': true }); + + expect(process.exitCode).toBe(1); + }); + + test('warns about deprecated changelogPolicy field', async () => { + const configPath = join(tmpDir, '.craft.yml'); + writeFileSync( + configPath, + ` +github: + owner: getsentry + repo: craft +minVersion: "2.21.0" +changelogPolicy: auto +targets: + - name: github +`, + ); + + vi.mocked(configModule.findConfigFile).mockReturnValue(configPath); + vi.mocked(configModule.getConfigFileDir).mockReturnValue(tmpDir); + + // Create workflow directory to avoid workflow warnings + const workflowsDir = join(tmpDir, '.github', 'workflows'); + mkdirSync(workflowsDir, { recursive: true }); + writeFileSync( + join(workflowsDir, 'release.yml'), + ` +name: Release +on: + workflow_dispatch: +jobs: + release: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: getsentry/craft@v2 +`, + ); + + await handler({}); + + // Deprecated fields generate warnings, not errors + expect(process.exitCode).toBeUndefined(); + }); + + test('warns about missing minVersion', async () => { + const configPath = join(tmpDir, '.craft.yml'); + writeFileSync( + configPath, + ` +github: + owner: 
getsentry + repo: craft +targets: + - name: github +`, + ); + + vi.mocked(configModule.findConfigFile).mockReturnValue(configPath); + vi.mocked(configModule.getConfigFileDir).mockReturnValue(tmpDir); + + await handler({ 'skip-workflows': true }); + + // Missing minVersion is a warning, not an error + expect(process.exitCode).toBeUndefined(); + }); + + test('warns about missing workflows directory', async () => { + const configPath = join(tmpDir, '.craft.yml'); + writeFileSync( + configPath, + ` +github: + owner: getsentry + repo: craft +minVersion: "2.21.0" +targets: + - name: github +`, + ); + + vi.mocked(configModule.findConfigFile).mockReturnValue(configPath); + vi.mocked(configModule.getConfigFileDir).mockReturnValue(tmpDir); + + await handler({}); + + // Missing workflows is a warning, not an error + expect(process.exitCode).toBeUndefined(); + }); + + test('skips workflow validation with --skip-workflows', async () => { + const configPath = join(tmpDir, '.craft.yml'); + writeFileSync( + configPath, + ` +github: + owner: getsentry + repo: craft +minVersion: "2.21.0" +targets: + - name: github +`, + ); + + vi.mocked(configModule.findConfigFile).mockReturnValue(configPath); + vi.mocked(configModule.getConfigFileDir).mockReturnValue(tmpDir); + + await handler({ 'skip-workflows': true }); + + expect(process.exitCode).toBeUndefined(); + }); + + test('warns about missing fetch-depth in release workflow', async () => { + const configPath = join(tmpDir, '.craft.yml'); + writeFileSync( + configPath, + ` +github: + owner: getsentry + repo: craft +minVersion: "2.21.0" +targets: + - name: github +`, + ); + + vi.mocked(configModule.findConfigFile).mockReturnValue(configPath); + vi.mocked(configModule.getConfigFileDir).mockReturnValue(tmpDir); + + const workflowsDir = join(tmpDir, '.github', 'workflows'); + mkdirSync(workflowsDir, { recursive: true }); + writeFileSync( + join(workflowsDir, 'release.yml'), + ` +name: Release +on: + workflow_dispatch: +jobs: + release: + 
runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: getsentry/craft@v2 +`, + ); + + await handler({}); + + // Missing fetch-depth is a warning + expect(process.exitCode).toBeUndefined(); + }); + + test('warns about workflow not using Craft action', async () => { + const configPath = join(tmpDir, '.craft.yml'); + writeFileSync( + configPath, + ` +github: + owner: getsentry + repo: craft +minVersion: "2.21.0" +targets: + - name: github +`, + ); + + vi.mocked(configModule.findConfigFile).mockReturnValue(configPath); + vi.mocked(configModule.getConfigFileDir).mockReturnValue(tmpDir); + + const workflowsDir = join(tmpDir, '.github', 'workflows'); + mkdirSync(workflowsDir, { recursive: true }); + writeFileSync( + join(workflowsDir, 'release.yml'), + ` +name: Release +on: + workflow_dispatch: +jobs: + release: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - run: npm run release +`, + ); + + await handler({}); + + // Not using Craft action is a warning + expect(process.exitCode).toBeUndefined(); + }); +}); diff --git a/src/commands/init.ts b/src/commands/init.ts new file mode 100644 index 00000000..89076b1a --- /dev/null +++ b/src/commands/init.ts @@ -0,0 +1,279 @@ +import { existsSync, mkdirSync, writeFileSync } from 'fs'; +import { join } from 'path'; +import prompts from 'prompts'; +import { Argv } from 'yargs'; + +import { logger } from '../logger'; +import { CONFIG_FILE_NAME, findConfigFile, getConfigFileDir } from '../config'; +import { TARGET_MAP } from '../targets'; +import { BaseTarget } from '../targets/base'; +import { + DetectionContext, + DetectionResult, + RequiredSecret, + WorkflowSetup, +} from '../utils/detection'; +import { + generateCraftConfig, + generateReleaseWorkflow, + generateChangelogPreviewWorkflow, + TemplateContext, +} from '../utils/templates'; +import { createGitClient, getGitHubInfoFromRemote } from '../utils/git'; +import { isDryRun, hasInput } from '../utils/helpers'; + 
+export const command = ['init']; +export const description = 'Initialize Craft configuration for a new project'; + +interface InitArgs { + 'skip-workflows'?: boolean; + force?: boolean; +} + +export const builder = (yargs: Argv) => + yargs + .option('skip-workflows', { + describe: 'Skip generating GitHub Actions workflow files', + type: 'boolean', + default: false, + }) + .option('force', { + describe: 'Overwrite existing files', + type: 'boolean', + default: false, + }); + +/** + * Detect all applicable targets for the project + */ +async function detectTargets( + context: DetectionContext, +): Promise { + const results: DetectionResult[] = []; + + for (const [_name, TargetClass] of Object.entries(TARGET_MAP)) { + // Check if the target class has a detect method + if (typeof (TargetClass as typeof BaseTarget).detect === 'function') { + try { + const result = await (TargetClass as typeof BaseTarget).detect!( + context, + ); + if (result) { + results.push(result); + } + } catch (error) { + logger.debug(`Error detecting target ${_name}:`, error); + } + } + } + + // Sort by priority (lower priority values first, GitHub last) + results.sort((a, b) => a.priority - b.priority); + + return results; +} + +/** + * Format detected targets for display + */ +function formatDetectedTargets(results: DetectionResult[]): string { + return results + .map(r => { + const extras = Object.entries(r.config) + .filter(([k]) => k !== 'name') + .map(([k, v]) => `${k}=${v}`) + .join(', '); + return extras + ? 
` - ${r.config.name} (${extras})` + : ` - ${r.config.name}`; + }) + .join('\n'); +} + +export async function handler(args: InitArgs = {}): Promise { + const rootDir = getConfigFileDir() || process.cwd(); + const existingConfig = findConfigFile(); + + // Check for existing config + if (existingConfig && !args.force) { + logger.error(`Configuration file already exists: ${existingConfig}`); + logger.info('Use --force to overwrite existing files'); + process.exitCode = 1; + return; + } + + logger.info('Detecting project type...'); + + // Detect GitHub info + const git = createGitClient(rootDir); + let githubInfo: Awaited> = null; + try { + githubInfo = await getGitHubInfoFromRemote(git); + } catch (error) { + logger.debug('Error detecting GitHub info:', error); + } + if (githubInfo) { + logger.info( + `✓ Found GitHub repository: ${githubInfo.owner}/${githubInfo.repo}`, + ); + } else { + logger.warn('Could not detect GitHub repository from git remote'); + } + + // Build detection context + const context: DetectionContext = { + rootDir, + githubOwner: githubInfo?.owner, + githubRepo: githubInfo?.repo, + }; + + // Detect targets + const detectedTargets = await detectTargets(context); + + if (detectedTargets.length === 0) { + logger.warn('No publishable targets detected'); + logger.info('You can manually configure targets in .craft.yml'); + } else { + logger.info(`✓ Detected ${detectedTargets.length} target(s):`); + console.log(formatDetectedTargets(detectedTargets)); + } + + // Aggregate workflow setup from detected targets + const workflowSetup: WorkflowSetup = {}; + for (const target of detectedTargets) { + if (target.workflowSetup?.node && !workflowSetup.node) { + workflowSetup.node = target.workflowSetup.node; + logger.info( + `✓ Detected Node.js project (${workflowSetup.node.packageManager})`, + ); + } + if (target.workflowSetup?.python && !workflowSetup.python) { + workflowSetup.python = target.workflowSetup.python; + logger.info( + `✓ Detected Python 
project${workflowSetup.python.version ? ` (${workflowSetup.python.version})` : ''}`, + ); + } + } + + // Aggregate required secrets from detected targets + const requiredSecrets: RequiredSecret[] = []; + const seenSecrets = new Set(); + for (const target of detectedTargets) { + for (const secret of target.requiredSecrets || []) { + if (!seenSecrets.has(secret.name)) { + seenSecrets.add(secret.name); + requiredSecrets.push(secret); + } + } + } + + // Build template context + const templateContext: TemplateContext = { + githubOwner: githubInfo?.owner || 'YOUR_ORG', + githubRepo: githubInfo?.repo || 'YOUR_REPO', + targets: detectedTargets.map(r => r.config), + workflowSetup: + workflowSetup.node || workflowSetup.python ? workflowSetup : undefined, + }; + + // Generate config preview + const craftConfig = generateCraftConfig(templateContext); + + console.log('\nProposed .craft.yml:'); + console.log('─'.repeat(40)); + console.log(craftConfig); + console.log('─'.repeat(40)); + + // Ask for confirmation + if (hasInput() && !isDryRun()) { + const { proceed } = await prompts({ + type: 'confirm', + name: 'proceed', + message: 'Create .craft.yml?', + initial: true, + }); + + if (!proceed) { + logger.info('Aborted'); + return; + } + } + + // Write .craft.yml + const craftConfigPath = join(rootDir, CONFIG_FILE_NAME); + if (isDryRun()) { + logger.info(`[dry-run] Would create ${craftConfigPath}`); + } else { + writeFileSync(craftConfigPath, craftConfig); + logger.info(`✓ Created ${craftConfigPath}`); + } + + // Generate workflows + if (!args['skip-workflows'] && githubInfo) { + const workflowsDir = join(rootDir, '.github', 'workflows'); + + // Ask for confirmation for workflows + let createWorkflows = true; + if (hasInput() && !isDryRun()) { + const { proceed } = await prompts({ + type: 'confirm', + name: 'proceed', + message: 'Generate GitHub Actions workflows?', + initial: true, + }); + createWorkflows = proceed; + } + + if (createWorkflows) { + if (isDryRun()) { + 
logger.info('[dry-run] Would create GitHub Actions workflows'); + } else { + // Ensure workflows directory exists + if (!existsSync(workflowsDir)) { + mkdirSync(workflowsDir, { recursive: true }); + } + + // Generate release workflow + const releaseWorkflow = generateReleaseWorkflow(templateContext); + const releaseWorkflowPath = join(workflowsDir, 'release.yml'); + if (!existsSync(releaseWorkflowPath) || args.force) { + writeFileSync(releaseWorkflowPath, releaseWorkflow); + logger.info(`✓ Created ${releaseWorkflowPath}`); + } else { + logger.info(`Skipped ${releaseWorkflowPath} (already exists)`); + } + + // Generate changelog preview workflow + const changelogWorkflow = generateChangelogPreviewWorkflow(); + const changelogWorkflowPath = join( + workflowsDir, + 'changelog-preview.yml', + ); + if (!existsSync(changelogWorkflowPath) || args.force) { + writeFileSync(changelogWorkflowPath, changelogWorkflow); + logger.info(`✓ Created ${changelogWorkflowPath}`); + } else { + logger.info(`Skipped ${changelogWorkflowPath} (already exists)`); + } + } + } + } + + logger.info('\nDone! Next steps:'); + logger.info('1. Review the generated configuration'); + if (requiredSecrets.length > 0) { + logger.info('2. Set up required secrets in your GitHub repository:'); + for (const secret of requiredSecrets) { + logger.info(` - ${secret.name}: ${secret.description}`); + } + logger.info( + '3. Configure publishing in your publish repository (see docs for details)', + ); + logger.info('4. Run `craft validate` to verify your configuration'); + } else { + logger.info( + '2. Configure publishing in your publish repository (see docs for details)', + ); + logger.info('3. 
Run `craft validate` to verify your configuration'); + } +} diff --git a/src/commands/prepare.ts b/src/commands/prepare.ts index 5ab3a7c4..3adea6b4 100644 --- a/src/commands/prepare.ts +++ b/src/commands/prepare.ts @@ -15,6 +15,7 @@ import { loadConfigurationFromString, CONFIG_FILE_NAME, getVersioningPolicy, + getChangelogConfig, } from '../config'; import { logger } from '../logger'; import { @@ -76,6 +77,9 @@ const AUTO_VERSION_MIN_VERSION = '2.14.0'; /** Minimum craft version required for automatic version bumping from targets */ const AUTO_BUMP_MIN_VERSION = '2.21.0'; +/** Default bump type for first release when using auto-versioning */ +const DEFAULT_FIRST_RELEASE_BUMP: BumpType = 'minor'; + export const builder: CommandBuilder = (yargs: Argv) => yargs .positional('NEW-VERSION', { @@ -662,18 +666,33 @@ async function resolveVersion( } const latestTag = await getLatestTag(git); + const isFirstRelease = !latestTag; + + if (isFirstRelease) { + logger.info( + `No previous releases found. This appears to be the first release.`, + ); + } // Determine bump type - either from arg or from commit analysis let bumpType: BumpType; if (version === 'auto') { - const changelogResult = await getChangelogWithBumpType(git, latestTag); - validateBumpType(changelogResult); - bumpType = changelogResult.bumpType; + if (isFirstRelease) { + // For first release with auto, default to minor bump (0.0.0 -> 0.1.0) + logger.info( + `Using default bump type for first release: ${DEFAULT_FIRST_RELEASE_BUMP}`, + ); + bumpType = DEFAULT_FIRST_RELEASE_BUMP; + } else { + const changelogResult = await getChangelogWithBumpType(git, latestTag); + validateBumpType(changelogResult); + bumpType = changelogResult.bumpType; + } } else { bumpType = version as BumpType; } - // Calculate new version from latest tag + // Calculate new version from latest tag (or 0.0.0 for first release) const currentVersion = latestTag && latestTag.replace(/^v/, '').match(/^\d/) ? 
latestTag.replace(/^v/, '') @@ -769,23 +788,14 @@ export async function prepareMain(argv: PrepareOptions): Promise { const oldVersion = await getLatestTag(git); // Check & update the changelog - // Extract changelog path from config (can be string or object) - const changelogPath = - typeof config.changelog === 'string' - ? config.changelog - : config.changelog?.filePath; - // Get policy from new format or legacy changelogPolicy - const changelogPolicy = ( - typeof config.changelog === 'object' && config.changelog?.policy - ? config.changelog.policy - : config.changelogPolicy - ) as ChangelogPolicy | undefined; + // Use getChangelogConfig() to apply smart defaults (auto for minVersion >= 2.21.0) + const changelogConfig = getChangelogConfig(); const changelogBody = await prepareChangelog( git, oldVersion, newVersion, - argv.noChangelog ? ChangelogPolicy.None : changelogPolicy, - changelogPath, + argv.noChangelog ? ChangelogPolicy.None : changelogConfig.policy, + changelogConfig.filePath, ); // Run a pre-release script (e.g. 
for version bumping)
diff --git a/src/commands/validate.ts b/src/commands/validate.ts
new file mode 100644
index 00000000..39c89951
--- /dev/null
+++ b/src/commands/validate.ts
@@ -0,0 +1,369 @@
+import { existsSync, readFileSync, readdirSync } from 'fs';
+import { join } from 'path';
+import { load } from 'js-yaml';
+import { Argv } from 'yargs';
+
+import { logger } from '../logger';
+import {
+  CONFIG_FILE_NAME,
+  SMART_DEFAULTS_MIN_VERSION,
+  findConfigFile,
+  getConfigFileDir,
+  validateConfiguration,
+} from '../config';
+import { getAllTargetNames } from '../targets';
+import { stringToRegexp } from '../utils/filters';
+import { ConfigurationError } from '../utils/errors';
+import { parseVersion, versionGreaterOrEqualThan } from '../utils/version';
+
+export const command = ['validate'];
+export const description = 'Validate Craft configuration and workflows';
+
+interface ValidateArgs {
+  'skip-workflows'?: boolean;
+}
+
+export const builder = (yargs: Argv) =>
+  yargs.option('skip-workflows', {
+    describe: 'Skip validating GitHub Actions workflow files',
+    type: 'boolean',
+    default: false,
+  });
+
+interface ValidationIssue {
+  level: 'error' | 'warning';
+  message: string;
+  file?: string;
+  line?: number;
+}
+
+/**
+ * Validate the .craft.yml configuration file
+ */
+function validateCraftConfig(configPath: string): ValidationIssue[] {
+  const issues: ValidationIssue[] = [];
+
+  // Read raw config
+  let rawConfig: Record<string, unknown>;
+  try {
+    const content = readFileSync(configPath, 'utf-8');
+    rawConfig = load(content) as Record<string, unknown>;
+  } catch (error) {
+    issues.push({
+      level: 'error',
+      message: `Failed to parse YAML: ${error instanceof Error ? 
error.message : String(error)}`, + file: configPath, + }); + return issues; + } + + // Validate schema + try { + validateConfiguration(rawConfig); + } catch (error) { + if (error instanceof ConfigurationError) { + issues.push({ + level: 'error', + message: error.message, + file: configPath, + }); + return issues; + } + throw error; + } + + // Validate targets + const validTargetNames = new Set(getAllTargetNames()); + const targets = + (rawConfig.targets as Array<{ name: string; id?: string }>) || []; + const seenIds = new Set(); + + for (const target of targets) { + if (!target.name) { + issues.push({ + level: 'error', + message: 'Target missing required "name" field', + file: configPath, + }); + continue; + } + + if (!validTargetNames.has(target.name)) { + issues.push({ + level: 'error', + message: `Unknown target "${target.name}". Valid targets: ${Array.from(validTargetNames).join(', ')}`, + file: configPath, + }); + } + + // Check for duplicate IDs + const id = target.id || target.name; + if (seenIds.has(id)) { + issues.push({ + level: 'error', + message: `Duplicate target ID "${id}". 
Use the "id" field to distinguish multiple targets of the same type.`, + file: configPath, + }); + } + seenIds.add(id); + } + + // Validate regex patterns + const regexFields = ['includeNames', 'excludeNames'] as const; + for (const target of targets) { + for (const field of regexFields) { + const value = target[field as keyof typeof target]; + if (typeof value === 'string') { + try { + stringToRegexp(value); + } catch { + issues.push({ + level: 'error', + message: `Invalid regex pattern in target "${target.name}": ${field}="${value}"`, + file: configPath, + }); + } + } + } + } + + // Validate requireNames patterns + const requireNames = rawConfig.requireNames as string[] | undefined; + if (requireNames) { + for (const pattern of requireNames) { + try { + stringToRegexp(pattern); + } catch { + issues.push({ + level: 'error', + message: `Invalid regex pattern in requireNames: "${pattern}"`, + file: configPath, + }); + } + } + } + + // Check for deprecated fields + if (rawConfig.changelogPolicy !== undefined) { + issues.push({ + level: 'warning', + message: + 'The "changelogPolicy" field is deprecated. 
Use "changelog.policy" instead.', + file: configPath, + }); + } + + // Recommend minVersion >= SMART_DEFAULTS_MIN_VERSION for smart defaults + const minVersion = rawConfig.minVersion as string | undefined; + if (!minVersion) { + issues.push({ + level: 'warning', + message: `Consider adding minVersion: "${SMART_DEFAULTS_MIN_VERSION}" to enable smart defaults`, + file: configPath, + }); + } else { + const parsedMinVersion = parseVersion(minVersion); + const parsedSmartDefaultsVersion = parseVersion(SMART_DEFAULTS_MIN_VERSION); + if ( + parsedMinVersion && + parsedSmartDefaultsVersion && + !versionGreaterOrEqualThan(parsedMinVersion, parsedSmartDefaultsVersion) + ) { + issues.push({ + level: 'warning', + message: `Consider updating minVersion to "${SMART_DEFAULTS_MIN_VERSION}" or later for smart defaults`, + file: configPath, + }); + } + } + + return issues; +} + +/** + * Validate GitHub Actions workflow files + * + * Scans all workflow files and validates those that use the Craft action. + */ +function validateWorkflows(rootDir: string): ValidationIssue[] { + const issues: ValidationIssue[] = []; + const workflowsDir = join(rootDir, '.github', 'workflows'); + + if (!existsSync(workflowsDir)) { + issues.push({ + level: 'warning', + message: + 'No .github/workflows directory found. Consider running `craft init` to generate workflows.', + }); + return issues; + } + + // Scan all workflow files + let workflowFiles: string[]; + try { + workflowFiles = readdirSync(workflowsDir).filter( + f => f.endsWith('.yml') || f.endsWith('.yaml'), + ); + } catch { + issues.push({ + level: 'error', + message: 'Failed to read workflows directory', + file: workflowsDir, + }); + return issues; + } + + if (workflowFiles.length === 0) { + issues.push({ + level: 'warning', + message: + 'No workflow files found. 
Consider running `craft init` to generate workflows.', + file: workflowsDir, + }); + return issues; + } + + // Validate each workflow that uses Craft + let craftWorkflowCount = 0; + for (const file of workflowFiles) { + const filePath = join(workflowsDir, file); + const workflowIssues = validateCraftWorkflow(filePath); + + // Only count and report issues for workflows that use Craft + if (workflowIssues.usesCraft) { + craftWorkflowCount++; + issues.push(...workflowIssues.issues); + } + } + + if (craftWorkflowCount === 0) { + issues.push({ + level: 'warning', + message: + 'No workflows using getsentry/craft action found. Consider running `craft init` to generate workflows.', + file: workflowsDir, + }); + } + + return issues; +} + +/** + * Validate a workflow file that may use Craft + */ +function validateCraftWorkflow(filePath: string): { + usesCraft: boolean; + issues: ValidationIssue[]; +} { + const issues: ValidationIssue[] = []; + + let content: string; + try { + content = readFileSync(filePath, 'utf-8'); + // Parse to validate YAML syntax + load(content); + } catch { + // If we can't parse the file, skip it (might not be a valid YAML) + return { usesCraft: false, issues: [] }; + } + + // Check if this workflow uses Craft + const usesCraft = content.includes('getsentry/craft'); + if (!usesCraft) { + return { usesCraft: false, issues: [] }; + } + + // Check if workflow uses reusable workflow vs composite action + const usesReusableWorkflow = content.includes( + 'getsentry/craft/.github/workflows/', + ); + const usesCompositeAction = /uses:\s*['"]?getsentry\/craft@/.test(content); + + // If only using reusable workflow (e.g., changelog-preview.yml), skip validation + // Reusable workflows are self-contained and handle their own checkout + if (usesReusableWorkflow && !usesCompositeAction) { + return { usesCraft: true, issues: [] }; + } + + // Check for proper checkout with fetch-depth (only for composite action usage) + const hasFetchDepth = + 
content.includes('fetch-depth: 0') || content.includes('fetch-depth: "0"');
+  if (!hasFetchDepth) {
+    issues.push({
+      level: 'warning',
+      message:
+        'Checkout step should use "fetch-depth: 0" for Craft to access full git history',
+      file: filePath,
+    });
+  }
+
+  return { usesCraft: true, issues };
+}
+
+export async function handler(args: ValidateArgs = {}): Promise<void> {
+  const configPath = findConfigFile();
+
+  if (!configPath) {
+    logger.error(
+      `No ${CONFIG_FILE_NAME} found. Run \`craft init\` to create one.`,
+    );
+    process.exitCode = 1;
+    return;
+  }
+
+  const rootDir = getConfigFileDir() || process.cwd();
+  const issues: ValidationIssue[] = [];
+
+  logger.info(`Validating ${configPath}...`);
+  issues.push(...validateCraftConfig(configPath));
+
+  if (!args['skip-workflows']) {
+    logger.info('Validating GitHub workflows...');
+    issues.push(...validateWorkflows(rootDir));
+  }
+
+  // Report results
+  const errors = issues.filter(i => i.level === 'error');
+  const warnings = issues.filter(i => i.level === 'warning');
+
+  console.log('');
+
+  if (errors.length > 0) {
+    console.log('Errors:');
+    for (const issue of errors) {
+      const location = issue.file
+        ? issue.line
+          ? `${issue.file}:${issue.line}`
+          : issue.file
+        : '';
+      console.log(` ✗ ${issue.message}${location ? ` (${location})` : ''}`);
+    }
+    console.log('');
+  }
+
+  if (warnings.length > 0) {
+    console.log('Warnings:');
+    for (const issue of warnings) {
+      const location = issue.file
+        ? issue.line
+          ? `${issue.file}:${issue.line}`
+          : issue.file
+        : '';
+      console.log(` ⚠ ${issue.message}${location ? 
` (${location})` : ''}`); + } + console.log(''); + } + + // Summary + if (errors.length === 0 && warnings.length === 0) { + logger.info('✓ Configuration is valid'); + } else { + logger.info( + `Found ${errors.length} error(s) and ${warnings.length} warning(s)`, + ); + } + + if (errors.length > 0) { + process.exitCode = 1; + } +} diff --git a/src/config.ts b/src/config.ts index 723fd05b..0df1ab85 100644 --- a/src/config.ts +++ b/src/config.ts @@ -2,8 +2,7 @@ import { existsSync, lstatSync, readFileSync } from 'fs'; import path from 'path'; import { load } from 'js-yaml'; -import GitUrlParse from 'git-url-parse'; -import { createGitClient } from './utils/git'; +import { createGitClient, getGitHubInfoFromRemote } from './utils/git'; import { ZodError } from 'zod'; import { logger } from './logger'; @@ -248,6 +247,9 @@ export function requiresMinVersion(requiredVersion: string): boolean { /** Minimum craft version required for auto-versioning and CalVer */ const AUTO_VERSION_MIN_VERSION = '2.14.0'; +/** Minimum craft version required for smart defaults (auto changelog, etc.) */ +export const SMART_DEFAULTS_MIN_VERSION = '2.21.0'; + /** * Returns the effective versioning policy for the project. 
* @@ -301,24 +303,11 @@ export async function getGlobalGitHubConfig( if (!repoGitHubConfig) { const configDir = getConfigFileDir() || '.'; const git = createGitClient(configDir); - let remoteUrl; try { - const remotes = await git.getRemotes(true); - const defaultRemote = - remotes.find(remote => remote.name === 'origin') || remotes[0]; - remoteUrl = - defaultRemote && - GitUrlParse(defaultRemote.refs.push || defaultRemote.refs.fetch); + repoGitHubConfig = await getGitHubInfoFromRemote(git); } catch (error) { logger.warn('Error when trying to get git remotes: ', error); } - - if (remoteUrl?.source === 'github.com') { - repoGitHubConfig = { - owner: remoteUrl.owner, - repo: remoteUrl.name, - }; - } } _globalGitHubConfigCache = Object.freeze(repoGitHubConfig); @@ -424,13 +413,18 @@ const DEFAULT_CHANGELOG_FILE_PATH = 'CHANGELOG.md'; * * Handles both legacy `changelogPolicy` and new `changelog` object format. * Emits deprecation warning when using `changelogPolicy`. + * + * Smart defaults (when minVersion >= 2.21.0): + * - policy defaults to 'auto' instead of 'none' */ export function getChangelogConfig(): NormalizedChangelogConfig { const config = getConfiguration(); - // Default values + // Default values - use smart defaults for minVersion >= 2.21.0 let filePath = DEFAULT_CHANGELOG_FILE_PATH; - let policy = ChangelogPolicy.None; + let policy = requiresMinVersion(SMART_DEFAULTS_MIN_VERSION) + ? 
ChangelogPolicy.Auto + : ChangelogPolicy.None; let scopeGrouping = true; // Handle legacy changelogPolicy (deprecated) diff --git a/src/index.ts b/src/index.ts index d3cfeefe..96a2a645 100644 --- a/src/index.ts +++ b/src/index.ts @@ -18,6 +18,8 @@ import * as targets from './commands/targets'; import * as config from './commands/config'; import * as artifacts from './commands/artifacts'; import * as changelog from './commands/changelog'; +import * as init from './commands/init'; +import * as validate from './commands/validate'; function printVersion(): void { if (!process.argv.includes('-v') && !process.argv.includes('--version')) { @@ -87,6 +89,8 @@ async function main(): Promise { .command(config) .command(artifacts) .command(changelog) + .command(init) + .command(validate) .demandCommand() .version(getPackageVersion()) .alias('v', 'version') diff --git a/src/targets/base.ts b/src/targets/base.ts index 7fb08e45..0f6b88ad 100644 --- a/src/targets/base.ts +++ b/src/targets/base.ts @@ -10,6 +10,7 @@ import { BaseArtifactProvider, RemoteArtifact, } from '../artifact_providers/base'; +import { DetectionContext, DetectionResult } from '../utils/detection'; /** * Base class for all remote targets @@ -32,10 +33,23 @@ export class BaseTarget { : target.name || '__undefined__'; } + /** + * Detect if this target applies to the given project. + * + * This static method is called during `craft init` to automatically + * discover which targets should be configured for a project. 
+ * + * @param _context Detection context with project information + * @returns Detection result with config and priority, or null if not applicable + */ + public static detect?( + _context: DetectionContext, + ): Promise | DetectionResult | null; + public constructor( config: TargetConfig, artifactProvider: BaseArtifactProvider, - githubRepo?: GitHubGlobalConfig + githubRepo?: GitHubGlobalConfig, ) { this.logger = loggerRaw.withScope(`[target/${config.name}]`); this.artifactProvider = artifactProvider; @@ -45,12 +59,12 @@ export class BaseTarget { this.filterOptions = {}; if (this.config.includeNames) { this.filterOptions.includeNames = stringToRegexp( - this.config.includeNames + this.config.includeNames, ); } if (this.config.excludeNames) { this.filterOptions.excludeNames = stringToRegexp( - this.config.excludeNames + this.config.excludeNames, ); } } @@ -64,7 +78,7 @@ export class BaseTarget { public async publish( _version: string, - _revision: string + _revision: string, ): Promise { throw new Error('Not implemented'); return; @@ -80,7 +94,7 @@ export class BaseTarget { */ public async getArtifactsForRevision( revision: string, - defaultFilterOptions: RawFilterOptions = {} + defaultFilterOptions: RawFilterOptions = {}, ): Promise { const filterOptions = { ...parseFilterOptions(defaultFilterOptions), @@ -88,12 +102,12 @@ export class BaseTarget { }; this.logger.debug( `Getting artifact list for revision "${revision}", filtering options: {includeNames: ${String( - filterOptions.includeNames - )}, excludeNames:${String(filterOptions.excludeNames)}}` + filterOptions.includeNames, + )}, excludeNames:${String(filterOptions.excludeNames)}}`, ); return this.artifactProvider.filterArtifactsForRevision( revision, - filterOptions + filterOptions, ); } } diff --git a/src/targets/crates.ts b/src/targets/crates.ts index 6eb1596c..cbdc7097 100644 --- a/src/targets/crates.ts +++ b/src/targets/crates.ts @@ -15,6 +15,12 @@ import { } from '../utils/system'; import { BaseTarget 
} from './base'; import { BaseArtifactProvider } from '../artifact_providers/base'; +import { + DetectionContext, + DetectionResult, + fileExists, + readTextFile, +} from '../utils/detection'; /** Cargo executable configuration */ const CARGO_CONFIG = { @@ -115,6 +121,9 @@ export class CratesTarget extends BaseTarget { /** GitHub repo configuration */ public readonly githubRepo: GitHubGlobalConfig; + /** Priority for ordering in config (package registries appear first) */ + public static readonly priority = 30; + /** * Bump version in Cargo.toml using cargo set-version (from cargo-edit). * @@ -152,6 +161,49 @@ export class CratesTarget extends BaseTarget { return true; } + /** + * Detect if this project should use the crates target. + * + * Checks for Cargo.toml with package definition. + */ + public static detect(context: DetectionContext): DetectionResult | null { + const { rootDir } = context; + + // Check for Cargo.toml + if (!fileExists(rootDir, 'Cargo.toml')) { + return null; + } + + const content = readTextFile(rootDir, 'Cargo.toml'); + if (!content) { + return null; + } + + const result: DetectionResult = { + config: { name: 'crates' }, + priority: CratesTarget.priority, + requiredSecrets: [ + { + name: 'CRATES_IO_TOKEN', + description: 'crates.io API token for publishing', + }, + ], + }; + + // Check if it has a [package] section (indicates a crate) + // Workspace-only Cargo.toml files may not have [package] + if (content.includes('[package]')) { + return result; + } + + // Check for workspace with members + if (content.includes('[workspace]') && content.includes('members')) { + return result; + } + + return null; + } + public constructor( config: TargetConfig, artifactProvider: BaseArtifactProvider, diff --git a/src/targets/docker.ts b/src/targets/docker.ts index 5309b0b1..3ee5801f 100644 --- a/src/targets/docker.ts +++ b/src/targets/docker.ts @@ -8,6 +8,11 @@ import { ConfigurationError } from '../utils/errors'; import { renderTemplateSafe } from 
'../utils/strings'; import { checkExecutableIsPresent, spawnProcess } from '../utils/system'; import { BaseTarget } from './base'; +import { + DetectionContext, + DetectionResult, + fileExists, +} from '../utils/detection'; const DEFAULT_DOCKER_BIN = 'docker'; @@ -17,7 +22,11 @@ const DEFAULT_DOCKER_BIN = 'docker'; const DOCKER_BIN = process.env.DOCKER_BIN || DEFAULT_DOCKER_BIN; /** Docker Hub registry hostnames that should be treated as the default registry */ -const DOCKER_HUB_REGISTRIES = ['docker.io', 'index.docker.io', 'registry-1.docker.io']; +const DOCKER_HUB_REGISTRIES = [ + 'docker.io', + 'index.docker.io', + 'registry-1.docker.io', +]; /** * Google Cloud registry patterns. @@ -68,7 +77,7 @@ export function hasGcloudCredentials(): boolean { homedir(), '.config', 'gcloud', - 'application_default_credentials.json' + 'application_default_credentials.json', ); if (existsSync(defaultAdcPath)) { return true; @@ -188,14 +197,14 @@ const LEGACY_KEYS: Record<'source' | 'target', LegacyConfigKeys> = { */ export function normalizeImageRef( config: Record, - type: 'source' | 'target' + type: 'source' | 'target', ): ImageRefConfig { const ref = config[type] as ImageRef; // Validate that the required field is present if (ref === undefined || ref === null) { throw new ConfigurationError( - `Docker target requires a '${type}' property. Please specify the ${type} image.` + `Docker target requires a '${type}' property. Please specify the ${type} image.`, ); } @@ -258,9 +267,58 @@ export class DockerTarget extends BaseTarget { /** Target options */ public readonly dockerConfig: DockerTargetOptions; + /** Priority for ordering in config (storage/CDN targets) */ + public static readonly priority = 110; + + /** + * Detect if this project should use the docker target. + * + * Checks for Dockerfile in the root directory. 
+ */ + public static detect(context: DetectionContext): DetectionResult | null { + const { rootDir, githubOwner, githubRepo } = context; + + // Check for Dockerfile + if (!fileExists(rootDir, 'Dockerfile')) { + return null; + } + + const config: TargetConfig = { name: 'docker' }; + + // If we have GitHub info, suggest ghcr.io as source + // Otherwise, default to Docker Hub with placeholders + if (githubOwner && githubRepo) { + config.source = `ghcr.io/${githubOwner}/${githubRepo}`; + config.target = `${githubOwner}/${githubRepo}`; + } else { + // Default to Docker Hub - user must fill in their username/repo + config.source = 'YOUR_DOCKERHUB_USERNAME/YOUR_REPO'; + config.target = 'YOUR_DOCKERHUB_USERNAME/YOUR_REPO'; + } + + return { + config, + priority: DockerTarget.priority, + // ghcr.io uses GITHUB_TOKEN, Docker Hub needs explicit credentials + requiredSecrets: + githubOwner && githubRepo + ? [] + : [ + { + name: 'DOCKER_USERNAME', + description: 'Docker Hub username', + }, + { + name: 'DOCKER_PASSWORD', + description: 'Docker Hub password or access token', + }, + ], + }; + } + public constructor( config: TargetConfig, - artifactProvider: BaseArtifactProvider + artifactProvider: BaseArtifactProvider, ) { super(config, artifactProvider); this.dockerConfig = this.getDockerConfig(); @@ -292,7 +350,7 @@ export class DockerTarget extends BaseTarget { usernameVar?: string, passwordVar?: string, required = true, - useDefaultFallback = true + useDefaultFallback = true, ): RegistryCredentials | undefined { let username: string | undefined; let password: string | undefined; @@ -301,7 +359,7 @@ export class DockerTarget extends BaseTarget { if (usernameVar || passwordVar) { if (!usernameVar || !passwordVar) { throw new ConfigurationError( - 'Both usernameVar and passwordVar must be specified together' + 'Both usernameVar and passwordVar must be specified together', ); } username = process.env[usernameVar]; @@ -310,7 +368,7 @@ export class DockerTarget extends BaseTarget { 
if (!username || !password) { if (required) { throw new ConfigurationError( - `Missing credentials: ${usernameVar} and/or ${passwordVar} environment variable(s) not set` + `Missing credentials: ${usernameVar} and/or ${passwordVar} environment variable(s) not set`, ); } return undefined; @@ -334,7 +392,10 @@ export class DockerTarget extends BaseTarget { // GITHUB_API_TOKEN is used by getsentry/publish workflow with release bot token // x-access-token works with GitHub App installation tokens and PATs username = username || process.env.GITHUB_ACTOR || 'x-access-token'; - password = password || process.env.GITHUB_TOKEN || process.env.GITHUB_API_TOKEN; + password = + password || + process.env.GITHUB_TOKEN || + process.env.GITHUB_API_TOKEN; } } @@ -351,12 +412,12 @@ export class DockerTarget extends BaseTarget { const registryHint = registry ? `DOCKER_${registryToEnvPrefix(registry)}_USERNAME/PASSWORD or ` : ''; - throw new ConfigurationError( - `Cannot perform Docker release: missing credentials. + throw new ConfigurationError( + `Cannot perform Docker release: missing credentials. 
Please use ${registryHint}DOCKER_USERNAME and DOCKER_PASSWORD environment variables.`.replace( - /^\s+/gm, - '' - ) + /^\s+/gm, + '', + ), ); } return undefined; @@ -400,7 +461,7 @@ Please use ${registryHint}DOCKER_USERNAME and DOCKER_PASSWORD environment variab target.usernameVar, target.passwordVar, // Required unless it's a GCR registry (which can use gcloud auth) - !isGcrTarget + !isGcrTarget, ); } @@ -416,7 +477,7 @@ Please use ${registryHint}DOCKER_USERNAME and DOCKER_PASSWORD environment variab // Only required if explicit source env vars are specified !!(source.usernameVar || source.passwordVar), // Don't fall back to DOCKER_USERNAME/PASSWORD for source - false + false, ); } @@ -441,7 +502,9 @@ Please use ${registryHint}DOCKER_USERNAME and DOCKER_PASSWORD environment variab * * @param credentials The registry credentials to use */ - private async loginToRegistry(credentials: RegistryCredentials): Promise { + private async loginToRegistry( + credentials: RegistryCredentials, + ): Promise { const { username, password, registry } = credentials; const args = ['login', `--username=${username}`, '--password-stdin']; if (registry) { @@ -467,23 +530,34 @@ Please use ${registryHint}DOCKER_USERNAME and DOCKER_PASSWORD environment variab // Check if gcloud credentials are available if (!hasGcloudCredentials()) { - this.logger.debug('No gcloud credentials detected, skipping gcloud auth configure-docker'); + this.logger.debug( + 'No gcloud credentials detected, skipping gcloud auth configure-docker', + ); return false; } // Check if gcloud is available if (!(await isGcloudAvailable())) { - this.logger.debug('gcloud CLI not available, skipping gcloud auth configure-docker'); + this.logger.debug( + 'gcloud CLI not available, skipping gcloud auth configure-docker', + ); return false; } const registryList = registries.join(','); - this.logger.debug(`Configuring Docker for Google Cloud registries: ${registryList}`); + this.logger.debug( + `Configuring Docker for Google Cloud 
registries: ${registryList}`, + ); try { // Run gcloud auth configure-docker with the registries // This configures Docker's credential helper to use gcloud for these registries - await spawnProcess('gcloud', ['auth', 'configure-docker', registryList, '--quiet'], {}, {}); + await spawnProcess( + 'gcloud', + ['auth', 'configure-docker', registryList, '--quiet'], + {}, + {}, + ); this.logger.info(`Configured Docker authentication for: ${registryList}`); return true; } catch (error) { @@ -555,18 +629,23 @@ Please use ${registryHint}DOCKER_USERNAME and DOCKER_PASSWORD environment variab ) { // Source registry needs auth but we couldn't configure it // This is okay - source might be public or already authenticated - this.logger.debug(`No credentials for source registry ${sourceRegistry}, assuming public`); + this.logger.debug( + `No credentials for source registry ${sourceRegistry}, assuming public`, + ); } // Login to target registry (if needed and not already configured via gcloud) if (target.credentials) { await this.loginToRegistry(target.credentials); - } else if (!target.skipLogin && !gcrConfiguredRegistries.has(targetRegistry || '')) { + } else if ( + !target.skipLogin && + !gcrConfiguredRegistries.has(targetRegistry || '') + ) { // Target registry needs auth but we have no credentials and couldn't configure gcloud // This will likely fail when pushing, but we let it proceed if (targetRegistry) { this.logger.warn( - `No credentials for target registry ${targetRegistry}. Push may fail.` + `No credentials for target registry ${targetRegistry}. 
Push may fail.`, ); } } @@ -601,7 +680,7 @@ Please use ${registryHint}DOCKER_USERNAME and DOCKER_PASSWORD environment variab DOCKER_BIN, ['buildx', 'imagetools', 'create', '--tag', targetImage, sourceImage], {}, - { showStdout: true } + { showStdout: true }, ); } diff --git a/src/targets/gem.ts b/src/targets/gem.ts index 5dec8c11..21f7807c 100644 --- a/src/targets/gem.ts +++ b/src/targets/gem.ts @@ -11,6 +11,12 @@ import { checkExecutableIsPresent, spawnProcess } from '../utils/system'; import { BaseTarget } from './base'; import { TargetConfig } from '../schemas/project_config'; import { logger } from '../logger'; +import { + DetectionContext, + DetectionResult, + fileExists, +} from '../utils/detection'; +import { readdirSync } from 'fs'; const DEFAULT_GEM_BIN = 'gem'; @@ -31,6 +37,9 @@ export class GemTarget extends BaseTarget { /** Target name */ public readonly name: string = 'gem'; + /** Priority for ordering in config (package registries appear first) */ + public static readonly priority = 40; + /** * Bump version in Ruby gem project files. * @@ -128,6 +137,42 @@ export class GemTarget extends BaseTarget { return updated; } + /** + * Detect if this project should use the gem target. + * + * Checks for *.gemspec files in the root directory. 
+ */ + public static detect(context: DetectionContext): DetectionResult | null { + const { rootDir } = context; + + // Check for Gemfile (indicates Ruby project) + if (!fileExists(rootDir, 'Gemfile')) { + return null; + } + + // Look for .gemspec files (indicates a gem) + try { + const files = readdirSync(rootDir); + const hasGemspec = files.some(f => f.endsWith('.gemspec')); + if (hasGemspec) { + return { + config: { name: 'gem' }, + priority: GemTarget.priority, + requiredSecrets: [ + { + name: 'GEM_HOST_API_KEY', + description: 'RubyGems API key for publishing', + }, + ], + }; + } + } catch { + // Ignore errors reading directory + } + + return null; + } + public constructor( config: TargetConfig, artifactProvider: BaseArtifactProvider, diff --git a/src/targets/github.ts b/src/targets/github.ts index eec71200..9c96a872 100644 --- a/src/targets/github.ts +++ b/src/targets/github.ts @@ -27,6 +27,7 @@ import { import { BaseTarget } from './base'; import { BaseArtifactProvider } from '../artifact_providers/base'; import { logger } from '../logger'; +import { DetectionContext, DetectionResult } from '../utils/detection'; /** * Default content type for GitHub release assets. @@ -94,6 +95,33 @@ export class GitHubTarget extends BaseTarget { /** GitHub repo configuration */ public readonly githubRepo: GitHubGlobalConfig; + /** Priority for ordering in config (GitHub should always be last) */ + public static readonly priority = 900; + + /** + * Detect if this project should use the github target. + * + * The GitHub target is always recommended for projects with GitHub remotes, + * as it creates GitHub Releases with changelogs and uploaded artifacts. 
+ */ + public static detect(context: DetectionContext): DetectionResult | null { + // GitHub target should be included when we detect a GitHub repo + if (context.githubOwner && context.githubRepo) { + return { + config: { name: 'github' }, + priority: GitHubTarget.priority, + requiredSecrets: [ + { + name: 'GH_RELEASE_PAT', + description: 'GitHub Personal Access Token with repo scope', + }, + ], + }; + } + + return null; + } + public constructor( config: TargetConfig, artifactProvider: BaseArtifactProvider, diff --git a/src/targets/npm.ts b/src/targets/npm.ts index f3e0f420..8fa5afcd 100644 --- a/src/targets/npm.ts +++ b/src/targets/npm.ts @@ -32,6 +32,12 @@ import { import { withTempFile } from '../utils/files'; import { writeFileSync } from 'fs'; import { logger } from '../logger'; +import { + DetectionContext, + DetectionResult, + fileExists, + readJsonFile, +} from '../utils/detection'; /** npm executable config */ export const NPM_CONFIG = { name: 'npm', envVar: 'NPM_BIN' } as const; @@ -127,6 +133,82 @@ export class NpmTarget extends BaseTarget { /** Target options */ public readonly npmConfig: NpmTargetOptions; + /** Priority for ordering in config (package registries appear first) */ + public static readonly priority = 10; + + /** + * Detect if this project should use the npm target. + * + * Checks for package.json and whether it's publishable (not private without workspaces). + * Also detects Node.js setup (package manager, version file) for workflow generation. 
+ */ + public static detect(context: DetectionContext): DetectionResult | null { + const { rootDir } = context; + + // Check for package.json + if (!fileExists(rootDir, 'package.json')) { + return null; + } + + const pkg = readJsonFile<{ + private?: boolean; + workspaces?: string[] | { packages: string[] }; + name?: string; + packageManager?: string; + volta?: { node?: string }; + }>(rootDir, 'package.json'); + + if (!pkg) { + return null; + } + + // If it's private without workspaces, it's not publishable to npm + if (pkg.private && !pkg.workspaces) { + return null; + } + + // Build the target config + const config: TargetConfig = { name: 'npm' }; + + // If there are workspaces, enable workspace discovery + if (pkg.workspaces) { + config.workspaces = true; + } + + // Detect package manager + let packageManager: 'npm' | 'pnpm' | 'yarn' = 'npm'; + if (pkg.packageManager?.startsWith('pnpm')) { + packageManager = 'pnpm'; + } else if (pkg.packageManager?.startsWith('yarn')) { + packageManager = 'yarn'; + } else if (fileExists(rootDir, 'pnpm-lock.yaml')) { + packageManager = 'pnpm'; + } else if (fileExists(rootDir, 'yarn.lock')) { + packageManager = 'yarn'; + } + + // Detect Node version file + let versionFile: string | undefined; + if (pkg.volta?.node) { + versionFile = 'package.json'; + } else if (fileExists(rootDir, '.nvmrc')) { + versionFile = '.nvmrc'; + } else if (fileExists(rootDir, '.node-version')) { + versionFile = '.node-version'; + } + + return { + config, + priority: NpmTarget.priority, + workflowSetup: { + node: { packageManager, versionFile }, + }, + requiredSecrets: [ + { name: 'NPM_TOKEN', description: 'npm access token for publishing' }, + ], + }; + } + /** * Expand an npm target config into multiple targets if workspaces is enabled. * This static method is called during config loading to expand workspace targets. 
diff --git a/src/targets/pubDev.ts b/src/targets/pubDev.ts index 8786d5d0..a74fa482 100644 --- a/src/targets/pubDev.ts +++ b/src/targets/pubDev.ts @@ -23,6 +23,11 @@ import { checkExecutableIsPresent, spawnProcess } from '../utils/system'; import { isDryRun } from '../utils/helpers'; import { logDryRun } from '../utils/dryRun'; import { logger } from '../logger'; +import { + DetectionContext, + DetectionResult, + fileExists, +} from '../utils/detection'; export const targetSecrets = [ 'PUBDEV_ACCESS_TOKEN', @@ -70,6 +75,9 @@ export class PubDevTarget extends BaseTarget { /** GitHub repo configuration */ public readonly githubRepo: GitHubGlobalConfig; + /** Priority for ordering in config (package registries appear first) */ + public static readonly priority = 60; + /** * Bump version in pubspec.yaml for Dart/Flutter projects. * @@ -103,6 +111,32 @@ export class PubDevTarget extends BaseTarget { return true; } + /** + * Detect if this project should use the pub-dev target. + * + * Checks for pubspec.yaml (Dart/Flutter package). 
+ */ + public static detect(context: DetectionContext): DetectionResult | null { + const { rootDir } = context; + + // Check for pubspec.yaml + if (fileExists(rootDir, 'pubspec.yaml')) { + return { + config: { name: 'pub-dev' }, + priority: PubDevTarget.priority, + requiredSecrets: [ + { name: 'PUBDEV_ACCESS_TOKEN', description: 'pub.dev access token' }, + { + name: 'PUBDEV_REFRESH_TOKEN', + description: 'pub.dev refresh token', + }, + ], + }; + } + + return null; + } + public constructor( config: TargetConfig, artifactProvider: BaseArtifactProvider, diff --git a/src/targets/pypi.ts b/src/targets/pypi.ts index 98070f86..006a264e 100644 --- a/src/targets/pypi.ts +++ b/src/targets/pypi.ts @@ -14,6 +14,12 @@ import { } from '../utils/system'; import { BaseTarget } from './base'; import { logger } from '../logger'; +import { + DetectionContext, + DetectionResult, + fileExists, + readTextFile, +} from '../utils/detection'; const DEFAULT_TWINE_BIN = 'twine'; @@ -44,6 +50,9 @@ export class PypiTarget extends BaseTarget { /** Target options */ public readonly pypiConfig: PypiTargetOptions; + /** Priority for ordering in config (package registries appear first) */ + public static readonly priority = 20; + /** * Bump version in Python project files. * @@ -164,6 +173,73 @@ export class PypiTarget extends BaseTarget { return true; } + /** + * Detect if this project should use the pypi target. + * + * Checks for pyproject.toml or setup.py. + * Also detects Python version for workflow generation. 
+ */ + public static detect(context: DetectionContext): DetectionResult | null { + const { rootDir } = context; + + // Detect Python version + let pythonVersion: string | undefined; + + // Check .python-version file first + if (fileExists(rootDir, '.python-version')) { + pythonVersion = readTextFile(rootDir, '.python-version')?.trim(); + } + + let isPythonPackage = false; + + // Check for pyproject.toml (modern Python packaging) + if (fileExists(rootDir, 'pyproject.toml')) { + const content = readTextFile(rootDir, 'pyproject.toml'); + if (content) { + // Try to extract requires-python if we don't have a version yet + if (!pythonVersion) { + const match = content.match( + /requires-python\s*=\s*["']>=?(\d+\.\d+)/, + ); + if (match) { + pythonVersion = match[1]; + } + } + + // Check if it has a [project] or [tool.poetry] section (indicates a package) + isPythonPackage = + content.includes('[project]') || content.includes('[tool.poetry]'); + } + } + + // Check for setup.py (legacy Python packaging) + if (!isPythonPackage) { + isPythonPackage = fileExists(rootDir, 'setup.py'); + } + + if (isPythonPackage) { + return { + config: { name: 'pypi' }, + priority: PypiTarget.priority, + workflowSetup: { + python: { version: pythonVersion }, + }, + requiredSecrets: [ + { + name: 'TWINE_USERNAME', + description: 'PyPI username (use __token__ for API tokens)', + }, + { + name: 'TWINE_PASSWORD', + description: 'PyPI API token for publishing', + }, + ], + }; + } + + return null; + } + public constructor( config: TargetConfig, artifactProvider: BaseArtifactProvider, diff --git a/src/utils/__tests__/detection.test.ts b/src/utils/__tests__/detection.test.ts index 53bec729..b919f672 100644 --- a/src/utils/__tests__/detection.test.ts +++ b/src/utils/__tests__/detection.test.ts @@ -1,16 +1,24 @@ import { describe, test, expect, beforeEach, afterEach } from 'vitest'; import { mkdirSync, writeFileSync, rmSync } from 'fs'; import { join } from 'path'; -import os from 'os'; +import { tmpdir 
} from 'os'; import { isCompiledGitHubAction } from '../detection'; +import { NpmTarget } from '../../targets/npm'; +import { PypiTarget } from '../../targets/pypi'; +import { CratesTarget } from '../../targets/crates'; +import { DockerTarget } from '../../targets/docker'; +import { GemTarget } from '../../targets/gem'; +import { PubDevTarget } from '../../targets/pubDev'; +import { GitHubTarget } from '../../targets/github'; +import { DetectionContext } from '../detection'; describe('isCompiledGitHubAction', () => { let tempDir: string; beforeEach(() => { tempDir = join( - os.tmpdir(), + tmpdir(), `craft-test-${Date.now()}-${Math.random().toString(36).slice(2)}`, ); mkdirSync(tempDir, { recursive: true }); @@ -183,3 +191,226 @@ runs: expect(isCompiledGitHubAction(tempDir)).toBe(false); }); }); + +describe('Target Detection', () => { + let tempDir: string; + + beforeEach(() => { + tempDir = join( + tmpdir(), + `craft-test-${Date.now()}-${Math.random().toString(36).slice(2)}`, + ); + mkdirSync(tempDir, { recursive: true }); + }); + + afterEach(() => { + rmSync(tempDir, { recursive: true, force: true }); + }); + + const createContext = ( + overrides?: Partial, + ): DetectionContext => ({ + rootDir: tempDir, + ...overrides, + }); + + describe('NpmTarget.detect', () => { + test('returns null when no package.json exists', () => { + const result = NpmTarget.detect(createContext()); + expect(result).toBeNull(); + }); + + test('returns null for private package without workspaces', () => { + writeFileSync( + join(tempDir, 'package.json'), + JSON.stringify({ name: 'test-pkg', private: true }), + ); + const result = NpmTarget.detect(createContext()); + expect(result).toBeNull(); + }); + + test('detects public npm package', () => { + writeFileSync( + join(tempDir, 'package.json'), + JSON.stringify({ name: 'test-pkg', version: '1.0.0' }), + ); + const result = NpmTarget.detect(createContext()); + expect(result).not.toBeNull(); + expect(result?.config.name).toBe('npm'); + 
expect(result?.priority).toBe(NpmTarget.priority); + }); + + test('detects npm workspace package', () => { + writeFileSync( + join(tempDir, 'package.json'), + JSON.stringify({ + name: 'test-monorepo', + private: true, + workspaces: ['packages/*'], + }), + ); + const result = NpmTarget.detect(createContext()); + expect(result).not.toBeNull(); + expect(result?.config.name).toBe('npm'); + expect(result?.config.workspaces).toBe(true); + }); + }); + + describe('PypiTarget.detect', () => { + test('returns null when no Python files exist', () => { + const result = PypiTarget.detect(createContext()); + expect(result).toBeNull(); + }); + + test('detects pyproject.toml with [project]', () => { + writeFileSync( + join(tempDir, 'pyproject.toml'), + '[project]\nname = "test-pkg"\nversion = "1.0.0"', + ); + const result = PypiTarget.detect(createContext()); + expect(result).not.toBeNull(); + expect(result?.config.name).toBe('pypi'); + expect(result?.priority).toBe(PypiTarget.priority); + }); + + test('detects pyproject.toml with [tool.poetry]', () => { + writeFileSync( + join(tempDir, 'pyproject.toml'), + '[tool.poetry]\nname = "test-pkg"\nversion = "1.0.0"', + ); + const result = PypiTarget.detect(createContext()); + expect(result).not.toBeNull(); + expect(result?.config.name).toBe('pypi'); + }); + + test('detects setup.py', () => { + writeFileSync(join(tempDir, 'setup.py'), 'from setuptools import setup'); + const result = PypiTarget.detect(createContext()); + expect(result).not.toBeNull(); + expect(result?.config.name).toBe('pypi'); + }); + }); + + describe('CratesTarget.detect', () => { + test('returns null when no Cargo.toml exists', () => { + const result = CratesTarget.detect(createContext()); + expect(result).toBeNull(); + }); + + test('detects Cargo.toml with [package]', () => { + writeFileSync( + join(tempDir, 'Cargo.toml'), + '[package]\nname = "test-crate"\nversion = "1.0.0"', + ); + const result = CratesTarget.detect(createContext()); + 
expect(result).not.toBeNull(); + expect(result?.config.name).toBe('crates'); + expect(result?.priority).toBe(CratesTarget.priority); + }); + + test('detects Cargo.toml with workspace', () => { + writeFileSync( + join(tempDir, 'Cargo.toml'), + '[workspace]\nmembers = ["crate-a", "crate-b"]', + ); + const result = CratesTarget.detect(createContext()); + expect(result).not.toBeNull(); + expect(result?.config.name).toBe('crates'); + }); + }); + + describe('DockerTarget.detect', () => { + test('returns null when no Dockerfile exists', () => { + const result = DockerTarget.detect(createContext()); + expect(result).toBeNull(); + }); + + test('detects Dockerfile', () => { + writeFileSync(join(tempDir, 'Dockerfile'), 'FROM node:18'); + const result = DockerTarget.detect(createContext()); + expect(result).not.toBeNull(); + expect(result?.config.name).toBe('docker'); + expect(result?.priority).toBe(DockerTarget.priority); + }); + + test('includes ghcr.io source when GitHub info available', () => { + writeFileSync(join(tempDir, 'Dockerfile'), 'FROM node:18'); + const result = DockerTarget.detect( + createContext({ githubOwner: 'getsentry', githubRepo: 'craft' }), + ); + expect(result).not.toBeNull(); + expect(result?.config.source).toBe('ghcr.io/getsentry/craft'); + expect(result?.config.target).toBe('getsentry/craft'); + }); + }); + + describe('GemTarget.detect', () => { + test('returns null when no Gemfile exists', () => { + const result = GemTarget.detect(createContext()); + expect(result).toBeNull(); + }); + + test('returns null when Gemfile exists but no gemspec', () => { + writeFileSync(join(tempDir, 'Gemfile'), 'source "https://rubygems.org"'); + const result = GemTarget.detect(createContext()); + expect(result).toBeNull(); + }); + + test('detects gemspec file', () => { + writeFileSync(join(tempDir, 'Gemfile'), 'source "https://rubygems.org"'); + writeFileSync(join(tempDir, 'test.gemspec'), 'Gem::Specification.new'); + const result = GemTarget.detect(createContext()); + 
expect(result).not.toBeNull(); + expect(result?.config.name).toBe('gem'); + expect(result?.priority).toBe(GemTarget.priority); + }); + }); + + describe('PubDevTarget.detect', () => { + test('returns null when no pubspec.yaml exists', () => { + const result = PubDevTarget.detect(createContext()); + expect(result).toBeNull(); + }); + + test('detects pubspec.yaml', () => { + writeFileSync( + join(tempDir, 'pubspec.yaml'), + 'name: test_pkg\nversion: 1.0.0', + ); + const result = PubDevTarget.detect(createContext()); + expect(result).not.toBeNull(); + expect(result?.config.name).toBe('pub-dev'); + expect(result?.priority).toBe(PubDevTarget.priority); + }); + }); + + describe('GitHubTarget.detect', () => { + test('returns null when no GitHub info available', () => { + const result = GitHubTarget.detect(createContext()); + expect(result).toBeNull(); + }); + + test('detects GitHub repo', () => { + const result = GitHubTarget.detect( + createContext({ githubOwner: 'getsentry', githubRepo: 'craft' }), + ); + expect(result).not.toBeNull(); + expect(result?.config.name).toBe('github'); + expect(result?.priority).toBe(GitHubTarget.priority); + }); + }); + + describe('Priority ordering', () => { + test('npm comes before github', () => { + expect(NpmTarget.priority).toBeLessThan(GitHubTarget.priority); + }); + + test('pypi comes before docker', () => { + expect(PypiTarget.priority).toBeLessThan(DockerTarget.priority); + }); + + test('docker comes before github', () => { + expect(DockerTarget.priority).toBeLessThan(GitHubTarget.priority); + }); + }); +}); diff --git a/src/utils/__tests__/dryRun.test.ts b/src/utils/__tests__/dryRun.test.ts index 24f4f57b..00e7b92c 100644 --- a/src/utils/__tests__/dryRun.test.ts +++ b/src/utils/__tests__/dryRun.test.ts @@ -1,4 +1,7 @@ import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; +import { tmpdir } from 'os'; +import { join } from 'path'; +import { unlinkSync } from 'fs'; import * as helpers from '../helpers'; // Mock 
the helpers module to control isDryRun @@ -50,7 +53,7 @@ describe('dryRun utilities', () => { it('logs with consistent format', () => { logDryRun('test operation'); expect(logger.info).toHaveBeenCalledWith( - '[dry-run] Would execute: test operation' + '[dry-run] Would execute: test operation', ); }); }); @@ -98,7 +101,7 @@ describe('dryRun utilities', () => { await git.push(); expect(mockGit.push).not.toHaveBeenCalled(); expect(logger.info).toHaveBeenCalledWith( - expect.stringContaining('[dry-run]') + expect.stringContaining('[dry-run]'), ); }); @@ -110,7 +113,7 @@ describe('dryRun utilities', () => { await git.raw('push', 'origin', 'main'); expect(mockGit.raw).not.toHaveBeenCalled(); expect(logger.info).toHaveBeenCalledWith( - expect.stringContaining('git push origin main') + expect.stringContaining('git push origin main'), ); }); @@ -162,7 +165,7 @@ describe('dryRun utilities', () => { // Verify dry-run messages were logged expect(logger.info).toHaveBeenCalledWith( - expect.stringContaining('[dry-run]') + expect.stringContaining('[dry-run]'), ); }); @@ -194,7 +197,11 @@ describe('dryRun utilities', () => { vi.mocked(helpers.isDryRun).mockReturnValue(false); const octokit = createDryRunOctokit(mockOctokit as any); - await octokit.repos.getContent({ owner: 'test', repo: 'test', path: '/' }); + await octokit.repos.getContent({ + owner: 'test', + repo: 'test', + path: '/', + }); expect(mockOctokit.repos.getContent).toHaveBeenCalled(); }); @@ -222,7 +229,7 @@ describe('dryRun utilities', () => { }); expect(mockOctokit.repos.createRelease).not.toHaveBeenCalled(); expect(logger.info).toHaveBeenCalledWith( - expect.stringContaining('[dry-run]') + expect.stringContaining('[dry-run]'), ); }); @@ -244,7 +251,11 @@ describe('dryRun utilities', () => { vi.mocked(helpers.isDryRun).mockReturnValue(true); const octokit = createDryRunOctokit(mockOctokit as any); - await octokit.repos.getContent({ owner: 'test', repo: 'test', path: '/' }); + await octokit.repos.getContent({ + 
owner: 'test', + repo: 'test', + path: '/', + }); expect(mockOctokit.repos.getContent).toHaveBeenCalled(); }); }); @@ -256,7 +267,7 @@ describe('dryRun utilities', () => { await safeFs.writeFile('/tmp/test.txt', 'content'); expect(logger.info).toHaveBeenCalledWith( - '[dry-run] Would execute: fs.writeFile(/tmp/test.txt)' + '[dry-run] Would execute: fs.writeFile(/tmp/test.txt)', ); }); @@ -265,7 +276,7 @@ describe('dryRun utilities', () => { await safeFs.unlink('/tmp/test.txt'); expect(logger.info).toHaveBeenCalledWith( - '[dry-run] Would execute: fs.unlink(/tmp/test.txt)' + '[dry-run] Would execute: fs.unlink(/tmp/test.txt)', ); }); @@ -274,7 +285,7 @@ describe('dryRun utilities', () => { await safeFs.rename('/tmp/old.txt', '/tmp/new.txt'); expect(logger.info).toHaveBeenCalledWith( - '[dry-run] Would execute: fs.rename(/tmp/old.txt, /tmp/new.txt)' + '[dry-run] Would execute: fs.rename(/tmp/old.txt, /tmp/new.txt)', ); }); }); @@ -299,7 +310,7 @@ describe('dryRun utilities', () => { expect(action).not.toHaveBeenCalled(); expect(result).toBeUndefined(); expect(logger.info).toHaveBeenCalledWith( - '[dry-run] Would execute: test action' + '[dry-run] Would execute: test action', ); }); @@ -336,7 +347,7 @@ describe('dryRun utilities', () => { expect(action).not.toHaveBeenCalled(); expect(result).toBeUndefined(); expect(logger.info).toHaveBeenCalledWith( - '[dry-run] Would execute: test action' + '[dry-run] Would execute: test action', ); }); @@ -406,7 +417,7 @@ describe('dryRun utilities', () => { await git.push(); expect(mockGit.push).not.toHaveBeenCalled(); expect(logger.info).toHaveBeenCalledWith( - expect.stringContaining('[dry-run]') + expect.stringContaining('[dry-run]'), ); }); }); @@ -416,23 +427,34 @@ describe('dryRun utilities', () => { vi.mocked(helpers.isDryRun).mockReturnValue(true); enableWorktreeMode(); + // Use os.tmpdir() which respects TMPDIR env var + const testFile = join(tmpdir(), 'craft-dryrun-test.txt'); + // In worktree mode, safeFs should not 
block or log vi.mocked(logger.info).mockClear(); - await safeFs.writeFile('/tmp/test.txt', 'content'); + await safeFs.writeFile(testFile, 'content'); // Should NOT have logged a dry-run message (operation is allowed) expect(logger.info).not.toHaveBeenCalledWith( - expect.stringContaining('[dry-run] Would execute: fs.writeFile') + expect.stringContaining('[dry-run] Would execute: fs.writeFile'), ); + + // Clean up the test file + try { + unlinkSync(testFile); + } catch { + // Ignore cleanup errors + } }); it('blocks file operations in strict dry-run mode', async () => { vi.mocked(helpers.isDryRun).mockReturnValue(true); disableWorktreeMode(); - await safeFs.writeFile('/tmp/test.txt', 'content'); + const testFile = join(tmpdir(), 'craft-dryrun-test.txt'); + await safeFs.writeFile(testFile, 'content'); expect(logger.info).toHaveBeenCalledWith( - '[dry-run] Would execute: fs.writeFile(/tmp/test.txt)' + `[dry-run] Would execute: fs.writeFile(${testFile})`, ); }); }); diff --git a/src/utils/__tests__/templates.test.ts b/src/utils/__tests__/templates.test.ts new file mode 100644 index 00000000..d74c97aa --- /dev/null +++ b/src/utils/__tests__/templates.test.ts @@ -0,0 +1,171 @@ +import { describe, test, expect } from 'vitest'; +import { load } from 'js-yaml'; + +import { + generateCraftConfig, + generateReleaseWorkflow, + generateChangelogPreviewWorkflow, + TemplateContext, +} from '../templates'; + +describe('Template Generation', () => { + const baseContext: TemplateContext = { + githubOwner: 'getsentry', + githubRepo: 'test-repo', + targets: [{ name: 'npm' }, { name: 'github' }], + }; + + describe('generateCraftConfig', () => { + test('generates minimal config', () => { + const yaml = generateCraftConfig(baseContext); + const parsed = load(yaml) as Record; + + expect(parsed.minVersion).toBe('2.21.0'); + expect(parsed.targets).toHaveLength(2); + }); + + test('includes all target properties', () => { + const context: TemplateContext = { + ...baseContext, + targets: [ + { 
name: 'npm', workspaces: true }, + { name: 'docker', source: 'ghcr.io/test/repo', target: 'test/repo' }, + { name: 'github' }, + ], + }; + + const yaml = generateCraftConfig(context); + const parsed = load(yaml) as Record; + const targets = parsed.targets as Record[]; + + expect(targets[0]).toEqual({ name: 'npm', workspaces: true }); + expect(targets[1]).toEqual({ + name: 'docker', + source: 'ghcr.io/test/repo', + target: 'test/repo', + }); + }); + }); + + describe('generateReleaseWorkflow', () => { + test('generates basic workflow structure', () => { + const yaml = generateReleaseWorkflow(baseContext); + const parsed = load(yaml) as Record; + + expect(parsed.name).toBe('Release'); + expect(parsed.on).toHaveProperty('workflow_dispatch'); + expect(parsed.jobs).toHaveProperty('release'); + }); + + test('includes checkout step', () => { + const yaml = generateReleaseWorkflow(baseContext); + const parsed = load(yaml) as Record; + const job = (parsed.jobs as Record).release as Record< + string, + unknown + >; + const steps = job.steps as Record[]; + + const checkoutStep = steps.find(s => + (s.uses as string)?.includes('checkout'), + ); + expect(checkoutStep).toBeDefined(); + expect( + (checkoutStep?.with as Record)['fetch-depth'], + ).toBe(0); + }); + + test('includes pnpm setup for pnpm projects', () => { + const context: TemplateContext = { + ...baseContext, + workflowSetup: { node: { packageManager: 'pnpm' } }, + }; + + const yaml = generateReleaseWorkflow(context); + const parsed = load(yaml) as Record; + const job = (parsed.jobs as Record).release as Record< + string, + unknown + >; + const steps = job.steps as Record[]; + + const pnpmStep = steps.find(s => + (s.uses as string)?.includes('pnpm/action-setup'), + ); + expect(pnpmStep).toBeDefined(); + }); + + test('includes Python setup for Python projects', () => { + const context: TemplateContext = { + ...baseContext, + workflowSetup: { python: { version: '3.11' } }, + }; + + const yaml = 
generateReleaseWorkflow(context); + const parsed = load(yaml) as Record; + const job = (parsed.jobs as Record).release as Record< + string, + unknown + >; + const steps = job.steps as Record[]; + + const pythonStep = steps.find(s => + (s.uses as string)?.includes('setup-python'), + ); + expect(pythonStep).toBeDefined(); + expect( + (pythonStep?.with as Record)['python-version'], + ).toBe('3.11'); + }); + + test('includes Craft action', () => { + const yaml = generateReleaseWorkflow(baseContext); + const parsed = load(yaml) as Record; + const job = (parsed.jobs as Record).release as Record< + string, + unknown + >; + const steps = job.steps as Record[]; + + const craftStep = steps.find(s => + (s.uses as string)?.includes('getsentry/craft'), + ); + expect(craftStep).toBeDefined(); + expect((craftStep?.with as Record).version).toBe( + '${{ inputs.version }}', + ); + }); + }); + + describe('generateChangelogPreviewWorkflow', () => { + test('generates changelog preview workflow with pull_request_target', () => { + const yaml = generateChangelogPreviewWorkflow(); + const parsed = load(yaml) as Record; + + expect(parsed.name).toBe('Changelog Preview'); + expect(parsed.on).toHaveProperty('pull_request_target'); + }); + + test('uses craft reusable workflow', () => { + const yaml = generateChangelogPreviewWorkflow(); + const parsed = load(yaml) as Record; + const job = (parsed.jobs as Record)[ + 'changelog-preview' + ] as Record; + + expect(job.uses).toBe( + 'getsentry/craft/.github/workflows/changelog-preview.yml@v2', + ); + expect(job.secrets).toBe('inherit'); + }); + + test('sets required permissions', () => { + const yaml = generateChangelogPreviewWorkflow(); + const parsed = load(yaml) as Record; + const permissions = parsed.permissions as Record; + + expect(permissions.contents).toBe('read'); + expect(permissions['pull-requests']).toBe('write'); + }); + }); +}); diff --git a/src/utils/detection.ts b/src/utils/detection.ts index dd08deae..ba36651a 100644 --- 
a/src/utils/detection.ts +++ b/src/utils/detection.ts @@ -1,6 +1,7 @@ import { existsSync, readFileSync } from 'fs'; import path from 'path'; import { load } from 'js-yaml'; +import { TargetConfig } from '../schemas/project_config'; /** * GitHub Action manifest structure (partial, only what we need) @@ -12,10 +13,107 @@ interface ActionManifest { }; } +/** + * Context for target detection, providing information about the project + * that targets can use to determine if they apply. + */ +export interface DetectionContext { + /** Root directory of the project */ + rootDir: string; + /** GitHub owner (if detected) */ + githubOwner?: string; + /** GitHub repo name (if detected) */ + githubRepo?: string; +} + +/** + * Information about a required secret for a target + */ +export interface RequiredSecret { + /** Environment variable name (e.g., 'NPM_TOKEN') */ + name: string; + /** Human-readable description */ + description: string; +} + +/** + * Workflow setup information detected from the project. + * Used to generate appropriate GitHub Actions workflows. + */ +export interface WorkflowSetup { + /** Node.js setup (if applicable) */ + node?: { + /** Package manager to use */ + packageManager: 'npm' | 'pnpm' | 'yarn'; + /** Node version file path (e.g., .nvmrc, package.json for volta) */ + versionFile?: string; + }; + /** Python setup (if applicable) */ + python?: { + /** Python version */ + version?: string; + }; +} + +/** + * Result of target detection, including the config and a priority for ordering. + * Higher priority targets appear later in the generated config (e.g., github should be last). + */ +export interface DetectionResult { + /** The detected target configuration */ + config: TargetConfig; + /** + * Priority for ordering in the config file. + * Lower numbers appear first. Use these guidelines: + * - 0-99: Package registries (npm, pypi, crates, etc.) + * - 100-199: Storage/CDN targets (gcs, docker, etc.) 
+ * - 200-299: Registry/metadata targets + * - 900-999: GitHub and other "final" targets + */ + priority: number; + /** + * Workflow setup information for this target. + * Used to generate appropriate GitHub Actions workflows. + */ + workflowSetup?: WorkflowSetup; + /** + * Secrets required by this target for publishing. + */ + requiredSecrets?: RequiredSecret[]; +} + +/** + * Check if a file exists in the given directory + */ +export function fileExists( + rootDir: string, + ...pathSegments: string[] +): boolean { + return existsSync(path.join(rootDir, ...pathSegments)); +} + +/** + * Read a JSON file from the project directory + */ +export function readJsonFile( + rootDir: string, + ...pathSegments: string[] +): T | null { + const filePath = path.join(rootDir, ...pathSegments); + if (!existsSync(filePath)) { + return null; + } + try { + return JSON.parse(readFileSync(filePath, 'utf-8')); + } catch { + return null; + } +} + /** * Read a file as text from the project directory */ -function readTextFile( +export function readTextFile( rootDir: string, ...pathSegments: string[] ): string | null { @@ -80,3 +178,16 @@ export function isCompiledGitHubAction(rootDir: string): boolean { return false; } } + +/** + * Recommended priority values for target ordering in generated configs. + * Lower numbers appear first in the config file. + * + * Each target should define its own `static readonly priority` property. 
+ * + * Guidelines: + * - 0-99: Package registries (npm=10, pypi=20, crates=30, gem=40, nuget=50, pub-dev=60, hex=70, maven=80, cocoapods=90) + * - 100-199: Storage/CDN targets (gcs=100, docker=110, aws-lambda=120, powershell=130) + * - 200-299: Registry/metadata targets (registry=200, brew=210, symbol-collector=220, gh-pages=230) + * - 900-999: GitHub and other "final" targets (github=900) + */ diff --git a/src/utils/git.ts b/src/utils/git.ts index 523b402b..4b04e689 100644 --- a/src/utils/git.ts +++ b/src/utils/git.ts @@ -4,12 +4,18 @@ import simpleGit, { type Options, type StatusResult, } from 'simple-git'; +import GitUrlParse from 'git-url-parse'; import { getConfigFileDir } from '../config'; import { ConfigurationError } from './errors'; import { createDryRunGit } from './dryRun'; import { logger } from '../logger'; +export interface GitHubInfo { + owner: string; + repo: string; +} + export interface GitChange { hash: string; title: string; @@ -175,3 +181,37 @@ export function isRepoDirty(repoStatus: StatusResult): boolean { repoStatus.staged.length ); } + +/** + * Extract GitHub owner/repo from git remote. + * + * Looks for the 'origin' remote first, then falls back to the first available remote. + * Returns null if no GitHub remote is found. 
+ * + * @param git SimpleGit instance for the repository + * @returns GitHub owner and repo, or null if not a GitHub repo + */ +export async function getGitHubInfoFromRemote( + git: SimpleGit, +): Promise { + const remotes = await git.getRemotes(true); + const defaultRemote = + remotes.find(remote => remote.name === 'origin') || remotes[0]; + + if (!defaultRemote) { + return null; + } + + const remoteUrl = GitUrlParse( + defaultRemote.refs.push || defaultRemote.refs.fetch, + ); + + if (remoteUrl?.source === 'github.com') { + return { + owner: remoteUrl.owner, + repo: remoteUrl.name, + }; + } + + return null; +} diff --git a/src/utils/templates.ts b/src/utils/templates.ts new file mode 100644 index 00000000..0b719187 --- /dev/null +++ b/src/utils/templates.ts @@ -0,0 +1,202 @@ +/** + * Template system for generating GitHub Actions workflows and .craft.yml files. + * + * Uses a simple AST-like structure that can be materialized into YAML. + * This approach is more compact than storing full YAML templates and + * allows for conditional sections based on project detection. 
+ */ + +import { dump } from 'js-yaml'; +import { SMART_DEFAULTS_MIN_VERSION } from '../config'; +import { TargetConfig } from '../schemas/project_config'; +import { WorkflowSetup } from './detection'; + +/** + * Context for generating templates + */ +export interface TemplateContext { + /** GitHub owner */ + githubOwner: string; + /** GitHub repo name */ + githubRepo: string; + /** Detected targets */ + targets: TargetConfig[]; + /** Workflow setup (aggregated from targets) */ + workflowSetup?: WorkflowSetup; +} + +/** + * Generate a .craft.yml configuration file + */ +export function generateCraftConfig(context: TemplateContext): string { + const config: Record = { + minVersion: SMART_DEFAULTS_MIN_VERSION, + }; + + // Sort targets by priority (already sorted from detection) + if (context.targets.length > 0) { + config.targets = context.targets.map(t => { + // Clean up undefined values + const cleanTarget: Record = { name: t.name }; + for (const [key, value] of Object.entries(t)) { + if (value !== undefined && key !== 'name') { + cleanTarget[key] = value; + } + } + return cleanTarget; + }); + } + + return dump(config, { + indent: 2, + lineWidth: 120, + noRefs: true, + sortKeys: false, + }); +} + +/** + * Generate a GitHub Actions release workflow + */ +export function generateReleaseWorkflow(context: TemplateContext): string { + const workflow: Record = { + name: 'Release', + on: { + workflow_dispatch: { + inputs: { + version: { + description: 'Version to release (leave empty for auto)', + required: false, + }, + }, + }, + }, + jobs: { + release: generateReleaseJob(context), + }, + }; + + return dump(workflow, { + indent: 2, + lineWidth: 120, + noRefs: true, + sortKeys: false, + quotingType: '"', + forceQuotes: false, + }); +} + +/** + * Generate the release job for the workflow + */ +function generateReleaseJob(context: TemplateContext): Record { + const steps: Record[] = []; + + // Checkout + steps.push({ + uses: 'actions/checkout@v4', + with: { + 'fetch-depth': 
0, + token: '${{ secrets.GH_RELEASE_PAT }}', + }, + }); + + // Node.js setup (if needed) + if (context.workflowSetup?.node) { + if (context.workflowSetup.node.packageManager === 'pnpm') { + steps.push({ + uses: 'pnpm/action-setup@v4', + }); + } + + const nodeStep: Record = { + uses: 'actions/setup-node@v4', + with: { + cache: context.workflowSetup.node.packageManager, + }, + }; + + if (context.workflowSetup.node.versionFile) { + nodeStep.with = { + ...(nodeStep.with as Record), + 'node-version-file': context.workflowSetup.node.versionFile, + }; + } + + steps.push(nodeStep); + } + + // Python setup (if needed) + if (context.workflowSetup?.python) { + const pythonStep: Record = { + uses: 'actions/setup-python@v5', + }; + + if (context.workflowSetup.python.version) { + pythonStep.with = { + 'python-version': context.workflowSetup.python.version, + }; + } + + steps.push(pythonStep); + } + + // Craft action + steps.push({ + uses: 'getsentry/craft@v2', + with: { + version: '${{ inputs.version }}', + }, + env: { + GITHUB_TOKEN: '${{ secrets.GH_RELEASE_PAT }}', + }, + }); + + return { + 'runs-on': 'ubuntu-latest', + steps, + }; +} + +/** + * Generate a changelog preview workflow for PRs + * + * Uses pull_request_target to allow posting comments on PRs from forks. + * Calls the reusable workflow from getsentry/craft. + */ +export function generateChangelogPreviewWorkflow(): string { + const workflow: Record = { + name: 'Changelog Preview', + on: { + pull_request_target: { + types: [ + 'opened', + 'synchronize', + 'reopened', + 'edited', + 'labeled', + 'unlabeled', + ], + }, + }, + permissions: { + contents: 'read', + 'pull-requests': 'write', + }, + jobs: { + 'changelog-preview': { + uses: 'getsentry/craft/.github/workflows/changelog-preview.yml@v2', + secrets: 'inherit', + }, + }, + }; + + return dump(workflow, { + indent: 2, + lineWidth: 120, + noRefs: true, + sortKeys: false, + quotingType: '"', + forceQuotes: false, + }); +}